
/* Array translation routines
   Copyright (C) 2002-2015 Free Software Foundation, Inc.
   Contributed by Paul Brook <paul@nowt.org>
   and Steven Bosscher <s.bosscher@student.tudelft.nl>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* trans-array.c -- Various array related code, including scalarization,
                    allocation, initialization and other support routines.  */

/* How the scalarizer works.

   In gfortran, array expressions use the same core routines as scalar
   expressions.

   First, a Scalarization State (SS) chain is built.  This is done by walking
   the expression tree, and building a linear list of the terms in the
   expression.  As the tree is walked, scalar subexpressions are translated.

   The scalarization parameters are stored in a gfc_loopinfo structure.
   First the start and stride of each term is calculated by
   gfc_conv_ss_startstride.  During this process the expressions for the array
   descriptors and data pointers are also translated.

   If the expression is an assignment, we must then resolve any dependencies.
   In Fortran all the rhs values of an assignment must be evaluated before
   any assignments take place.  This can require a temporary array to store the
   values.  We also require a temporary when we are passing array expressions
   or vector subscripts as procedure parameters.

   Array sections are passed without copying to a temporary.  These use the
   scalarizer to determine the shape of the section.  The flag
   loop->array_parameter tells the scalarizer that the actual values and loop
   variables will not be required.

   The function gfc_conv_loop_setup generates the scalarization setup code.
   It determines the range of the scalarizing loop variables.  If a temporary
   is required, this is created and initialized.  Code for scalar expressions
   taken outside the loop is also generated at this time.  Next the offset and
   scaling required to translate from loop variables to array indices for each
   term are calculated.

   A call to gfc_start_scalarized_body marks the start of the scalarized
   expression.  This creates a scope and declares the loop variables.  Before
   calling this, gfc_mark_ss_chain_used must be used to indicate which terms
   will be used inside this loop.

   The scalar gfc_conv_* functions are then used to build the main body of the
   scalarization loop.  Scalarization loop variables and precalculated scalar
   values are automatically substituted.  Note that gfc_advance_se_ss_chain
   must be used, rather than changing the se->ss directly.

   For assignment expressions requiring a temporary, two sub loops are
   generated.  The first stores the result of the expression in the temporary,
   the second copies it to the result.  A call to
   gfc_trans_scalarized_loop_boundary marks the end of the main loop code and
   the start of the copying loop.  The temporary may be less than full rank.

   Finally gfc_trans_scalarizing_loops is called to generate the implicit do
   loops.  The loops are added to the pre chain of the loopinfo.  The post
   chain may still contain cleanup code.

   After the loop code has been added into its parent scope gfc_cleanup_loop
   is called to free all the SS allocated by the scalarizer.  */
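
/* EDITORIAL NOTE: the block below is an illustrative sketch added for this
   edited listing; it is not part of the original trans-array.c.  It condenses
   the flow described above into the order of calls a typical caller (for
   example the array-assignment translation code) makes into the scalarizer.
   The exact signatures of gfc_walk_expr, gfc_conv_resolve_dependencies,
   gfc_conv_loop_setup, gfc_start_scalarized_body, gfc_conv_expr and
   gfc_trans_scalarizing_loops are assumed from their usual gfortran usage and
   should be checked against trans-array.h; the temporary/copy-loop path via
   gfc_trans_scalarized_loop_boundary is omitted for brevity.  */
#if 0
static tree
example_scalarized_assignment (gfc_expr *lhs, gfc_expr *rhs)
{
  gfc_ss *lss, *rss;
  gfc_loopinfo loop;
  gfc_se lse, rse;
  stmtblock_t block, body;

  gfc_start_block (&block);

  /* 1. Walk both sides of the assignment to build the SS chains.  */
  lss = gfc_walk_expr (lhs);
  rss = gfc_walk_expr (rhs);

  /* 2. Associate the chains with one loop and compute start/stride for
     each term (this also translates the descriptors).  */
  gfc_init_loopinfo (&loop);
  gfc_add_ss_to_loop (&loop, lss);
  gfc_add_ss_to_loop (&loop, rss);
  gfc_conv_ss_startstride (&loop);

  /* 3. Resolve rhs/lhs dependencies and generate the setup code; a
     temporary is created here if one is needed.  */
  gfc_conv_resolve_dependencies (lss, rss);
  gfc_conv_loop_setup (&loop, &rhs->where);

  /* 4. Say which terms are used in the main loop body, then open the
     scalarized body (scope + loop variables).  */
  gfc_mark_ss_chain_used (lss, 1);
  gfc_mark_ss_chain_used (rss, 1);
  gfc_start_scalarized_body (&loop, &body);

  /* 5. Translate the scalarized expressions; loop variables and
     precalculated scalars are substituted automatically.  */
  gfc_init_se (&rse, NULL);
  gfc_init_se (&lse, NULL);
  gfc_copy_loopinfo_to_se (&rse, &loop);
  gfc_copy_loopinfo_to_se (&lse, &loop);
  rse.ss = rss;
  lse.ss = lss;

  gfc_conv_expr (&rse, rhs);
  gfc_conv_expr (&lse, lhs);

  gfc_add_block_to_block (&body, &rse.pre);
  gfc_add_block_to_block (&body, &lse.pre);
  gfc_add_modify (&body, lse.expr,
                  fold_convert (TREE_TYPE (lse.expr), rse.expr));
  gfc_add_block_to_block (&body, &lse.post);
  gfc_add_block_to_block (&body, &rse.post);

  /* 6. Generate the implicit DO loops and free everything the scalarizer
     allocated.  */
  gfc_trans_scalarizing_loops (&loop, &body);
  gfc_add_block_to_block (&block, &loop.pre);
  gfc_add_block_to_block (&block, &loop.post);
  gfc_cleanup_loop (&loop);

  return gfc_finish_block (&block);
}
#endif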
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "gfortran.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "gimple-expr.h"
#include "diagnostic-core.h"  /* For internal_error/fatal_error.  */
#include "flags.h"
#include "constructor.h"
#include "trans.h"
#include "trans-stmt.h"
#include "trans-types.h"
#include "trans-array.h"
#include "trans-const.h"
#include "dependency.h"
#include "wide-int.h"

static bool gfc_get_array_constructor_size (mpz_t *, gfc_constructor_base);

/* The contents of this structure aren't actually used, just the address.  */
static gfc_ss gfc_ss_terminator_var;
gfc_ss * const gfc_ss_terminator = &gfc_ss_terminator_var;


static tree
gfc_array_dataptr_type (tree desc)
{
  return (GFC_TYPE_ARRAY_DATAPTR_TYPE (TREE_TYPE (desc)));
}


/* Build expressions to access the members of an array descriptor.
   It's surprisingly easy to mess up here, so never access
   an array descriptor by "brute force", always use these
   functions.  This also avoids problems if we change the format
   of an array descriptor.

   To understand these magic numbers, look at the comments
   before gfc_build_array_type() in trans-types.c.

   The code within these defines should be the only code which knows the format
   of an array descriptor.
   Any code just needing to obtain the bounds of an array should use
   gfc_conv_array_* rather than the following functions, as these will return
   known constant values, and work with arrays which do not have descriptors.
   (An illustrative sketch of the layout these field indices describe follows
   the #defines below.)

   Don't forget to #undef these!  */
#define DATA_FIELD 0
#define OFFSET_FIELD 1
#define DTYPE_FIELD 2
#define DIMENSION_FIELD 3
#define CAF_TOKEN_FIELD 4

#define STRIDE_SUBFIELD 0
#define LBOUND_SUBFIELD 1
#define UBOUND_SUBFIELD 2
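
/* EDITORIAL NOTE: the block below is an illustrative sketch added for this
   edited listing; it is not part of the original trans-array.c.  Assuming the
   field indices above mirror the layout built by gfc_build_array_type in
   trans-types.c, the descriptor that the accessor functions below pick apart
   corresponds roughly to the following C structure.  The type and field names
   are illustrative, and ptrdiff_t stands in for gfc_array_index_type.  */
#if 0
typedef struct illustrative_descriptor_dimension
{
  ptrdiff_t stride;   /* STRIDE_SUBFIELD == 0  */
  ptrdiff_t lbound;   /* LBOUND_SUBFIELD == 1  */
  ptrdiff_t ubound;   /* UBOUND_SUBFIELD == 2  */
} illustrative_descriptor_dimension;

typedef struct illustrative_array_descriptor
{
  void *data;                                  /* DATA_FIELD == 0  */
  ptrdiff_t offset;                            /* OFFSET_FIELD == 1  */
  ptrdiff_t dtype;                             /* DTYPE_FIELD == 2  */
  illustrative_descriptor_dimension dim[1];    /* DIMENSION_FIELD == 3;
                                                  one entry per dimension.  */
  /* A void *caf_token field follows when -fcoarray=lib is in effect
     (CAF_TOKEN_FIELD == 4); see gfc_conv_descriptor_token below.  */
} illustrative_array_descriptor;

/* Element (i1, ..., in) of the described array is addressed roughly as
   data[offset + i1 * dim[0].stride + ... + in * dim[n-1].stride], with data
   viewed as a pointer to the element type.  */
#endif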
/* This provides READ-ONLY access to the data field.  The field itself
   doesn't have the proper type.  */

tree
gfc_conv_descriptor_data_get (tree desc)
{
  tree field, type, t;

  type = TREE_TYPE (desc);
  gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));

  field = TYPE_FIELDS (type);
  gcc_assert (DATA_FIELD == 0);

  t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
                       field, NULL_TREE);
  t = fold_convert (GFC_TYPE_ARRAY_DATAPTR_TYPE (type), t);

  return t;
}
/* This provides WRITE access to the data field.  */
void
gfc_conv_descriptor_data_set (stmtblock_t *block, tree desc, tree value)
{
  tree field, type, t;

  type = TREE_TYPE (desc);
  gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));

  field = TYPE_FIELDS (type);
  gcc_assert (DATA_FIELD == 0);

  t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
                       field, NULL_TREE);
  gfc_add_modify (block, t, fold_convert (TREE_TYPE (field), value));
}


/* This provides address access to the data field.  This should only be
   used by array allocation, passing this on to the runtime.  */

tree
gfc_conv_descriptor_data_addr (tree desc)
{
  tree field, type, t;

  type = TREE_TYPE (desc);
  gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));

  field = TYPE_FIELDS (type);
  gcc_assert (DATA_FIELD == 0);

  t = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field), desc,
                       field, NULL_TREE);
  return gfc_build_addr_expr (NULL_TREE, t);
}


static tree
gfc_conv_descriptor_offset (tree desc)
{
  tree type;
  tree field;

  type = TREE_TYPE (desc);
  gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));

  field = gfc_advance_chain (TYPE_FIELDS (type), OFFSET_FIELD);
  gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);

  return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
                          desc, field, NULL_TREE);
}

tree
gfc_conv_descriptor_offset_get (tree desc)
{
  return gfc_conv_descriptor_offset (desc);
}

void
gfc_conv_descriptor_offset_set (stmtblock_t *block, tree desc,
                                tree value)
{
  tree t = gfc_conv_descriptor_offset (desc);
  gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
}


tree
gfc_conv_descriptor_dtype (tree desc)
{
  tree field;
  tree type;

  type = TREE_TYPE (desc);
  gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));

  field = gfc_advance_chain (TYPE_FIELDS (type), DTYPE_FIELD);
  gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);

  return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
                          desc, field, NULL_TREE);
}


tree
gfc_conv_descriptor_rank (tree desc)
{
  tree tmp;
  tree dtype;

  dtype = gfc_conv_descriptor_dtype (desc);
  tmp = build_int_cst (TREE_TYPE (dtype), GFC_DTYPE_RANK_MASK);
  tmp = fold_build2_loc (input_location, BIT_AND_EXPR, TREE_TYPE (dtype),
                         dtype, tmp);
  return fold_convert (gfc_get_int_type (gfc_default_integer_kind), tmp);
}


tree
gfc_get_descriptor_dimension (tree desc)
{
  tree type, field;

  type = TREE_TYPE (desc);
  gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));

  field = gfc_advance_chain (TYPE_FIELDS (type), DIMENSION_FIELD);
  gcc_assert (field != NULL_TREE
              && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
              && TREE_CODE (TREE_TYPE (TREE_TYPE (field))) == RECORD_TYPE);

  return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
                          desc, field, NULL_TREE);
}


static tree
gfc_conv_descriptor_dimension (tree desc, tree dim)
{
  tree tmp;

  tmp = gfc_get_descriptor_dimension (desc);

  return gfc_build_array_ref (tmp, dim, NULL);
}


tree
gfc_conv_descriptor_token (tree desc)
{
  tree type;
  tree field;

  type = TREE_TYPE (desc);
  gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
  gcc_assert (flag_coarray == GFC_FCOARRAY_LIB);
  field = gfc_advance_chain (TYPE_FIELDS (type), CAF_TOKEN_FIELD);

  /* Should be a restricted pointer - except in the finalization wrapper.  */
  gcc_assert (field != NULL_TREE
              && (TREE_TYPE (field) == prvoid_type_node
                  || TREE_TYPE (field) == pvoid_type_node));

  return fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
                          desc, field, NULL_TREE);
}


static tree
gfc_conv_descriptor_stride (tree desc, tree dim)
{
  tree tmp;
  tree field;

  tmp = gfc_conv_descriptor_dimension (desc, dim);
  field = TYPE_FIELDS (TREE_TYPE (tmp));
  field = gfc_advance_chain (field, STRIDE_SUBFIELD);
  gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);

  tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
                         tmp, field, NULL_TREE);
  return tmp;
}

tree
gfc_conv_descriptor_stride_get (tree desc, tree dim)
{
  tree type = TREE_TYPE (desc);
  gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
  if (integer_zerop (dim)
      && (GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ALLOCATABLE
          || GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ASSUMED_SHAPE_CONT
          || GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_ASSUMED_RANK_CONT
          || GFC_TYPE_ARRAY_AKIND (type) == GFC_ARRAY_POINTER_CONT))
    return gfc_index_one_node;

  return gfc_conv_descriptor_stride (desc, dim);
}

void
gfc_conv_descriptor_stride_set (stmtblock_t *block, tree desc,
                                tree dim, tree value)
{
  tree t = gfc_conv_descriptor_stride (desc, dim);
  gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
}


static tree
gfc_conv_descriptor_lbound (tree desc, tree dim)
{
  tree tmp;
  tree field;

  tmp = gfc_conv_descriptor_dimension (desc, dim);
  field = TYPE_FIELDS (TREE_TYPE (tmp));
  field = gfc_advance_chain (field, LBOUND_SUBFIELD);
  gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);

  tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
                         tmp, field, NULL_TREE);
  return tmp;
}

tree
gfc_conv_descriptor_lbound_get (tree desc, tree dim)
{
  return gfc_conv_descriptor_lbound (desc, dim);
}

void
gfc_conv_descriptor_lbound_set (stmtblock_t *block, tree desc,
                                tree dim, tree value)
{
  tree t = gfc_conv_descriptor_lbound (desc, dim);
  gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
}


static tree
gfc_conv_descriptor_ubound (tree desc, tree dim)
{
  tree tmp;
  tree field;

  tmp = gfc_conv_descriptor_dimension (desc, dim);
  field = TYPE_FIELDS (TREE_TYPE (tmp));
  field = gfc_advance_chain (field, UBOUND_SUBFIELD);
  gcc_assert (field != NULL_TREE && TREE_TYPE (field) == gfc_array_index_type);

  tmp = fold_build3_loc (input_location, COMPONENT_REF, TREE_TYPE (field),
                         tmp, field, NULL_TREE);
  return tmp;
}

tree
gfc_conv_descriptor_ubound_get (tree desc, tree dim)
{
  return gfc_conv_descriptor_ubound (desc, dim);
}

void
gfc_conv_descriptor_ubound_set (stmtblock_t *block, tree desc,
                                tree dim, tree value)
{
  tree t = gfc_conv_descriptor_ubound (desc, dim);
  gfc_add_modify (block, t, fold_convert (TREE_TYPE (t), value));
}


/* Build a null array descriptor constructor.  */

tree
gfc_build_null_descriptor (tree type)
{
  tree field;
  tree tmp;

  gcc_assert (GFC_DESCRIPTOR_TYPE_P (type));
  gcc_assert (DATA_FIELD == 0);
  field = TYPE_FIELDS (type);

  /* Set a NULL data pointer.  */
  tmp = build_constructor_single (type, field, null_pointer_node);
  TREE_CONSTANT (tmp) = 1;
  /* All other fields are ignored.  */

  return tmp;
}


/* Modify a descriptor such that the lbound of a given dimension is the value
   specified.  This also updates ubound and offset accordingly.  */

void
gfc_conv_shift_descriptor_lbound (stmtblock_t* block, tree desc,
                                  int dim, tree new_lbound)
{
  tree offs, ubound, lbound, stride;
  tree diff, offs_diff;

  new_lbound = fold_convert (gfc_array_index_type, new_lbound);

  offs = gfc_conv_descriptor_offset_get (desc);
  lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
  ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
  stride = gfc_conv_descriptor_stride_get (desc, gfc_rank_cst[dim]);

  /* Get difference (new - old) by which to shift stuff.  */
  diff = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
                          new_lbound, lbound);

  /* Shift ubound and offset accordingly.  This has to be done before
     updating the lbound, as they depend on the lbound expression!  */
  ubound = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
                            ubound, diff);
  gfc_conv_descriptor_ubound_set (block, desc, gfc_rank_cst[dim], ubound);

  offs_diff = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
                               diff, stride);
  offs = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
                          offs, offs_diff);
  gfc_conv_descriptor_offset_set (block, desc, offs);

  /* Finally set lbound to value we want.  */
  gfc_conv_descriptor_lbound_set (block, desc, gfc_rank_cst[dim], new_lbound);
}
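
/* EDITORIAL NOTE: the block below is an illustrative sketch added for this
   edited listing; it is not part of the original source.  It spells out, on
   plain integers, the arithmetic that gfc_conv_shift_descriptor_lbound above
   emits as trees, assuming the element-address formula
   data[offset + sum (index_k * stride_k)].  */
#if 0
#include <assert.h>

static void
example_shift_lbound (ptrdiff_t *offset, ptrdiff_t *lbound, ptrdiff_t *ubound,
                      ptrdiff_t stride, ptrdiff_t new_lbound)
{
  /* diff = new - old, the amount by which the bounds move.  */
  ptrdiff_t diff = new_lbound - *lbound;

  /* ubound moves with lbound so the extent stays the same...  */
  *ubound += diff;
  /* ...and offset absorbs the shift so that the same memory is still
     addressed: offset + new_lbound * stride == old_offset + old_lbound
     * stride.  */
  *offset -= diff * stride;
  *lbound = new_lbound;
}

static void
example_shift_lbound_check (void)
{
  /* A 1-based dimension of extent 10 with stride 1 and offset -1,
     rebased to start at 5.  */
  ptrdiff_t offset = -1, lbound = 1, ubound = 10, stride = 1;
  example_shift_lbound (&offset, &lbound, &ubound, stride, 5);
  assert (lbound == 5 && ubound == 14 && offset == -5);
  /* Element i still maps to data[offset + i * stride]: the first element
     was data[-1 + 1] = data[0] and is now data[-5 + 5] = data[0].  */
}
#endif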
  375. /* Cleanup those #defines. */
  376. #undef DATA_FIELD
  377. #undef OFFSET_FIELD
  378. #undef DTYPE_FIELD
  379. #undef DIMENSION_FIELD
  380. #undef CAF_TOKEN_FIELD
  381. #undef STRIDE_SUBFIELD
  382. #undef LBOUND_SUBFIELD
  383. #undef UBOUND_SUBFIELD
  384. /* Mark a SS chain as used. Flags specifies in which loops the SS is used.
  385. flags & 1 = Main loop body.
  386. flags & 2 = temp copy loop. */
  387. void
  388. gfc_mark_ss_chain_used (gfc_ss * ss, unsigned flags)
  389. {
  390. for (; ss != gfc_ss_terminator; ss = ss->next)
  391. ss->info->useflags = flags;
  392. }
  393. /* Free a gfc_ss chain. */
  394. void
  395. gfc_free_ss_chain (gfc_ss * ss)
  396. {
  397. gfc_ss *next;
  398. while (ss != gfc_ss_terminator)
  399. {
  400. gcc_assert (ss != NULL);
  401. next = ss->next;
  402. gfc_free_ss (ss);
  403. ss = next;
  404. }
  405. }
  406. static void
  407. free_ss_info (gfc_ss_info *ss_info)
  408. {
  409. int n;
  410. ss_info->refcount--;
  411. if (ss_info->refcount > 0)
  412. return;
  413. gcc_assert (ss_info->refcount == 0);
  414. switch (ss_info->type)
  415. {
  416. case GFC_SS_SECTION:
  417. for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
  418. if (ss_info->data.array.subscript[n])
  419. gfc_free_ss_chain (ss_info->data.array.subscript[n]);
  420. break;
  421. default:
  422. break;
  423. }
  424. free (ss_info);
  425. }
  426. /* Free a SS. */
  427. void
  428. gfc_free_ss (gfc_ss * ss)
  429. {
  430. free_ss_info (ss->info);
  431. free (ss);
  432. }
  433. /* Creates and initializes an array type gfc_ss struct. */
  434. gfc_ss *
  435. gfc_get_array_ss (gfc_ss *next, gfc_expr *expr, int dimen, gfc_ss_type type)
  436. {
  437. gfc_ss *ss;
  438. gfc_ss_info *ss_info;
  439. int i;
  440. ss_info = gfc_get_ss_info ();
  441. ss_info->refcount++;
  442. ss_info->type = type;
  443. ss_info->expr = expr;
  444. ss = gfc_get_ss ();
  445. ss->info = ss_info;
  446. ss->next = next;
  447. ss->dimen = dimen;
  448. for (i = 0; i < ss->dimen; i++)
  449. ss->dim[i] = i;
  450. return ss;
  451. }
  452. /* Creates and initializes a temporary type gfc_ss struct. */
  453. gfc_ss *
  454. gfc_get_temp_ss (tree type, tree string_length, int dimen)
  455. {
  456. gfc_ss *ss;
  457. gfc_ss_info *ss_info;
  458. int i;
  459. ss_info = gfc_get_ss_info ();
  460. ss_info->refcount++;
  461. ss_info->type = GFC_SS_TEMP;
  462. ss_info->string_length = string_length;
  463. ss_info->data.temp.type = type;
  464. ss = gfc_get_ss ();
  465. ss->info = ss_info;
  466. ss->next = gfc_ss_terminator;
  467. ss->dimen = dimen;
  468. for (i = 0; i < ss->dimen; i++)
  469. ss->dim[i] = i;
  470. return ss;
  471. }
  472. /* Creates and initializes a scalar type gfc_ss struct. */
  473. gfc_ss *
  474. gfc_get_scalar_ss (gfc_ss *next, gfc_expr *expr)
  475. {
  476. gfc_ss *ss;
  477. gfc_ss_info *ss_info;
  478. ss_info = gfc_get_ss_info ();
  479. ss_info->refcount++;
  480. ss_info->type = GFC_SS_SCALAR;
  481. ss_info->expr = expr;
  482. ss = gfc_get_ss ();
  483. ss->info = ss_info;
  484. ss->next = next;
  485. return ss;
  486. }
  487. /* Free all the SS associated with a loop. */
  488. void
  489. gfc_cleanup_loop (gfc_loopinfo * loop)
  490. {
  491. gfc_loopinfo *loop_next, **ploop;
  492. gfc_ss *ss;
  493. gfc_ss *next;
  494. ss = loop->ss;
  495. while (ss != gfc_ss_terminator)
  496. {
  497. gcc_assert (ss != NULL);
  498. next = ss->loop_chain;
  499. gfc_free_ss (ss);
  500. ss = next;
  501. }
  502. /* Remove reference to self in the parent loop. */
  503. if (loop->parent)
  504. for (ploop = &loop->parent->nested; *ploop; ploop = &(*ploop)->next)
  505. if (*ploop == loop)
  506. {
  507. *ploop = loop->next;
  508. break;
  509. }
  510. /* Free non-freed nested loops. */
  511. for (loop = loop->nested; loop; loop = loop_next)
  512. {
  513. loop_next = loop->next;
  514. gfc_cleanup_loop (loop);
  515. free (loop);
  516. }
  517. }
  518. static void
  519. set_ss_loop (gfc_ss *ss, gfc_loopinfo *loop)
  520. {
  521. int n;
  522. for (; ss != gfc_ss_terminator; ss = ss->next)
  523. {
  524. ss->loop = loop;
  525. if (ss->info->type == GFC_SS_SCALAR
  526. || ss->info->type == GFC_SS_REFERENCE
  527. || ss->info->type == GFC_SS_TEMP)
  528. continue;
  529. for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
  530. if (ss->info->data.array.subscript[n] != NULL)
  531. set_ss_loop (ss->info->data.array.subscript[n], loop);
  532. }
  533. }

/* Associate a SS chain with a loop.  */

void
gfc_add_ss_to_loop (gfc_loopinfo * loop, gfc_ss * head)
{
  gfc_ss *ss;
  gfc_loopinfo *nested_loop;

  if (head == gfc_ss_terminator)
    return;

  set_ss_loop (head, loop);

  ss = head;
  for (; ss && ss != gfc_ss_terminator; ss = ss->next)
    {
      if (ss->nested_ss)
        {
          nested_loop = ss->nested_ss->loop;

          /* More than one ss can belong to the same loop.  Hence, we add the
             loop to the chain only if it is different from the previously
             added one, to avoid duplicate nested loops.  */
          if (nested_loop != loop->nested)
            {
              gcc_assert (nested_loop->parent == NULL);
              nested_loop->parent = loop;

              gcc_assert (nested_loop->next == NULL);
              nested_loop->next = loop->nested;
              loop->nested = nested_loop;
            }
          else
            gcc_assert (nested_loop->parent == loop);
        }

      if (ss->next == gfc_ss_terminator)
        ss->loop_chain = loop->ss;
      else
        ss->loop_chain = ss->next;
    }
  gcc_assert (ss == gfc_ss_terminator);
  loop->ss = head;
}

/* Generate an initializer for a static pointer or allocatable array.  */

void
gfc_trans_static_array_pointer (gfc_symbol * sym)
{
  tree type;

  gcc_assert (TREE_STATIC (sym->backend_decl));

  /* Just zero the data member.  */
  type = TREE_TYPE (sym->backend_decl);
  DECL_INITIAL (sym->backend_decl) = gfc_build_null_descriptor (type);
}

/* If the bounds of SE's loop have not yet been set, see if they can be
   determined from array spec AS, which is the array spec of a called
   function.  MAPPING maps the callee's dummy arguments to the values
   that the caller is passing.  Add any initialization and finalization
   code to SE.  */

void
gfc_set_loop_bounds_from_array_spec (gfc_interface_mapping * mapping,
                                     gfc_se * se, gfc_array_spec * as)
{
  int n, dim, total_dim;
  gfc_se tmpse;
  gfc_ss *ss;
  tree lower;
  tree upper;
  tree tmp;

  total_dim = 0;

  if (!as || as->type != AS_EXPLICIT)
    return;

  for (ss = se->ss; ss; ss = ss->parent)
    {
      total_dim += ss->loop->dimen;
      for (n = 0; n < ss->loop->dimen; n++)
        {
          /* The bound is known, nothing to do.  */
          if (ss->loop->to[n] != NULL_TREE)
            continue;

          dim = ss->dim[n];
          gcc_assert (dim < as->rank);
          gcc_assert (ss->loop->dimen <= as->rank);

          /* Evaluate the lower bound.  */
          gfc_init_se (&tmpse, NULL);
          gfc_apply_interface_mapping (mapping, &tmpse, as->lower[dim]);
          gfc_add_block_to_block (&se->pre, &tmpse.pre);
          gfc_add_block_to_block (&se->post, &tmpse.post);
          lower = fold_convert (gfc_array_index_type, tmpse.expr);

          /* ...and the upper bound.  */
          gfc_init_se (&tmpse, NULL);
          gfc_apply_interface_mapping (mapping, &tmpse, as->upper[dim]);
          gfc_add_block_to_block (&se->pre, &tmpse.pre);
          gfc_add_block_to_block (&se->post, &tmpse.post);
          upper = fold_convert (gfc_array_index_type, tmpse.expr);

          /* Set the upper bound of the loop to UPPER - LOWER.  */
          tmp = fold_build2_loc (input_location, MINUS_EXPR,
                                 gfc_array_index_type, upper, lower);
          tmp = gfc_evaluate_now (tmp, &se->pre);
          ss->loop->to[n] = tmp;
        }
    }

  gcc_assert (total_dim == as->rank);
}

/* Generate code to allocate an array temporary, or create a variable to
   hold the data.  If size is NULL, zero the descriptor so that the
   callee will allocate the array.  If DEALLOC is true, also generate code
   to free the array afterwards.

   If INITIAL is not NULL, it is packed using internal_pack and the result
   used as data instead of allocating a fresh, uninitialized area of memory.

   Initialization code is added to PRE and finalization code to POST.
   DYNAMIC is true if the caller may want to extend the array later
   using realloc.  This prevents us from putting the array on the stack.  */

static void
gfc_trans_allocate_array_storage (stmtblock_t * pre, stmtblock_t * post,
                                  gfc_array_info * info, tree size, tree nelem,
                                  tree initial, bool dynamic, bool dealloc)
{
  tree tmp;
  tree desc;
  bool onstack;

  desc = info->descriptor;
  info->offset = gfc_index_zero_node;
  if (size == NULL_TREE || integer_zerop (size))
    {
      /* A callee allocated array.  */
      gfc_conv_descriptor_data_set (pre, desc, null_pointer_node);
      onstack = FALSE;
    }
  else
    {
      /* Allocate the temporary.  */
      onstack = !dynamic && initial == NULL_TREE
                && (flag_stack_arrays
                    || gfc_can_put_var_on_stack (size));

      if (onstack)
        {
          /* Make a temporary variable to hold the data.  */
          tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (nelem),
                                 nelem, gfc_index_one_node);
          tmp = gfc_evaluate_now (tmp, pre);
          tmp = build_range_type (gfc_array_index_type, gfc_index_zero_node,
                                  tmp);
          tmp = build_array_type (gfc_get_element_type (TREE_TYPE (desc)),
                                  tmp);
          tmp = gfc_create_var (tmp, "A");
          /* If we're here only because of -fstack-arrays we have to
             emit a DECL_EXPR to make the gimplifier emit alloca calls.  */
          if (!gfc_can_put_var_on_stack (size))
            gfc_add_expr_to_block (pre,
                                   fold_build1_loc (input_location,
                                                    DECL_EXPR, TREE_TYPE (tmp),
                                                    tmp));
          tmp = gfc_build_addr_expr (NULL_TREE, tmp);
          gfc_conv_descriptor_data_set (pre, desc, tmp);
        }
      else
        {
          /* Allocate memory to hold the data or call internal_pack.  */
          if (initial == NULL_TREE)
            {
              tmp = gfc_call_malloc (pre, NULL, size);
              tmp = gfc_evaluate_now (tmp, pre);
            }
          else
            {
              tree packed;
              tree source_data;
              tree was_packed;
              stmtblock_t do_copying;

              tmp = TREE_TYPE (initial); /* Pointer to descriptor.  */
              gcc_assert (TREE_CODE (tmp) == POINTER_TYPE);
              tmp = TREE_TYPE (tmp); /* The descriptor itself.  */
              tmp = gfc_get_element_type (tmp);
              gcc_assert (tmp == gfc_get_element_type (TREE_TYPE (desc)));
              packed = gfc_create_var (build_pointer_type (tmp), "data");

              tmp = build_call_expr_loc (input_location,
                                         gfor_fndecl_in_pack, 1, initial);
              tmp = fold_convert (TREE_TYPE (packed), tmp);
              gfc_add_modify (pre, packed, tmp);

              tmp = build_fold_indirect_ref_loc (input_location,
                                                 initial);
              source_data = gfc_conv_descriptor_data_get (tmp);

              /* internal_pack may return source->data without any allocation
                 or copying if it is already packed.  If that's the case, we
                 need to allocate and copy manually.  */

              gfc_start_block (&do_copying);
              tmp = gfc_call_malloc (&do_copying, NULL, size);
              tmp = fold_convert (TREE_TYPE (packed), tmp);
              gfc_add_modify (&do_copying, packed, tmp);
              tmp = gfc_build_memcpy_call (packed, source_data, size);
              gfc_add_expr_to_block (&do_copying, tmp);

              was_packed = fold_build2_loc (input_location, EQ_EXPR,
                                            boolean_type_node, packed,
                                            source_data);
              tmp = gfc_finish_block (&do_copying);
              tmp = build3_v (COND_EXPR, was_packed, tmp,
                              build_empty_stmt (input_location));
              gfc_add_expr_to_block (pre, tmp);

              tmp = fold_convert (pvoid_type_node, packed);
            }

          gfc_conv_descriptor_data_set (pre, desc, tmp);
        }
    }
  info->data = gfc_conv_descriptor_data_get (desc);

  /* The offset is zero because we create temporaries with a zero
     lower bound.  */
  gfc_conv_descriptor_offset_set (pre, desc, gfc_index_zero_node);

  if (dealloc && !onstack)
    {
      /* Free the temporary.  */
      tmp = gfc_conv_descriptor_data_get (desc);
      tmp = gfc_call_free (fold_convert (pvoid_type_node, tmp));
      gfc_add_expr_to_block (post, tmp);
    }
}

/* Get the scalarizer array dimension corresponding to actual array dimension
   given by ARRAY_DIM.

   For example, if SS represents the array ref a(1,:,:,1), it is a
   bidimensional scalarizer array, and the result would be 0 for ARRAY_DIM=1,
   and 1 for ARRAY_DIM=2.

   If SS represents transpose(a(:,1,1,:)), it is again a bidimensional
   scalarizer array, and the result would be 1 for ARRAY_DIM=0 and 0 for
   ARRAY_DIM=3.

   If SS represents sum(a(:,:,:,1), dim=1), it is a 2+1-dimensional scalarizer
   array.  If called on the inner ss, the result would be respectively 0,1,2
   for ARRAY_DIM=0,1,2.  If called on the outer ss, the result would be 0,1
   for ARRAY_DIM=1,2.  */

static int
get_scalarizer_dim_for_array_dim (gfc_ss *ss, int array_dim)
{
  int array_ref_dim;
  int n;

  array_ref_dim = 0;

  for (; ss; ss = ss->parent)
    for (n = 0; n < ss->dimen; n++)
      if (ss->dim[n] < array_dim)
        array_ref_dim++;

  return array_ref_dim;
}
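
/* Return the innermost gfc_ss of the nest containing SS, following the
   nested_ss links.  */
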
static gfc_ss *
innermost_ss (gfc_ss *ss)
{
  while (ss->nested_ss != NULL)
    ss = ss->nested_ss;

  return ss;
}

/* Get the array reference dimension corresponding to the given loop dimension.
   It is different from the true array dimension given by the dim array in
   the case of a partial array reference (i.e. a(:,:,1,:) for example).
   It is different from the loop dimension in the case of a transposed
   array.  */

static int
get_array_ref_dim_for_loop_dim (gfc_ss *ss, int loop_dim)
{
  return get_scalarizer_dim_for_array_dim (innermost_ss (ss),
                                           ss->dim[loop_dim]);
}

/* Generate code to create and initialize the descriptor for a temporary
   array.  This is used for both temporaries needed by the scalarizer, and
   functions returning arrays.  Adjusts the loop variables to be
   zero-based, and calculates the loop bounds for callee allocated arrays.
   Allocate the array unless it's callee allocated (we have a callee
   allocated array if 'callee_alloc' is true, or if loop->to[n] is
   NULL_TREE for any n).  Also fills in the descriptor, data and offset
   fields of info if known.  Returns the size of the array, or NULL for a
   callee allocated array.

   'eltype' == NULL signals that the temporary should be a class object.
   The 'initial' expression is used to obtain the size of the dynamic
   type; otherwise the allocation and initialization proceeds as for any
   other expression.

   PRE, POST, INITIAL, DYNAMIC and DEALLOC are as for
   gfc_trans_allocate_array_storage.  */

tree
gfc_trans_create_temp_array (stmtblock_t * pre, stmtblock_t * post, gfc_ss * ss,
                             tree eltype, tree initial, bool dynamic,
                             bool dealloc, bool callee_alloc, locus * where)
{
  gfc_loopinfo *loop;
  gfc_ss *s;
  gfc_array_info *info;
  tree from[GFC_MAX_DIMENSIONS], to[GFC_MAX_DIMENSIONS];
  tree type;
  tree desc;
  tree tmp;
  tree size;
  tree nelem;
  tree cond;
  tree or_expr;
  tree class_expr = NULL_TREE;
  int n, dim, tmp_dim;
  int total_dim = 0;

  /* This signals a class array for which we need the size of the
     dynamic type.  Generate an eltype and then the class expression.  */
  if (eltype == NULL_TREE && initial)
    {
      gcc_assert (POINTER_TYPE_P (TREE_TYPE (initial)));
      class_expr = build_fold_indirect_ref_loc (input_location, initial);
      eltype = TREE_TYPE (class_expr);
      eltype = gfc_get_element_type (eltype);
      /* Obtain the structure (class) expression.  */
      class_expr = TREE_OPERAND (class_expr, 0);
      gcc_assert (class_expr);
    }

  memset (from, 0, sizeof (from));
  memset (to, 0, sizeof (to));

  info = &ss->info->data.array;

  gcc_assert (ss->dimen > 0);
  gcc_assert (ss->loop->dimen == ss->dimen);

  if (warn_array_temporaries && where)
    gfc_warning (OPT_Warray_temporaries,
                 "Creating array temporary at %L", where);

  /* Set the lower bound to zero.  */
  for (s = ss; s; s = s->parent)
    {
      loop = s->loop;

      total_dim += loop->dimen;
      for (n = 0; n < loop->dimen; n++)
        {
          dim = s->dim[n];

          /* Callee allocated arrays may not have a known bound yet.  */
          if (loop->to[n])
            loop->to[n] = gfc_evaluate_now (
                        fold_build2_loc (input_location, MINUS_EXPR,
                                         gfc_array_index_type,
                                         loop->to[n], loop->from[n]),
                        pre);
          loop->from[n] = gfc_index_zero_node;

          /* We have just changed the loop bounds, we must clear the
             corresponding specloop, so that delta calculation is not skipped
             later in gfc_set_delta.  */
          loop->specloop[n] = NULL;

          /* We are constructing the temporary's descriptor based on the loop
             dimensions.  As the dimensions may be accessed in arbitrary order
             (think of transpose) the size taken from the n'th loop may not map
             to the n'th dimension of the array.  We need to reconstruct loop
             infos in the right order before using it to set the descriptor
             bounds.  */
          tmp_dim = get_scalarizer_dim_for_array_dim (ss, dim);
          from[tmp_dim] = loop->from[n];
          to[tmp_dim] = loop->to[n];

          info->delta[dim] = gfc_index_zero_node;
          info->start[dim] = gfc_index_zero_node;
          info->end[dim] = gfc_index_zero_node;
          info->stride[dim] = gfc_index_one_node;
        }
    }

  /* Initialize the descriptor.  */
  type =
    gfc_get_array_type_bounds (eltype, total_dim, 0, from, to, 1,
                               GFC_ARRAY_UNKNOWN, true);
  desc = gfc_create_var (type, "atmp");
  GFC_DECL_PACKED_ARRAY (desc) = 1;

  info->descriptor = desc;
  size = gfc_index_one_node;

  /* Fill in the array dtype.  */
  tmp = gfc_conv_descriptor_dtype (desc);
  gfc_add_modify (pre, tmp, gfc_get_dtype (TREE_TYPE (desc)));

  /*
     Fill in the bounds and stride.  This is a packed array, so:

     size = 1;
     for (n = 0; n < rank; n++)
       {
         stride[n] = size
         delta = ubound[n] + 1 - lbound[n];
         size = size * delta;
       }
     size = size * sizeof(element);
  */

  or_expr = NULL_TREE;

  /* If there is at least one null loop->to[n], it is a callee allocated
     array.  */
  for (n = 0; n < total_dim; n++)
    if (to[n] == NULL_TREE)
      {
        size = NULL_TREE;
        break;
      }

  if (size == NULL_TREE)
    for (s = ss; s; s = s->parent)
      for (n = 0; n < s->loop->dimen; n++)
        {
          dim = get_scalarizer_dim_for_array_dim (ss, s->dim[n]);

          /* For a callee allocated array express the loop bounds in terms
             of the descriptor fields.  */
          tmp = fold_build2_loc (input_location,
                MINUS_EXPR, gfc_array_index_type,
                gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]),
                gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]));
          s->loop->to[n] = tmp;
        }
  else
    {
      for (n = 0; n < total_dim; n++)
        {
          /* Store the stride and bound components in the descriptor.  */
          gfc_conv_descriptor_stride_set (pre, desc, gfc_rank_cst[n], size);

          gfc_conv_descriptor_lbound_set (pre, desc, gfc_rank_cst[n],
                                          gfc_index_zero_node);

          gfc_conv_descriptor_ubound_set (pre, desc, gfc_rank_cst[n], to[n]);

          tmp = fold_build2_loc (input_location, PLUS_EXPR,
                                 gfc_array_index_type,
                                 to[n], gfc_index_one_node);

          /* Check whether the size for this dimension is negative.  */
          cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
                                  tmp, gfc_index_zero_node);
          cond = gfc_evaluate_now (cond, pre);

          if (n == 0)
            or_expr = cond;
          else
            or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
                                       boolean_type_node, or_expr, cond);

          size = fold_build2_loc (input_location, MULT_EXPR,
                                  gfc_array_index_type, size, tmp);
          size = gfc_evaluate_now (size, pre);
        }
    }

  /* Get the size of the array.  */
  if (size && !callee_alloc)
    {
      tree elemsize;
      /* If or_expr is true, then the extent in at least one
         dimension is zero and the size is set to zero.  */
      size = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
                              or_expr, gfc_index_zero_node, size);

      nelem = size;
      if (class_expr == NULL_TREE)
        elemsize = fold_convert (gfc_array_index_type,
                        TYPE_SIZE_UNIT (gfc_get_element_type (type)));
      else
        elemsize = gfc_class_vtab_size_get (class_expr);

      size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
                              size, elemsize);
    }
  else
    {
      nelem = size;
      size = NULL_TREE;
    }

  gfc_trans_allocate_array_storage (pre, post, info, size, nelem, initial,
                                    dynamic, dealloc);

  while (ss->parent)
    ss = ss->parent;

  if (ss->dimen > ss->loop->temp_dim)
    ss->loop->temp_dim = ss->dimen;

  return size;
}

/* Return the number of iterations in a loop that starts at START,
   ends at END, and has step STEP.  */

static tree
gfc_get_iteration_count (tree start, tree end, tree step)
{
  tree tmp;
  tree type;

  type = TREE_TYPE (step);
  tmp = fold_build2_loc (input_location, MINUS_EXPR, type, end, start);
  tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR, type, tmp, step);
  tmp = fold_build2_loc (input_location, PLUS_EXPR, type, tmp,
                         build_int_cst (type, 1));
  tmp = fold_build2_loc (input_location, MAX_EXPR, type, tmp,
                         build_int_cst (type, 0));
  return fold_convert (gfc_array_index_type, tmp);
}


/* Extend the data in array DESC by EXTRA elements.  */

static void
gfc_grow_array (stmtblock_t * pblock, tree desc, tree extra)
{
  tree arg0, arg1;
  tree tmp;
  tree size;
  tree ubound;

  if (integer_zerop (extra))
    return;

  ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[0]);

  /* Add EXTRA to the upper bound.  */
  tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
                         ubound, extra);
  gfc_conv_descriptor_ubound_set (pblock, desc, gfc_rank_cst[0], tmp);

  /* Get the value of the current data pointer.  */
  arg0 = gfc_conv_descriptor_data_get (desc);

  /* Calculate the new array size.  */
  size = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
  tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
                         ubound, gfc_index_one_node);
  arg1 = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
                          fold_convert (size_type_node, tmp),
                          fold_convert (size_type_node, size));

  /* Call the realloc() function.  */
  tmp = gfc_call_realloc (pblock, arg0, arg1);
  gfc_conv_descriptor_data_set (pblock, desc, tmp);
}

/* Return true if the bounds of iterator I can only be determined
   at run time.  */

static inline bool
gfc_iterator_has_dynamic_bounds (gfc_iterator * i)
{
  return (i->start->expr_type != EXPR_CONSTANT
          || i->end->expr_type != EXPR_CONSTANT
          || i->step->expr_type != EXPR_CONSTANT);
}


/* Split the size of constructor element EXPR into the sum of two terms,
   one of which can be determined at compile time and one of which must
   be calculated at run time.  Set *SIZE to the former and return true
   if the latter might be nonzero.  */

static bool
gfc_get_array_constructor_element_size (mpz_t * size, gfc_expr * expr)
{
  if (expr->expr_type == EXPR_ARRAY)
    return gfc_get_array_constructor_size (size, expr->value.constructor);
  else if (expr->rank > 0)
    {
      /* Calculate everything at run time.  */
      mpz_set_ui (*size, 0);
      return true;
    }
  else
    {
      /* A single element.  */
      mpz_set_ui (*size, 1);
      return false;
    }
}

/* Like gfc_get_array_constructor_element_size, but applied to the whole
   of array constructor C.  */

static bool
gfc_get_array_constructor_size (mpz_t * size, gfc_constructor_base base)
{
  gfc_constructor *c;
  gfc_iterator *i;
  mpz_t val;
  mpz_t len;
  bool dynamic;

  mpz_set_ui (*size, 0);
  mpz_init (len);
  mpz_init (val);

  dynamic = false;
  for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
    {
      i = c->iterator;
      if (i && gfc_iterator_has_dynamic_bounds (i))
        dynamic = true;
      else
        {
          dynamic |= gfc_get_array_constructor_element_size (&len, c->expr);
          if (i)
            {
              /* Multiply the static part of the element size by the
                 number of iterations.  */
              mpz_sub (val, i->end->value.integer, i->start->value.integer);
              mpz_fdiv_q (val, val, i->step->value.integer);
              mpz_add_ui (val, val, 1);
              if (mpz_sgn (val) > 0)
                mpz_mul (len, len, val);
              else
                mpz_set_ui (len, 0);
            }
          mpz_add (*size, *size, len);
        }
    }
  mpz_clear (len);
  mpz_clear (val);
  return dynamic;
}

/* Make sure offset is a variable.  */

static void
gfc_put_offset_into_var (stmtblock_t * pblock, tree * poffset,
                         tree * offsetvar)
{
  /* We should have already created the offset variable.  We cannot
     create it here because we may be in an inner scope.  */
  gcc_assert (*offsetvar != NULL_TREE);
  gfc_add_modify (pblock, *offsetvar, *poffset);
  *poffset = *offsetvar;
  TREE_USED (*offsetvar) = 1;
}


/* Variables needed for bounds-checking.  */
static bool first_len;
static tree first_len_val;
static bool typespec_chararray_ctor;
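
/* Store the value of array-constructor element EXPR at position OFFSET of
   the temporary described by descriptor DESC.  Character values are copied
   (or their pointers stored) as appropriate and, with bounds checking
   enabled, the string lengths of the elements are checked for consistency.  */
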
static void
gfc_trans_array_ctor_element (stmtblock_t * pblock, tree desc,
                              tree offset, gfc_se * se, gfc_expr * expr)
{
  tree tmp;

  gfc_conv_expr (se, expr);

  /* Store the value.  */
  tmp = build_fold_indirect_ref_loc (input_location,
                                     gfc_conv_descriptor_data_get (desc));
  tmp = gfc_build_array_ref (tmp, offset, NULL);

  if (expr->ts.type == BT_CHARACTER)
    {
      int i = gfc_validate_kind (BT_CHARACTER, expr->ts.kind, false);
      tree esize;

      esize = size_in_bytes (gfc_get_element_type (TREE_TYPE (desc)));
      esize = fold_convert (gfc_charlen_type_node, esize);
      esize = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
                               gfc_charlen_type_node, esize,
                               build_int_cst (gfc_charlen_type_node,
                                              gfc_character_kinds[i].bit_size / 8));

      gfc_conv_string_parameter (se);
      if (POINTER_TYPE_P (TREE_TYPE (tmp)))
        {
          /* The temporary is an array of pointers.  */
          se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
          gfc_add_modify (&se->pre, tmp, se->expr);
        }
      else
        {
          /* The temporary is an array of string values.  */
          tmp = gfc_build_addr_expr (gfc_get_pchar_type (expr->ts.kind), tmp);
          /* We know the temporary and the value will be the same length,
             so can use memcpy.  */
          gfc_trans_string_copy (&se->pre, esize, tmp, expr->ts.kind,
                                 se->string_length, se->expr, expr->ts.kind);
        }
      if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS) && !typespec_chararray_ctor)
        {
          if (first_len)
            {
              gfc_add_modify (&se->pre, first_len_val,
                              se->string_length);
              first_len = false;
            }
          else
            {
              /* Verify that all constructor elements are of the same
                 length.  */
              tree cond = fold_build2_loc (input_location, NE_EXPR,
                                           boolean_type_node, first_len_val,
                                           se->string_length);
              gfc_trans_runtime_check
                (true, false, cond, &se->pre, &expr->where,
                 "Different CHARACTER lengths (%ld/%ld) in array constructor",
                 fold_convert (long_integer_type_node, first_len_val),
                 fold_convert (long_integer_type_node, se->string_length));
            }
        }
    }
  else
    {
      /* TODO: Should the frontend already have done this conversion?  */
      se->expr = fold_convert (TREE_TYPE (tmp), se->expr);
      gfc_add_modify (&se->pre, tmp, se->expr);
    }

  gfc_add_block_to_block (pblock, &se->pre);
  gfc_add_block_to_block (pblock, &se->post);
}

/* Add the contents of an array to the constructor.  DYNAMIC is as for
   gfc_trans_array_constructor_value.  */

static void
gfc_trans_array_constructor_subarray (stmtblock_t * pblock,
                                      tree type ATTRIBUTE_UNUSED,
                                      tree desc, gfc_expr * expr,
                                      tree * poffset, tree * offsetvar,
                                      bool dynamic)
{
  gfc_se se;
  gfc_ss *ss;
  gfc_loopinfo loop;
  stmtblock_t body;
  tree tmp;
  tree size;
  int n;

  /* We need this to be a variable so we can increment it.  */
  gfc_put_offset_into_var (pblock, poffset, offsetvar);

  gfc_init_se (&se, NULL);

  /* Walk the array expression.  */
  ss = gfc_walk_expr (expr);
  gcc_assert (ss != gfc_ss_terminator);

  /* Initialize the scalarizer.  */
  gfc_init_loopinfo (&loop);
  gfc_add_ss_to_loop (&loop, ss);

  /* Initialize the loop.  */
  gfc_conv_ss_startstride (&loop);
  gfc_conv_loop_setup (&loop, &expr->where);

  /* Make sure the constructed array has room for the new data.  */
  if (dynamic)
    {
      /* Set SIZE to the total number of elements in the subarray.  */
      size = gfc_index_one_node;
      for (n = 0; n < loop.dimen; n++)
        {
          tmp = gfc_get_iteration_count (loop.from[n], loop.to[n],
                                         gfc_index_one_node);
          size = fold_build2_loc (input_location, MULT_EXPR,
                                  gfc_array_index_type, size, tmp);
        }

      /* Grow the constructed array by SIZE elements.  */
      gfc_grow_array (&loop.pre, desc, size);
    }

  /* Make the loop body.  */
  gfc_mark_ss_chain_used (ss, 1);
  gfc_start_scalarized_body (&loop, &body);
  gfc_copy_loopinfo_to_se (&se, &loop);
  se.ss = ss;

  gfc_trans_array_ctor_element (&body, desc, *poffset, &se, expr);
  gcc_assert (se.ss == gfc_ss_terminator);

  /* Increment the offset.  */
  tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
                         *poffset, gfc_index_one_node);
  gfc_add_modify (&body, *poffset, tmp);

  /* Finish the loop.  */
  gfc_trans_scalarizing_loops (&loop, &body);
  gfc_add_block_to_block (&loop.pre, &loop.post);
  tmp = gfc_finish_block (&loop.pre);
  gfc_add_expr_to_block (pblock, tmp);

  gfc_cleanup_loop (&loop);
}

/* Assign the values to the elements of an array constructor.  DYNAMIC
   is true if descriptor DESC only contains enough data for the static
   size calculated by gfc_get_array_constructor_size.  When true, memory
   for the dynamic parts must be allocated using realloc.  */

static void
gfc_trans_array_constructor_value (stmtblock_t * pblock, tree type,
                                   tree desc, gfc_constructor_base base,
                                   tree * poffset, tree * offsetvar,
                                   bool dynamic)
{
  tree tmp;
  tree start = NULL_TREE;
  tree end = NULL_TREE;
  tree step = NULL_TREE;
  stmtblock_t body;
  gfc_se se;
  mpz_t size;
  gfc_constructor *c;

  tree shadow_loopvar = NULL_TREE;
  gfc_saved_var saved_loopvar;

  mpz_init (size);
  for (c = gfc_constructor_first (base); c; c = gfc_constructor_next (c))
    {
      /* If this is an iterator or an array, the offset must be a variable.  */
      if ((c->iterator || c->expr->rank > 0) && INTEGER_CST_P (*poffset))
        gfc_put_offset_into_var (pblock, poffset, offsetvar);

      /* Shadowing the iterator avoids changing its value and saves us from
         keeping track of it.  Further, it makes sure that there's always a
         backend-decl for the symbol, even if there wasn't one before,
         e.g. in the case of an iterator that appears in a specification
         expression in an interface mapping.  */
      if (c->iterator)
        {
          gfc_symbol *sym;
          tree type;

          /* Evaluate loop bounds before substituting the loop variable
             in case they depend on it.  Such a case is invalid, but it is
             not more expensive to do the right thing here.
             See PR 44354.  */
          gfc_init_se (&se, NULL);
          gfc_conv_expr_val (&se, c->iterator->start);
          gfc_add_block_to_block (pblock, &se.pre);
          start = gfc_evaluate_now (se.expr, pblock);

          gfc_init_se (&se, NULL);
          gfc_conv_expr_val (&se, c->iterator->end);
          gfc_add_block_to_block (pblock, &se.pre);
          end = gfc_evaluate_now (se.expr, pblock);

          gfc_init_se (&se, NULL);
          gfc_conv_expr_val (&se, c->iterator->step);
          gfc_add_block_to_block (pblock, &se.pre);
          step = gfc_evaluate_now (se.expr, pblock);

          sym = c->iterator->var->symtree->n.sym;
          type = gfc_typenode_for_spec (&sym->ts);

          shadow_loopvar = gfc_create_var (type, "shadow_loopvar");
          gfc_shadow_sym (sym, shadow_loopvar, &saved_loopvar);
        }

      gfc_start_block (&body);

      if (c->expr->expr_type == EXPR_ARRAY)
        {
          /* Array constructors can be nested.  */
          gfc_trans_array_constructor_value (&body, type, desc,
                                             c->expr->value.constructor,
                                             poffset, offsetvar, dynamic);
        }
      else if (c->expr->rank > 0)
        {
          gfc_trans_array_constructor_subarray (&body, type, desc, c->expr,
                                                poffset, offsetvar, dynamic);
        }
      else
        {
          /* This code really upsets the gimplifier so don't bother for now.  */
          gfc_constructor *p;
          HOST_WIDE_INT n;
          HOST_WIDE_INT size;

          p = c;
          n = 0;
          while (p && !(p->iterator || p->expr->expr_type != EXPR_CONSTANT))
            {
              p = gfc_constructor_next (p);
              n++;
            }
          if (n < 4)
            {
              /* Scalar values.  */
              gfc_init_se (&se, NULL);
              gfc_trans_array_ctor_element (&body, desc, *poffset,
                                            &se, c->expr);

              *poffset = fold_build2_loc (input_location, PLUS_EXPR,
                                          gfc_array_index_type,
                                          *poffset, gfc_index_one_node);
            }
          else
            {
              /* Collect multiple scalar constants into a constructor.  */
              vec<constructor_elt, va_gc> *v = NULL;
              tree init;
              tree bound;
              tree tmptype;
              HOST_WIDE_INT idx = 0;

              p = c;
              /* Count the number of consecutive scalar constants.  */
              while (p && !(p->iterator
                            || p->expr->expr_type != EXPR_CONSTANT))
                {
                  gfc_init_se (&se, NULL);
                  gfc_conv_constant (&se, p->expr);

                  if (c->expr->ts.type != BT_CHARACTER)
                    se.expr = fold_convert (type, se.expr);
                  /* For constant character array constructors we build
                     an array of pointers.  */
                  else if (POINTER_TYPE_P (type))
                    se.expr = gfc_build_addr_expr
                                (gfc_get_pchar_type (p->expr->ts.kind),
                                 se.expr);

                  CONSTRUCTOR_APPEND_ELT (v,
                                          build_int_cst (gfc_array_index_type,
                                                         idx++),
                                          se.expr);
                  c = p;
                  p = gfc_constructor_next (p);
                }

              bound = size_int (n - 1);
              /* Create an array type to hold them.  */
              tmptype = build_range_type (gfc_array_index_type,
                                          gfc_index_zero_node, bound);
              tmptype = build_array_type (type, tmptype);

              init = build_constructor (tmptype, v);
              TREE_CONSTANT (init) = 1;
              TREE_STATIC (init) = 1;
              /* Create a static variable to hold the data.  */
              tmp = gfc_create_var (tmptype, "data");
              TREE_STATIC (tmp) = 1;
              TREE_CONSTANT (tmp) = 1;
              TREE_READONLY (tmp) = 1;
              DECL_INITIAL (tmp) = init;
              init = tmp;

              /* Use BUILTIN_MEMCPY to assign the values.  */
              tmp = gfc_conv_descriptor_data_get (desc);
              tmp = build_fold_indirect_ref_loc (input_location,
                                                 tmp);
              tmp = gfc_build_array_ref (tmp, *poffset, NULL);
              tmp = gfc_build_addr_expr (NULL_TREE, tmp);
              init = gfc_build_addr_expr (NULL_TREE, init);

              size = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
              bound = build_int_cst (size_type_node, n * size);
              tmp = build_call_expr_loc (input_location,
                                         builtin_decl_explicit (BUILT_IN_MEMCPY),
                                         3, tmp, init, bound);
              gfc_add_expr_to_block (&body, tmp);

              *poffset = fold_build2_loc (input_location, PLUS_EXPR,
                                          gfc_array_index_type, *poffset,
                                          build_int_cst (gfc_array_index_type, n));
            }
          if (!INTEGER_CST_P (*poffset))
            {
              gfc_add_modify (&body, *offsetvar, *poffset);
              *poffset = *offsetvar;
            }
        }

      /* The frontend should already have done any expansions
         at compile-time.  */
      if (!c->iterator)
        {
          /* Pass the code as is.  */
          tmp = gfc_finish_block (&body);
          gfc_add_expr_to_block (pblock, tmp);
        }
      else
        {
          /* Build the implied do-loop.  */
          stmtblock_t implied_do_block;
          tree cond;
          tree exit_label;
          tree loopbody;
          tree tmp2;

          loopbody = gfc_finish_block (&body);

          /* Create a new block that holds the implied-do loop.  A temporary
             loop-variable is used.  */
          gfc_start_block (&implied_do_block);

          /* Initialize the loop.  */
          gfc_add_modify (&implied_do_block, shadow_loopvar, start);

          /* If this array expands dynamically, and the number of iterations
             is not constant, we won't have allocated space for the static
             part of C->EXPR's size.  Do that now.  */
          if (dynamic && gfc_iterator_has_dynamic_bounds (c->iterator))
            {
              /* Get the number of iterations.  */
              tmp = gfc_get_iteration_count (shadow_loopvar, end, step);

              /* Get the static part of C->EXPR's size.  */
              gfc_get_array_constructor_element_size (&size, c->expr);
              tmp2 = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);

              /* Grow the array by TMP * TMP2 elements.  */
              tmp = fold_build2_loc (input_location, MULT_EXPR,
                                     gfc_array_index_type, tmp, tmp2);
              gfc_grow_array (&implied_do_block, desc, tmp);
            }

          /* Generate the loop body.  */
          exit_label = gfc_build_label_decl (NULL_TREE);
          gfc_start_block (&body);

          /* Generate the exit condition.  Depending on the sign of
             the step variable we have to generate the correct
             comparison.  */
          tmp = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
                                 step, build_int_cst (TREE_TYPE (step), 0));
          cond = fold_build3_loc (input_location, COND_EXPR,
                      boolean_type_node, tmp,
                      fold_build2_loc (input_location, GT_EXPR,
                                       boolean_type_node, shadow_loopvar, end),
                      fold_build2_loc (input_location, LT_EXPR,
                                       boolean_type_node, shadow_loopvar, end));
          tmp = build1_v (GOTO_EXPR, exit_label);
          TREE_USED (exit_label) = 1;
          tmp = build3_v (COND_EXPR, cond, tmp,
                          build_empty_stmt (input_location));
          gfc_add_expr_to_block (&body, tmp);

          /* The main loop body.  */
          gfc_add_expr_to_block (&body, loopbody);

          /* Increase loop variable by step.  */
          tmp = fold_build2_loc (input_location, PLUS_EXPR,
                                 TREE_TYPE (shadow_loopvar), shadow_loopvar,
                                 step);
          gfc_add_modify (&body, shadow_loopvar, tmp);

          /* Finish the loop.  */
          tmp = gfc_finish_block (&body);
          tmp = build1_v (LOOP_EXPR, tmp);
          gfc_add_expr_to_block (&implied_do_block, tmp);

          /* Add the exit label.  */
          tmp = build1_v (LABEL_EXPR, exit_label);
          gfc_add_expr_to_block (&implied_do_block, tmp);

          /* Finish the implied-do loop.  */
          tmp = gfc_finish_block (&implied_do_block);
          gfc_add_expr_to_block (pblock, tmp);

          gfc_restore_sym (c->iterator->var->symtree->n.sym, &saved_loopvar);
        }
    }
  mpz_clear (size);
}

/* A catch-all to obtain the string length for anything that is not
   a substring of non-constant length, a constant, array or variable.  */

static void
get_array_ctor_all_strlen (stmtblock_t *block, gfc_expr *e, tree *len)
{
  gfc_se se;

  /* Don't bother if we already know the length is a constant.  */
  if (*len && INTEGER_CST_P (*len))
    return;

  if (!e->ref && e->ts.u.cl && e->ts.u.cl->length
      && e->ts.u.cl->length->expr_type == EXPR_CONSTANT)
    {
      /* This is easy.  */
      gfc_conv_const_charlen (e->ts.u.cl);
      *len = e->ts.u.cl->backend_decl;
    }
  else
    {
      /* Otherwise, be brutal even if inefficient.  */
      gfc_init_se (&se, NULL);

      /* No function call, in case of side effects.  */
      se.no_function_call = 1;
      if (e->rank == 0)
        gfc_conv_expr (&se, e);
      else
        gfc_conv_expr_descriptor (&se, e);

      /* Fix the value.  */
      *len = gfc_evaluate_now (se.string_length, &se.pre);

      gfc_add_block_to_block (block, &se.pre);
      gfc_add_block_to_block (block, &se.post);

      e->ts.u.cl->backend_decl = *len;
    }
}

/* Figure out the string length of a variable reference expression.
   Used by get_array_ctor_strlen.  */

static void
get_array_ctor_var_strlen (stmtblock_t *block, gfc_expr * expr, tree * len)
{
  gfc_ref *ref;
  gfc_typespec *ts;
  mpz_t char_len;

  /* Don't bother if we already know the length is a constant.  */
  if (*len && INTEGER_CST_P (*len))
    return;

  ts = &expr->symtree->n.sym->ts;
  for (ref = expr->ref; ref; ref = ref->next)
    {
      switch (ref->type)
        {
        case REF_ARRAY:
          /* Array references don't change the string length.  */
          break;

        case REF_COMPONENT:
          /* Use the length of the component.  */
          ts = &ref->u.c.component->ts;
          break;

        case REF_SUBSTRING:
          if (ref->u.ss.start->expr_type != EXPR_CONSTANT
              || ref->u.ss.end->expr_type != EXPR_CONSTANT)
            {
              /* Note that this might evaluate expr.  */
              get_array_ctor_all_strlen (block, expr, len);
              return;
            }
          mpz_init_set_ui (char_len, 1);
          mpz_add (char_len, char_len, ref->u.ss.end->value.integer);
          mpz_sub (char_len, char_len, ref->u.ss.start->value.integer);
          *len = gfc_conv_mpz_to_tree (char_len, gfc_default_integer_kind);
          *len = convert (gfc_charlen_type_node, *len);
          mpz_clear (char_len);
          return;

        default:
          gcc_unreachable ();
        }
    }

  *len = ts->u.cl->backend_decl;
}

/* Figure out the string length of a character array constructor.
   If len is NULL, don't calculate the length; this happens for recursive calls
   when a sub-array-constructor is an element but not at the first position,
   so when we're not interested in the length.
   Returns TRUE if all elements are character constants.  */

bool
get_array_ctor_strlen (stmtblock_t *block, gfc_constructor_base base, tree * len)
{
  gfc_constructor *c;
  bool is_const;

  is_const = TRUE;

  if (gfc_constructor_first (base) == NULL)
    {
      if (len)
        *len = build_int_cstu (gfc_charlen_type_node, 0);
      return is_const;
    }

  /* Loop over all constructor elements to find out is_const, but in len we
     want to store the length of the first, not the last, element.  We can
     of course exit the loop as soon as is_const is found to be false.  */
  for (c = gfc_constructor_first (base);
       c && is_const; c = gfc_constructor_next (c))
    {
      switch (c->expr->expr_type)
        {
        case EXPR_CONSTANT:
          if (len && !(*len && INTEGER_CST_P (*len)))
            *len = build_int_cstu (gfc_charlen_type_node,
                                   c->expr->value.character.length);
          break;

        case EXPR_ARRAY:
          if (!get_array_ctor_strlen (block, c->expr->value.constructor, len))
            is_const = false;
          break;

        case EXPR_VARIABLE:
          is_const = false;
          if (len)
            get_array_ctor_var_strlen (block, c->expr, len);
          break;

        default:
          is_const = false;
          if (len)
            get_array_ctor_all_strlen (block, c->expr, len);
          break;
        }

      /* After the first iteration, we don't want the length modified.  */
      len = NULL;
    }

  return is_const;
}

/* Check whether the array constructor C consists entirely of constant
   elements, and if so returns the number of those elements, otherwise
   return zero.  Note, an empty or NULL array constructor returns zero.  */

unsigned HOST_WIDE_INT
gfc_constant_array_constructor_p (gfc_constructor_base base)
{
  unsigned HOST_WIDE_INT nelem = 0;

  gfc_constructor *c = gfc_constructor_first (base);
  while (c)
    {
      if (c->iterator
          || c->expr->rank > 0
          || c->expr->expr_type != EXPR_CONSTANT)
        return 0;
      c = gfc_constructor_next (c);
      nelem++;
    }
  return nelem;
}

/* Given EXPR, the constant array constructor specified by an EXPR_ARRAY,
   and the tree type of its elements, TYPE, return a static constant
   variable that is compile-time initialized.  */

tree
gfc_build_constant_array_constructor (gfc_expr * expr, tree type)
{
  tree tmptype, init, tmp;
  HOST_WIDE_INT nelem;
  gfc_constructor *c;
  gfc_array_spec as;
  gfc_se se;
  int i;
  vec<constructor_elt, va_gc> *v = NULL;

  /* First traverse the constructor list, converting the constants
     to tree to build an initializer.  */
  nelem = 0;
  c = gfc_constructor_first (expr->value.constructor);
  while (c)
    {
      gfc_init_se (&se, NULL);
      gfc_conv_constant (&se, c->expr);
      if (c->expr->ts.type != BT_CHARACTER)
        se.expr = fold_convert (type, se.expr);
      else if (POINTER_TYPE_P (type))
        se.expr = gfc_build_addr_expr (gfc_get_pchar_type (c->expr->ts.kind),
                                       se.expr);
      CONSTRUCTOR_APPEND_ELT (v, build_int_cst (gfc_array_index_type, nelem),
                              se.expr);
      c = gfc_constructor_next (c);
      nelem++;
    }

  /* Next determine the tree type for the array.  We use the gfortran
     front-end's gfc_get_nodesc_array_type in order to create a suitable
     GFC_ARRAY_TYPE_P that may be used by the scalarizer.  */
  memset (&as, 0, sizeof (gfc_array_spec));

  as.rank = expr->rank;
  as.type = AS_EXPLICIT;
  if (!expr->shape)
    {
      as.lower[0] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
      as.upper[0] = gfc_get_int_expr (gfc_default_integer_kind,
                                      NULL, nelem - 1);
    }
  else
    for (i = 0; i < expr->rank; i++)
      {
        int tmp = (int) mpz_get_si (expr->shape[i]);
        as.lower[i] = gfc_get_int_expr (gfc_default_integer_kind, NULL, 0);
        as.upper[i] = gfc_get_int_expr (gfc_default_integer_kind,
                                        NULL, tmp - 1);
      }

  tmptype = gfc_get_nodesc_array_type (type, &as, PACKED_STATIC, true);

  /* as is not needed anymore.  */
  for (i = 0; i < as.rank + as.corank; i++)
    {
      gfc_free_expr (as.lower[i]);
      gfc_free_expr (as.upper[i]);
    }

  init = build_constructor (tmptype, v);

  TREE_CONSTANT (init) = 1;
  TREE_STATIC (init) = 1;

  tmp = build_decl (input_location, VAR_DECL, create_tmp_var_name ("A"),
                    tmptype);
  DECL_ARTIFICIAL (tmp) = 1;
  DECL_IGNORED_P (tmp) = 1;
  TREE_STATIC (tmp) = 1;
  TREE_CONSTANT (tmp) = 1;
  TREE_READONLY (tmp) = 1;
  DECL_INITIAL (tmp) = init;
  pushdecl (tmp);
  return tmp;
}

/* Translate a constant EXPR_ARRAY array constructor for the scalarizer.
   This mostly initializes the scalarizer state info structure with the
   appropriate values to directly use the array created by the function
   gfc_build_constant_array_constructor.  */

static void
trans_constant_array_constructor (gfc_ss * ss, tree type)
{
  gfc_array_info *info;
  tree tmp;
  int i;

  tmp = gfc_build_constant_array_constructor (ss->info->expr, type);

  info = &ss->info->data.array;

  info->descriptor = tmp;
  info->data = gfc_build_addr_expr (NULL_TREE, tmp);
  info->offset = gfc_index_zero_node;

  for (i = 0; i < ss->dimen; i++)
    {
      info->delta[i] = gfc_index_zero_node;
      info->start[i] = gfc_index_zero_node;
      info->end[i] = gfc_index_zero_node;
      info->stride[i] = gfc_index_one_node;
    }
}
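
/* Return the total number of dimensions of the loop nest that LOOP belongs
   to, i.e. the sum of the dimensions of LOOP and of its parent loops.  */
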
static int
get_rank (gfc_loopinfo *loop)
{
  int rank;

  rank = 0;
  for (; loop; loop = loop->parent)
    rank += loop->dimen;

  return rank;
}

/* Helper routine of gfc_trans_array_constructor to determine if the
   bounds of the loop specified by LOOP are constant and simple enough
   to use with trans_constant_array_constructor.  Returns the
   iteration count of the loop if suitable, and NULL_TREE otherwise.  */

static tree
constant_array_constructor_loop_size (gfc_loopinfo * l)
{
  gfc_loopinfo *loop;
  tree size = gfc_index_one_node;
  tree tmp;
  int i, total_dim;

  total_dim = get_rank (l);

  for (loop = l; loop; loop = loop->parent)
    {
      for (i = 0; i < loop->dimen; i++)
        {
          /* If the bounds aren't constant, return NULL_TREE.  */
          if (!INTEGER_CST_P (loop->from[i]) || !INTEGER_CST_P (loop->to[i]))
            return NULL_TREE;

          if (!integer_zerop (loop->from[i]))
            {
              /* Only allow nonzero "from" in one-dimensional arrays.  */
              if (total_dim != 1)
                return NULL_TREE;

              tmp = fold_build2_loc (input_location, MINUS_EXPR,
                                     gfc_array_index_type,
                                     loop->to[i], loop->from[i]);
            }
          else
            tmp = loop->to[i];

          tmp = fold_build2_loc (input_location, PLUS_EXPR,
                                 gfc_array_index_type, tmp, gfc_index_one_node);
          size = fold_build2_loc (input_location, MULT_EXPR,
                                  gfc_array_index_type, size, tmp);
        }
    }

  return size;
}
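
/* Return a pointer to the loop upper bound (the to[] entry) that corresponds
   to dimension ARRAY_DIM of ARRAY, walking up through parent scalarizer
   structs as needed.  */
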
static tree *
get_loop_upper_bound_for_array (gfc_ss *array, int array_dim)
{
  gfc_ss *ss;
  int n;

  gcc_assert (array->nested_ss == NULL);

  for (ss = array; ss; ss = ss->parent)
    for (n = 0; n < ss->loop->dimen; n++)
      if (array_dim == get_array_ref_dim_for_loop_dim (ss, n))
        return &(ss->loop->to[n]);

  gcc_unreachable ();
}
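
/* Return the outermost loop of the nest containing LOOP.  */
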
static gfc_loopinfo *
outermost_loop (gfc_loopinfo * loop)
{
  while (loop->parent != NULL)
    loop = loop->parent;

  return loop;
}

/* Array constructors are handled by constructing a temporary, then using that
   within the scalarization loop.  This is not optimal, but seems by far the
   simplest method.  */

static void
trans_array_constructor (gfc_ss * ss, locus * where)
{
  gfc_constructor_base c;
  tree offset;
  tree offsetvar;
  tree desc;
  tree type;
  tree tmp;
  tree *loop_ubound0;
  bool dynamic;
  bool old_first_len, old_typespec_chararray_ctor;
  tree old_first_len_val;
  gfc_loopinfo *loop, *outer_loop;
  gfc_ss_info *ss_info;
  gfc_expr *expr;
  gfc_ss *s;

  /* Save the old values for nested checking.  */
  old_first_len = first_len;
  old_first_len_val = first_len_val;
  old_typespec_chararray_ctor = typespec_chararray_ctor;

  loop = ss->loop;
  outer_loop = outermost_loop (loop);
  ss_info = ss->info;
  expr = ss_info->expr;

  /* Do bounds-checking here and in gfc_trans_array_ctor_element only if no
     typespec was given for the array constructor.  */
  typespec_chararray_ctor = (expr->ts.u.cl
                             && expr->ts.u.cl->length_from_typespec);

  if ((gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
      && expr->ts.type == BT_CHARACTER && !typespec_chararray_ctor)
    {
      first_len_val = gfc_create_var (gfc_charlen_type_node, "len");
      first_len = true;
    }

  gcc_assert (ss->dimen == ss->loop->dimen);

  c = expr->value.constructor;
  if (expr->ts.type == BT_CHARACTER)
    {
      bool const_string;

      /* get_array_ctor_strlen walks the elements of the constructor; if a
         typespec was given, we already know the string length and want the
         one specified there.  */
      if (typespec_chararray_ctor && expr->ts.u.cl->length
          && expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
        {
          gfc_se length_se;

          const_string = false;
          gfc_init_se (&length_se, NULL);
          gfc_conv_expr_type (&length_se, expr->ts.u.cl->length,
                              gfc_charlen_type_node);
          ss_info->string_length = length_se.expr;
          gfc_add_block_to_block (&outer_loop->pre, &length_se.pre);
          gfc_add_block_to_block (&outer_loop->post, &length_se.post);
        }
      else
        const_string = get_array_ctor_strlen (&outer_loop->pre, c,
                                              &ss_info->string_length);

      /* Complex character array constructors should have been taken care of
         and not end up here.  */
      gcc_assert (ss_info->string_length);

      expr->ts.u.cl->backend_decl = ss_info->string_length;

      type = gfc_get_character_type_len (expr->ts.kind, ss_info->string_length);
      if (const_string)
        type = build_pointer_type (type);
    }
  else
    type = gfc_typenode_for_spec (&expr->ts);

  /* See if the constructor determines the loop bounds.  */
  dynamic = false;

  loop_ubound0 = get_loop_upper_bound_for_array (ss, 0);

  if (expr->shape && get_rank (loop) > 1 && *loop_ubound0 == NULL_TREE)
    {
      /* We have a multidimensional parameter.  */
      for (s = ss; s; s = s->parent)
        {
          int n;
          for (n = 0; n < s->loop->dimen; n++)
            {
              s->loop->from[n] = gfc_index_zero_node;
              s->loop->to[n] = gfc_conv_mpz_to_tree (expr->shape[s->dim[n]],
                                                     gfc_index_integer_kind);
              s->loop->to[n] = fold_build2_loc (input_location, MINUS_EXPR,
                                                gfc_array_index_type,
                                                s->loop->to[n],
                                                gfc_index_one_node);
            }
        }
    }

  if (*loop_ubound0 == NULL_TREE)
    {
      mpz_t size;

      /* We should have a 1-dimensional, zero-based loop.  */
      gcc_assert (loop->parent == NULL && loop->nested == NULL);
      gcc_assert (loop->dimen == 1);
      gcc_assert (integer_zerop (loop->from[0]));

      /* Split the constructor size into a static part and a dynamic part.
         Allocate the static size up-front and record whether the dynamic
         size might be nonzero.  */
      mpz_init (size);
      dynamic = gfc_get_array_constructor_size (&size, c);
      mpz_sub_ui (size, size, 1);
      loop->to[0] = gfc_conv_mpz_to_tree (size, gfc_index_integer_kind);
      mpz_clear (size);
    }

  /* Special case constant array constructors.  */
  if (!dynamic)
    {
      unsigned HOST_WIDE_INT nelem = gfc_constant_array_constructor_p (c);
      if (nelem > 0)
        {
          tree size = constant_array_constructor_loop_size (loop);
          if (size && compare_tree_int (size, nelem) == 0)
            {
              trans_constant_array_constructor (ss, type);
              goto finish;
            }
        }
    }

  gfc_trans_create_temp_array (&outer_loop->pre, &outer_loop->post, ss, type,
                               NULL_TREE, dynamic, true, false, where);

  desc = ss_info->data.array.descriptor;
  offset = gfc_index_zero_node;
  offsetvar = gfc_create_var_np (gfc_array_index_type, "offset");
  TREE_NO_WARNING (offsetvar) = 1;
  TREE_USED (offsetvar) = 0;
  gfc_trans_array_constructor_value (&outer_loop->pre, type, desc, c,
                                     &offset, &offsetvar, dynamic);

  /* If the array grows dynamically, the upper bound of the loop variable
     is determined by the array's final upper bound.  */
  if (dynamic)
    {
      tmp = fold_build2_loc (input_location, MINUS_EXPR,
                             gfc_array_index_type,
                             offsetvar, gfc_index_one_node);
      tmp = gfc_evaluate_now (tmp, &outer_loop->pre);
      gfc_conv_descriptor_ubound_set (&loop->pre, desc, gfc_rank_cst[0], tmp);
      if (*loop_ubound0 && TREE_CODE (*loop_ubound0) == VAR_DECL)
        gfc_add_modify (&outer_loop->pre, *loop_ubound0, tmp);
      else
        *loop_ubound0 = tmp;
    }

  if (TREE_USED (offsetvar))
    pushdecl (offsetvar);
  else
    gcc_assert (INTEGER_CST_P (offset));

#if 0
  /* Disable bound checking for now because it's probably broken.  */
  if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
    {
      gcc_unreachable ();
    }
#endif

finish:
  /* Restore old values of globals.  */
  first_len = old_first_len;
  first_len_val = old_first_len_val;
  typespec_chararray_ctor = old_typespec_chararray_ctor;
}

/* INFO describes a GFC_SS_SECTION in loop LOOP, and this function is
   called after evaluating all of INFO's vector dimensions.  Go through
   each such vector dimension and see if we can now fill in any missing
   loop bounds.  */

static void
set_vector_loop_bounds (gfc_ss * ss)
{
  gfc_loopinfo *loop, *outer_loop;
  gfc_array_info *info;
  gfc_se se;
  tree tmp;
  tree desc;
  tree zero;
  int n;
  int dim;

  outer_loop = outermost_loop (ss->loop);

  info = &ss->info->data.array;

  for (; ss; ss = ss->parent)
    {
      loop = ss->loop;

      for (n = 0; n < loop->dimen; n++)
        {
          dim = ss->dim[n];

          if (info->ref->u.ar.dimen_type[dim] != DIMEN_VECTOR
              || loop->to[n] != NULL)
            continue;

          /* Loop variable N indexes vector dimension DIM, and we don't
             yet know the upper bound of loop variable N.  Set it to the
             difference between the vector's upper and lower bounds.  */
          gcc_assert (loop->from[n] == gfc_index_zero_node);
          gcc_assert (info->subscript[dim]
                      && info->subscript[dim]->info->type == GFC_SS_VECTOR);

          gfc_init_se (&se, NULL);
          desc = info->subscript[dim]->info->data.array.descriptor;
          zero = gfc_rank_cst[0];
          tmp = fold_build2_loc (input_location, MINUS_EXPR,
                             gfc_array_index_type,
                             gfc_conv_descriptor_ubound_get (desc, zero),
                             gfc_conv_descriptor_lbound_get (desc, zero));
          tmp = gfc_evaluate_now (tmp, &outer_loop->pre);
          loop->to[n] = tmp;
        }
    }
}
  1986. /* Add the pre and post chains for all the scalar expressions in a SS chain
  1987. to loop. This is called after the loop parameters have been calculated,
  1988. but before the actual scalarizing loops. */
  1989. static void
  1990. gfc_add_loop_ss_code (gfc_loopinfo * loop, gfc_ss * ss, bool subscript,
  1991. locus * where)
  1992. {
  1993. gfc_loopinfo *nested_loop, *outer_loop;
  1994. gfc_se se;
  1995. gfc_ss_info *ss_info;
  1996. gfc_array_info *info;
  1997. gfc_expr *expr;
  1998. int n;
  1999. /* Don't evaluate the arguments for realloc_lhs_loop_for_fcn_call; otherwise,
  2000. arguments could get evaluated multiple times. */
  2001. if (ss->is_alloc_lhs)
  2002. return;
  2003. outer_loop = outermost_loop (loop);
  2004. /* TODO: This can generate bad code if there are ordering dependencies,
  2005. e.g., a callee allocated function and an unknown size constructor. */
  2006. gcc_assert (ss != NULL);
  2007. for (; ss != gfc_ss_terminator; ss = ss->loop_chain)
  2008. {
  2009. gcc_assert (ss);
  2010. /* Cross loop arrays are handled from within the most nested loop. */
  2011. if (ss->nested_ss != NULL)
  2012. continue;
  2013. ss_info = ss->info;
  2014. expr = ss_info->expr;
  2015. info = &ss_info->data.array;
  2016. switch (ss_info->type)
  2017. {
  2018. case GFC_SS_SCALAR:
  2019. /* Scalar expression. Evaluate this now. This includes elemental
  2020. dimension indices, but not array section bounds. */
  2021. gfc_init_se (&se, NULL);
  2022. gfc_conv_expr (&se, expr);
  2023. gfc_add_block_to_block (&outer_loop->pre, &se.pre);
  2024. if (expr->ts.type != BT_CHARACTER
  2025. && !gfc_is_alloc_class_scalar_function (expr))
  2026. {
  2027. /* Move the evaluation of scalar expressions outside the
  2028. scalarization loop, except for WHERE assignments. */
  2029. if (subscript)
2030. se.expr = convert (gfc_array_index_type, se.expr);
  2031. if (!ss_info->where)
  2032. se.expr = gfc_evaluate_now (se.expr, &outer_loop->pre);
  2033. gfc_add_block_to_block (&outer_loop->pre, &se.post);
  2034. }
  2035. else
  2036. gfc_add_block_to_block (&outer_loop->post, &se.post);
  2037. ss_info->data.scalar.value = se.expr;
  2038. ss_info->string_length = se.string_length;
  2039. break;
  2040. case GFC_SS_REFERENCE:
  2041. /* Scalar argument to elemental procedure. */
  2042. gfc_init_se (&se, NULL);
  2043. if (ss_info->can_be_null_ref)
  2044. {
  2045. /* If the actual argument can be absent (in other words, it can
  2046. be a NULL reference), don't try to evaluate it; pass instead
  2047. the reference directly. */
  2048. gfc_conv_expr_reference (&se, expr);
  2049. }
  2050. else
  2051. {
  2052. /* Otherwise, evaluate the argument outside the loop and pass
  2053. a reference to the value. */
  2054. gfc_conv_expr (&se, expr);
  2055. }
  2056. /* Ensure that a pointer to the string is stored. */
  2057. if (expr->ts.type == BT_CHARACTER)
  2058. gfc_conv_string_parameter (&se);
  2059. gfc_add_block_to_block (&outer_loop->pre, &se.pre);
  2060. gfc_add_block_to_block (&outer_loop->post, &se.post);
  2061. if (gfc_is_class_scalar_expr (expr))
  2062. /* This is necessary because the dynamic type will always be
2063. larger than the declared type. In consequence, assigning
  2064. the value to a temporary could segfault.
2065. OOP-TODO: see if this is generally correct or whether the value
  2066. has to be written to an allocated temporary, whose address
  2067. is passed via ss_info. */
  2068. ss_info->data.scalar.value = se.expr;
  2069. else
  2070. ss_info->data.scalar.value = gfc_evaluate_now (se.expr,
  2071. &outer_loop->pre);
  2072. ss_info->string_length = se.string_length;
  2073. break;
  2074. case GFC_SS_SECTION:
  2075. /* Add the expressions for scalar and vector subscripts. */
  2076. for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
  2077. if (info->subscript[n])
  2078. gfc_add_loop_ss_code (loop, info->subscript[n], true, where);
  2079. set_vector_loop_bounds (ss);
  2080. break;
  2081. case GFC_SS_VECTOR:
  2082. /* Get the vector's descriptor and store it in SS. */
  2083. gfc_init_se (&se, NULL);
  2084. gfc_conv_expr_descriptor (&se, expr);
  2085. gfc_add_block_to_block (&outer_loop->pre, &se.pre);
  2086. gfc_add_block_to_block (&outer_loop->post, &se.post);
  2087. info->descriptor = se.expr;
  2088. break;
  2089. case GFC_SS_INTRINSIC:
  2090. gfc_add_intrinsic_ss_code (loop, ss);
  2091. break;
  2092. case GFC_SS_FUNCTION:
  2093. /* Array function return value. We call the function and save its
  2094. result in a temporary for use inside the loop. */
  2095. gfc_init_se (&se, NULL);
  2096. se.loop = loop;
  2097. se.ss = ss;
  2098. gfc_conv_expr (&se, expr);
  2099. gfc_add_block_to_block (&outer_loop->pre, &se.pre);
  2100. gfc_add_block_to_block (&outer_loop->post, &se.post);
  2101. ss_info->string_length = se.string_length;
  2102. break;
  2103. case GFC_SS_CONSTRUCTOR:
  2104. if (expr->ts.type == BT_CHARACTER
  2105. && ss_info->string_length == NULL
  2106. && expr->ts.u.cl
  2107. && expr->ts.u.cl->length)
  2108. {
  2109. gfc_init_se (&se, NULL);
  2110. gfc_conv_expr_type (&se, expr->ts.u.cl->length,
  2111. gfc_charlen_type_node);
  2112. ss_info->string_length = se.expr;
  2113. gfc_add_block_to_block (&outer_loop->pre, &se.pre);
  2114. gfc_add_block_to_block (&outer_loop->post, &se.post);
  2115. }
  2116. trans_array_constructor (ss, where);
  2117. break;
  2118. case GFC_SS_TEMP:
  2119. case GFC_SS_COMPONENT:
  2120. /* Do nothing. These are handled elsewhere. */
  2121. break;
  2122. default:
  2123. gcc_unreachable ();
  2124. }
  2125. }
  2126. if (!subscript)
  2127. for (nested_loop = loop->nested; nested_loop;
  2128. nested_loop = nested_loop->next)
  2129. gfc_add_loop_ss_code (nested_loop, nested_loop->ss, subscript, where);
  2130. }
  2131. /* Translate expressions for the descriptor and data pointer of a SS. */
  2132. /*GCC ARRAYS*/
  2133. static void
  2134. gfc_conv_ss_descriptor (stmtblock_t * block, gfc_ss * ss, int base)
  2135. {
  2136. gfc_se se;
  2137. gfc_ss_info *ss_info;
  2138. gfc_array_info *info;
  2139. tree tmp;
  2140. ss_info = ss->info;
  2141. info = &ss_info->data.array;
  2142. /* Get the descriptor for the array to be scalarized. */
  2143. gcc_assert (ss_info->expr->expr_type == EXPR_VARIABLE);
  2144. gfc_init_se (&se, NULL);
  2145. se.descriptor_only = 1;
  2146. gfc_conv_expr_lhs (&se, ss_info->expr);
  2147. gfc_add_block_to_block (block, &se.pre);
  2148. info->descriptor = se.expr;
  2149. ss_info->string_length = se.string_length;
  2150. if (base)
  2151. {
  2152. /* Also the data pointer. */
  2153. tmp = gfc_conv_array_data (se.expr);
  2154. /* If this is a variable or address of a variable we use it directly.
  2155. Otherwise we must evaluate it now to avoid breaking dependency
  2156. analysis by pulling the expressions for elemental array indices
  2157. inside the loop. */
  2158. if (!(DECL_P (tmp)
  2159. || (TREE_CODE (tmp) == ADDR_EXPR
  2160. && DECL_P (TREE_OPERAND (tmp, 0)))))
  2161. tmp = gfc_evaluate_now (tmp, block);
  2162. info->data = tmp;
  2163. tmp = gfc_conv_array_offset (se.expr);
  2164. info->offset = gfc_evaluate_now (tmp, block);
  2165. /* Make absolutely sure that the saved_offset is indeed saved
  2166. so that the variable is still accessible after the loops
  2167. are translated. */
  2168. info->saved_offset = info->offset;
  2169. }
  2170. }
  2171. /* Initialize a gfc_loopinfo structure. */
  2172. void
  2173. gfc_init_loopinfo (gfc_loopinfo * loop)
  2174. {
  2175. int n;
  2176. memset (loop, 0, sizeof (gfc_loopinfo));
  2177. gfc_init_block (&loop->pre);
  2178. gfc_init_block (&loop->post);
  2179. /* Initially scalarize in order and default to no loop reversal. */
  2180. for (n = 0; n < GFC_MAX_DIMENSIONS; n++)
  2181. {
  2182. loop->order[n] = n;
  2183. loop->reverse[n] = GFC_INHIBIT_REVERSE;
  2184. }
  2185. loop->ss = gfc_ss_terminator;
  2186. }
  2187. /* Copies the loop variable info to a gfc_se structure. Does not copy the SS
  2188. chain. */
  2189. void
  2190. gfc_copy_loopinfo_to_se (gfc_se * se, gfc_loopinfo * loop)
  2191. {
  2192. se->loop = loop;
  2193. }
  2194. /* Return an expression for the data pointer of an array. */
  2195. tree
  2196. gfc_conv_array_data (tree descriptor)
  2197. {
  2198. tree type;
  2199. type = TREE_TYPE (descriptor);
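/* Three cases: the expression is already a pointer to the data
(descriptorless array with pointer type), a descriptorless array whose
address must be taken, or a full descriptor whose data field is read.  */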
  2200. if (GFC_ARRAY_TYPE_P (type))
  2201. {
  2202. if (TREE_CODE (type) == POINTER_TYPE)
  2203. return descriptor;
  2204. else
  2205. {
  2206. /* Descriptorless arrays. */
  2207. return gfc_build_addr_expr (NULL_TREE, descriptor);
  2208. }
  2209. }
  2210. else
  2211. return gfc_conv_descriptor_data_get (descriptor);
  2212. }
  2213. /* Return an expression for the base offset of an array. */
  2214. tree
  2215. gfc_conv_array_offset (tree descriptor)
  2216. {
  2217. tree type;
  2218. type = TREE_TYPE (descriptor);
  2219. if (GFC_ARRAY_TYPE_P (type))
  2220. return GFC_TYPE_ARRAY_OFFSET (type);
  2221. else
  2222. return gfc_conv_descriptor_offset_get (descriptor);
  2223. }
  2224. /* Get an expression for the array stride. */
  2225. tree
  2226. gfc_conv_array_stride (tree descriptor, int dim)
  2227. {
  2228. tree tmp;
  2229. tree type;
  2230. type = TREE_TYPE (descriptor);
2231. /* For descriptorless arrays the stride is stored in the array type. */
  2232. tmp = GFC_TYPE_ARRAY_STRIDE (type, dim);
  2233. if (tmp != NULL_TREE)
  2234. return tmp;
  2235. tmp = gfc_conv_descriptor_stride_get (descriptor, gfc_rank_cst[dim]);
  2236. return tmp;
  2237. }
  2238. /* Like gfc_conv_array_stride, but for the lower bound. */
  2239. tree
  2240. gfc_conv_array_lbound (tree descriptor, int dim)
  2241. {
  2242. tree tmp;
  2243. tree type;
  2244. type = TREE_TYPE (descriptor);
  2245. tmp = GFC_TYPE_ARRAY_LBOUND (type, dim);
  2246. if (tmp != NULL_TREE)
  2247. return tmp;
  2248. tmp = gfc_conv_descriptor_lbound_get (descriptor, gfc_rank_cst[dim]);
  2249. return tmp;
  2250. }
  2251. /* Like gfc_conv_array_stride, but for the upper bound. */
  2252. tree
  2253. gfc_conv_array_ubound (tree descriptor, int dim)
  2254. {
  2255. tree tmp;
  2256. tree type;
  2257. type = TREE_TYPE (descriptor);
  2258. tmp = GFC_TYPE_ARRAY_UBOUND (type, dim);
  2259. if (tmp != NULL_TREE)
  2260. return tmp;
  2261. /* This should only ever happen when passing an assumed shape array
  2262. as an actual parameter. The value will never be used. */
  2263. if (GFC_ARRAY_TYPE_P (TREE_TYPE (descriptor)))
  2264. return gfc_index_zero_node;
  2265. tmp = gfc_conv_descriptor_ubound_get (descriptor, gfc_rank_cst[dim]);
  2266. return tmp;
  2267. }
  2268. /* Generate code to perform an array index bound check. */
  2269. static tree
  2270. trans_array_bound_check (gfc_se * se, gfc_ss *ss, tree index, int n,
  2271. locus * where, bool check_upper)
  2272. {
  2273. tree fault;
  2274. tree tmp_lo, tmp_up;
  2275. tree descriptor;
  2276. char *msg;
  2277. const char * name = NULL;
  2278. if (!(gfc_option.rtcheck & GFC_RTCHECK_BOUNDS))
  2279. return index;
  2280. descriptor = ss->info->data.array.descriptor;
  2281. index = gfc_evaluate_now (index, &se->pre);
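/* INDEX is used both in the bound comparisons and in the error message
arguments, so force a single evaluation.  */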
  2282. /* We find a name for the error message. */
  2283. name = ss->info->expr->symtree->n.sym->name;
  2284. gcc_assert (name != NULL);
  2285. if (TREE_CODE (descriptor) == VAR_DECL)
  2286. name = IDENTIFIER_POINTER (DECL_NAME (descriptor));
  2287. /* If upper bound is present, include both bounds in the error message. */
  2288. if (check_upper)
  2289. {
  2290. tmp_lo = gfc_conv_array_lbound (descriptor, n);
  2291. tmp_up = gfc_conv_array_ubound (descriptor, n);
  2292. if (name)
  2293. msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
  2294. "outside of expected range (%%ld:%%ld)", n+1, name);
  2295. else
  2296. msg = xasprintf ("Index '%%ld' of dimension %d "
  2297. "outside of expected range (%%ld:%%ld)", n+1);
  2298. fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
  2299. index, tmp_lo);
  2300. gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
  2301. fold_convert (long_integer_type_node, index),
  2302. fold_convert (long_integer_type_node, tmp_lo),
  2303. fold_convert (long_integer_type_node, tmp_up));
  2304. fault = fold_build2_loc (input_location, GT_EXPR, boolean_type_node,
  2305. index, tmp_up);
  2306. gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
  2307. fold_convert (long_integer_type_node, index),
  2308. fold_convert (long_integer_type_node, tmp_lo),
  2309. fold_convert (long_integer_type_node, tmp_up));
  2310. free (msg);
  2311. }
  2312. else
  2313. {
  2314. tmp_lo = gfc_conv_array_lbound (descriptor, n);
  2315. if (name)
  2316. msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
  2317. "below lower bound of %%ld", n+1, name);
  2318. else
  2319. msg = xasprintf ("Index '%%ld' of dimension %d "
  2320. "below lower bound of %%ld", n+1);
  2321. fault = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
  2322. index, tmp_lo);
  2323. gfc_trans_runtime_check (true, false, fault, &se->pre, where, msg,
  2324. fold_convert (long_integer_type_node, index),
  2325. fold_convert (long_integer_type_node, tmp_lo));
  2326. free (msg);
  2327. }
  2328. return index;
  2329. }
  2330. /* Return the offset for an index. Performs bound checking for elemental
  2331. dimensions. Single element references are processed separately.
  2332. DIM is the array dimension, I is the loop dimension. */
  2333. static tree
  2334. conv_array_index_offset (gfc_se * se, gfc_ss * ss, int dim, int i,
  2335. gfc_array_ref * ar, tree stride)
  2336. {
  2337. gfc_array_info *info;
  2338. tree index;
  2339. tree desc;
  2340. tree data;
  2341. info = &ss->info->data.array;
  2342. /* Get the index into the array for this dimension. */
  2343. if (ar)
  2344. {
  2345. gcc_assert (ar->type != AR_ELEMENT);
  2346. switch (ar->dimen_type[dim])
  2347. {
  2348. case DIMEN_THIS_IMAGE:
  2349. gcc_unreachable ();
  2350. break;
  2351. case DIMEN_ELEMENT:
  2352. /* Elemental dimension. */
  2353. gcc_assert (info->subscript[dim]
  2354. && info->subscript[dim]->info->type == GFC_SS_SCALAR);
  2355. /* We've already translated this value outside the loop. */
  2356. index = info->subscript[dim]->info->data.scalar.value;
  2357. index = trans_array_bound_check (se, ss, index, dim, &ar->where,
  2358. ar->as->type != AS_ASSUMED_SIZE
  2359. || dim < ar->dimen - 1);
  2360. break;
  2361. case DIMEN_VECTOR:
  2362. gcc_assert (info && se->loop);
  2363. gcc_assert (info->subscript[dim]
  2364. && info->subscript[dim]->info->type == GFC_SS_VECTOR);
  2365. desc = info->subscript[dim]->info->data.array.descriptor;
  2366. /* Get a zero-based index into the vector. */
  2367. index = fold_build2_loc (input_location, MINUS_EXPR,
  2368. gfc_array_index_type,
  2369. se->loop->loopvar[i], se->loop->from[i]);
  2370. /* Multiply the index by the stride. */
  2371. index = fold_build2_loc (input_location, MULT_EXPR,
  2372. gfc_array_index_type,
  2373. index, gfc_conv_array_stride (desc, 0));
  2374. /* Read the vector to get an index into info->descriptor. */
  2375. data = build_fold_indirect_ref_loc (input_location,
  2376. gfc_conv_array_data (desc));
  2377. index = gfc_build_array_ref (data, index, NULL);
  2378. index = gfc_evaluate_now (index, &se->pre);
  2379. index = fold_convert (gfc_array_index_type, index);
  2380. /* Do any bounds checking on the final info->descriptor index. */
  2381. index = trans_array_bound_check (se, ss, index, dim, &ar->where,
  2382. ar->as->type != AS_ASSUMED_SIZE
  2383. || dim < ar->dimen - 1);
  2384. break;
  2385. case DIMEN_RANGE:
  2386. /* Scalarized dimension. */
  2387. gcc_assert (info && se->loop);
  2388. /* Multiply the loop variable by the stride and delta. */
  2389. index = se->loop->loopvar[i];
  2390. if (!integer_onep (info->stride[dim]))
  2391. index = fold_build2_loc (input_location, MULT_EXPR,
  2392. gfc_array_index_type, index,
  2393. info->stride[dim]);
  2394. if (!integer_zerop (info->delta[dim]))
  2395. index = fold_build2_loc (input_location, PLUS_EXPR,
  2396. gfc_array_index_type, index,
  2397. info->delta[dim]);
  2398. break;
  2399. default:
  2400. gcc_unreachable ();
  2401. }
  2402. }
  2403. else
  2404. {
  2405. /* Temporary array or derived type component. */
  2406. gcc_assert (se->loop);
  2407. index = se->loop->loopvar[se->loop->order[i]];
  2408. /* Pointer functions can have stride[0] different from unity.
  2409. Use the stride returned by the function call and stored in
  2410. the descriptor for the temporary. */
  2411. if (se->ss && se->ss->info->type == GFC_SS_FUNCTION
  2412. && se->ss->info->expr
  2413. && se->ss->info->expr->symtree
  2414. && se->ss->info->expr->symtree->n.sym->result
  2415. && se->ss->info->expr->symtree->n.sym->result->attr.pointer)
  2416. stride = gfc_conv_descriptor_stride_get (info->descriptor,
  2417. gfc_rank_cst[dim]);
  2418. if (info->delta[dim] && !integer_zerop (info->delta[dim]))
  2419. index = fold_build2_loc (input_location, PLUS_EXPR,
  2420. gfc_array_index_type, index, info->delta[dim]);
  2421. }
  2422. /* Multiply by the stride. */
  2423. if (!integer_onep (stride))
  2424. index = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
  2425. index, stride);
  2426. return index;
  2427. }
  2428. /* Build a scalarized array reference using the vptr 'size'. */
  2429. static bool
  2430. build_class_array_ref (gfc_se *se, tree base, tree index)
  2431. {
  2432. tree type;
  2433. tree size;
  2434. tree offset;
  2435. tree decl;
  2436. tree tmp;
  2437. gfc_expr *expr = se->ss->info->expr;
  2438. gfc_ref *ref;
  2439. gfc_ref *class_ref;
  2440. gfc_typespec *ts;
  2441. if (expr == NULL
  2442. || (expr->ts.type != BT_CLASS
  2443. && !gfc_is_alloc_class_array_function (expr)))
  2444. return false;
  2445. if (expr->symtree && expr->symtree->n.sym->ts.type == BT_CLASS)
  2446. ts = &expr->symtree->n.sym->ts;
  2447. else
  2448. ts = NULL;
  2449. class_ref = NULL;
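/* Search the reference chain for a CLASS component whose '_data' field is
array-referenced (non-elementally); its typespec identifies the class
container that supplies the vtab below.  */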
  2450. for (ref = expr->ref; ref; ref = ref->next)
  2451. {
  2452. if (ref->type == REF_COMPONENT
  2453. && ref->u.c.component->ts.type == BT_CLASS
  2454. && ref->next && ref->next->type == REF_COMPONENT
  2455. && strcmp (ref->next->u.c.component->name, "_data") == 0
  2456. && ref->next->next
  2457. && ref->next->next->type == REF_ARRAY
  2458. && ref->next->next->u.ar.type != AR_ELEMENT)
  2459. {
  2460. ts = &ref->u.c.component->ts;
  2461. class_ref = ref;
  2462. break;
  2463. }
  2464. }
  2465. if (ts == NULL)
  2466. return false;
  2467. if (class_ref == NULL && expr->symtree->n.sym->attr.function
  2468. && expr->symtree->n.sym == expr->symtree->n.sym->result)
  2469. {
  2470. gcc_assert (expr->symtree->n.sym->backend_decl == current_function_decl);
  2471. decl = gfc_get_fake_result_decl (expr->symtree->n.sym, 0);
  2472. }
  2473. else if (gfc_is_alloc_class_array_function (expr))
  2474. {
  2475. size = NULL_TREE;
  2476. decl = NULL_TREE;
  2477. for (tmp = base; tmp; tmp = TREE_OPERAND (tmp, 0))
  2478. {
  2479. tree type;
  2480. type = TREE_TYPE (tmp);
  2481. while (type)
  2482. {
  2483. if (GFC_CLASS_TYPE_P (type))
  2484. decl = tmp;
  2485. if (type != TYPE_CANONICAL (type))
  2486. type = TYPE_CANONICAL (type);
  2487. else
  2488. type = NULL_TREE;
  2489. }
  2490. if (TREE_CODE (tmp) == VAR_DECL)
  2491. break;
  2492. }
  2493. if (decl == NULL_TREE)
  2494. return false;
  2495. }
  2496. else if (class_ref == NULL)
  2497. decl = expr->symtree->n.sym->backend_decl;
  2498. else
  2499. {
  2500. /* Remove everything after the last class reference, convert the
2501. expression and then recover its tail end once more. */
  2502. gfc_se tmpse;
  2503. ref = class_ref->next;
  2504. class_ref->next = NULL;
  2505. gfc_init_se (&tmpse, NULL);
  2506. gfc_conv_expr (&tmpse, expr);
  2507. decl = tmpse.expr;
  2508. class_ref->next = ref;
  2509. }
  2510. if (POINTER_TYPE_P (TREE_TYPE (decl)))
  2511. decl = build_fold_indirect_ref_loc (input_location, decl);
  2512. if (!GFC_CLASS_TYPE_P (TREE_TYPE (decl)))
  2513. return false;
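/* Take the element size from the vtab, since the dynamic type may be
larger than the declared type.  */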
  2514. size = gfc_class_vtab_size_get (decl);
  2515. /* Build the address of the element. */
  2516. type = TREE_TYPE (TREE_TYPE (base));
  2517. size = fold_convert (TREE_TYPE (index), size);
  2518. offset = fold_build2_loc (input_location, MULT_EXPR,
  2519. gfc_array_index_type,
  2520. index, size);
  2521. tmp = gfc_build_addr_expr (pvoid_type_node, base);
  2522. tmp = fold_build_pointer_plus_loc (input_location, tmp, offset);
  2523. tmp = fold_convert (build_pointer_type (type), tmp);
  2524. /* Return the element in the se expression. */
  2525. se->expr = build_fold_indirect_ref_loc (input_location, tmp);
  2526. return true;
  2527. }
  2528. /* Build a scalarized reference to an array. */
  2529. static void
  2530. gfc_conv_scalarized_array_ref (gfc_se * se, gfc_array_ref * ar)
  2531. {
  2532. gfc_array_info *info;
  2533. tree decl = NULL_TREE;
  2534. tree index;
  2535. tree tmp;
  2536. gfc_ss *ss;
  2537. gfc_expr *expr;
  2538. int n;
  2539. ss = se->ss;
  2540. expr = ss->info->expr;
  2541. info = &ss->info->data.array;
  2542. if (ar)
  2543. n = se->loop->order[0];
  2544. else
  2545. n = 0;
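/* Only the innermost dimension is indexed here; the contributions of the
other dimensions were already accumulated into info->offset by the
preloop setup code.  */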
  2546. index = conv_array_index_offset (se, ss, ss->dim[n], n, ar, info->stride0);
  2547. /* Add the offset for this dimension to the stored offset for all other
  2548. dimensions. */
  2549. if (info->offset && !integer_zerop (info->offset))
  2550. index = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
  2551. index, info->offset);
  2552. if (expr && is_subref_array (expr))
  2553. decl = expr->symtree->n.sym->backend_decl;
  2554. tmp = build_fold_indirect_ref_loc (input_location, info->data);
2555. /* Use the vptr 'size' field to access the element of a class
  2556. array. */
  2557. if (build_class_array_ref (se, tmp, index))
  2558. return;
  2559. se->expr = gfc_build_array_ref (tmp, index, decl);
  2560. }
  2561. /* Translate access of temporary array. */
  2562. void
  2563. gfc_conv_tmp_array_ref (gfc_se * se)
  2564. {
  2565. se->string_length = se->ss->info->string_length;
  2566. gfc_conv_scalarized_array_ref (se, NULL);
  2567. gfc_advance_se_ss_chain (se);
  2568. }
  2569. /* Add T to the offset pair *OFFSET, *CST_OFFSET. */
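/* Integer constants are folded into *CST_OFFSET at compile time; all
other terms are chained onto *OFFSET with PLUS_EXPRs.  */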
  2570. static void
  2571. add_to_offset (tree *cst_offset, tree *offset, tree t)
  2572. {
  2573. if (TREE_CODE (t) == INTEGER_CST)
  2574. *cst_offset = int_const_binop (PLUS_EXPR, *cst_offset, t);
  2575. else
  2576. {
  2577. if (!integer_zerop (*offset))
  2578. *offset = fold_build2_loc (input_location, PLUS_EXPR,
  2579. gfc_array_index_type, *offset, t);
  2580. else
  2581. *offset = t;
  2582. }
  2583. }
  2584. static tree
  2585. build_array_ref (tree desc, tree offset, tree decl)
  2586. {
  2587. tree tmp;
  2588. tree type;
2589. /* Class container types do not always have GFC_CLASS_TYPE_P set,
  2590. but the canonical type does. */
  2591. if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc))
  2592. && TREE_CODE (desc) == COMPONENT_REF)
  2593. {
  2594. type = TREE_TYPE (TREE_OPERAND (desc, 0));
  2595. if (TYPE_CANONICAL (type)
  2596. && GFC_CLASS_TYPE_P (TYPE_CANONICAL (type)))
  2597. type = TYPE_CANONICAL (type);
  2598. }
  2599. else
  2600. type = NULL;
2601. /* Class array references need special treatment because the size of
2602. the dynamic (assigned) type must be used to address the element. */
  2603. if (type && GFC_CLASS_TYPE_P (type))
  2604. {
  2605. type = gfc_get_element_type (TREE_TYPE (desc));
  2606. tmp = TREE_OPERAND (desc, 0);
  2607. tmp = gfc_get_class_array_ref (offset, tmp);
  2608. tmp = fold_convert (build_pointer_type (type), tmp);
  2609. tmp = build_fold_indirect_ref_loc (input_location, tmp);
  2610. return tmp;
  2611. }
  2612. tmp = gfc_conv_array_data (desc);
  2613. tmp = build_fold_indirect_ref_loc (input_location, tmp);
  2614. tmp = gfc_build_array_ref (tmp, offset, decl);
  2615. return tmp;
  2616. }
  2617. /* Build an array reference. se->expr already holds the array descriptor.
  2618. This should be either a variable, indirect variable reference or component
  2619. reference. For arrays which do not have a descriptor, se->expr will be
  2620. the data pointer.
  2621. a(i, j, k) = base[offset + i * stride[0] + j * stride[1] + k * stride[2]]*/
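/* Illustrative sketch (not from the original source): with
INTEGER :: a(2:5,3:7) the strides are 1 and 4 and the base offset is
-(2*1 + 3*4) = -14, so a(2,3) maps to element 0 and a(5,7) to element 19.  */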
  2622. void
  2623. gfc_conv_array_ref (gfc_se * se, gfc_array_ref * ar, gfc_expr *expr,
  2624. locus * where)
  2625. {
  2626. int n;
  2627. tree offset, cst_offset;
  2628. tree tmp;
  2629. tree stride;
  2630. gfc_se indexse;
  2631. gfc_se tmpse;
  2632. gfc_symbol * sym = expr->symtree->n.sym;
  2633. char *var_name = NULL;
  2634. if (ar->dimen == 0)
  2635. {
  2636. gcc_assert (ar->codimen);
  2637. if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (se->expr)))
  2638. se->expr = build_fold_indirect_ref (gfc_conv_array_data (se->expr));
  2639. else
  2640. {
  2641. if (GFC_ARRAY_TYPE_P (TREE_TYPE (se->expr))
  2642. && TREE_CODE (TREE_TYPE (se->expr)) == POINTER_TYPE)
  2643. se->expr = build_fold_indirect_ref_loc (input_location, se->expr);
  2644. /* Use the actual tree type and not the wrapped coarray. */
  2645. if (!se->want_pointer)
  2646. se->expr = fold_convert (TYPE_MAIN_VARIANT (TREE_TYPE (se->expr)),
  2647. se->expr);
  2648. }
  2649. return;
  2650. }
  2651. /* Handle scalarized references separately. */
  2652. if (ar->type != AR_ELEMENT)
  2653. {
  2654. gfc_conv_scalarized_array_ref (se, ar);
  2655. gfc_advance_se_ss_chain (se);
  2656. return;
  2657. }
  2658. if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
  2659. {
  2660. size_t len;
  2661. gfc_ref *ref;
  2662. len = strlen (sym->name) + 1;
  2663. for (ref = expr->ref; ref; ref = ref->next)
  2664. {
  2665. if (ref->type == REF_ARRAY && &ref->u.ar == ar)
  2666. break;
  2667. if (ref->type == REF_COMPONENT)
  2668. len += 1 + strlen (ref->u.c.component->name);
  2669. }
  2670. var_name = XALLOCAVEC (char, len);
  2671. strcpy (var_name, sym->name);
  2672. for (ref = expr->ref; ref; ref = ref->next)
  2673. {
  2674. if (ref->type == REF_ARRAY && &ref->u.ar == ar)
  2675. break;
  2676. if (ref->type == REF_COMPONENT)
  2677. {
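/* Rebuild the reference as "sym%comp%...".  The doubled '%' survives the
later printf-style expansion of the runtime error message as a single '%'.  */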
  2678. strcat (var_name, "%%");
  2679. strcat (var_name, ref->u.c.component->name);
  2680. }
  2681. }
  2682. }
  2683. cst_offset = offset = gfc_index_zero_node;
  2684. add_to_offset (&cst_offset, &offset, gfc_conv_array_offset (se->expr));
  2685. /* Calculate the offsets from all the dimensions. Make sure to associate
  2686. the final offset so that we form a chain of loop invariant summands. */
  2687. for (n = ar->dimen - 1; n >= 0; n--)
  2688. {
  2689. /* Calculate the index for this dimension. */
  2690. gfc_init_se (&indexse, se);
  2691. gfc_conv_expr_type (&indexse, ar->start[n], gfc_array_index_type);
  2692. gfc_add_block_to_block (&se->pre, &indexse.pre);
  2693. if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
  2694. {
  2695. /* Check array bounds. */
  2696. tree cond;
  2697. char *msg;
  2698. /* Evaluate the indexse.expr only once. */
  2699. indexse.expr = save_expr (indexse.expr);
  2700. /* Lower bound. */
  2701. tmp = gfc_conv_array_lbound (se->expr, n);
  2702. if (sym->attr.temporary)
  2703. {
  2704. gfc_init_se (&tmpse, se);
  2705. gfc_conv_expr_type (&tmpse, ar->as->lower[n],
  2706. gfc_array_index_type);
  2707. gfc_add_block_to_block (&se->pre, &tmpse.pre);
  2708. tmp = tmpse.expr;
  2709. }
  2710. cond = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
  2711. indexse.expr, tmp);
  2712. msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
  2713. "below lower bound of %%ld", n+1, var_name);
  2714. gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
  2715. fold_convert (long_integer_type_node,
  2716. indexse.expr),
  2717. fold_convert (long_integer_type_node, tmp));
  2718. free (msg);
  2719. /* Upper bound, but not for the last dimension of assumed-size
  2720. arrays. */
  2721. if (n < ar->dimen - 1 || ar->as->type != AS_ASSUMED_SIZE)
  2722. {
  2723. tmp = gfc_conv_array_ubound (se->expr, n);
  2724. if (sym->attr.temporary)
  2725. {
  2726. gfc_init_se (&tmpse, se);
  2727. gfc_conv_expr_type (&tmpse, ar->as->upper[n],
  2728. gfc_array_index_type);
  2729. gfc_add_block_to_block (&se->pre, &tmpse.pre);
  2730. tmp = tmpse.expr;
  2731. }
  2732. cond = fold_build2_loc (input_location, GT_EXPR,
  2733. boolean_type_node, indexse.expr, tmp);
  2734. msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
  2735. "above upper bound of %%ld", n+1, var_name);
  2736. gfc_trans_runtime_check (true, false, cond, &se->pre, where, msg,
  2737. fold_convert (long_integer_type_node,
  2738. indexse.expr),
  2739. fold_convert (long_integer_type_node, tmp));
  2740. free (msg);
  2741. }
  2742. }
  2743. /* Multiply the index by the stride. */
  2744. stride = gfc_conv_array_stride (se->expr, n);
  2745. tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
  2746. indexse.expr, stride);
  2747. /* And add it to the total. */
  2748. add_to_offset (&cst_offset, &offset, tmp);
  2749. }
  2750. if (!integer_zerop (cst_offset))
  2751. offset = fold_build2_loc (input_location, PLUS_EXPR,
  2752. gfc_array_index_type, offset, cst_offset);
  2753. se->expr = build_array_ref (se->expr, offset, sym->backend_decl);
  2754. }
  2755. /* Add the offset corresponding to array's ARRAY_DIM dimension and loop's
  2756. LOOP_DIM dimension (if any) to array's offset. */
  2757. static void
  2758. add_array_offset (stmtblock_t *pblock, gfc_loopinfo *loop, gfc_ss *ss,
  2759. gfc_array_ref *ar, int array_dim, int loop_dim)
  2760. {
  2761. gfc_se se;
  2762. gfc_array_info *info;
  2763. tree stride, index;
  2764. info = &ss->info->data.array;
  2765. gfc_init_se (&se, NULL);
  2766. se.loop = loop;
  2767. se.expr = info->descriptor;
  2768. stride = gfc_conv_array_stride (info->descriptor, array_dim);
  2769. index = conv_array_index_offset (&se, ss, array_dim, loop_dim, ar, stride);
  2770. gfc_add_block_to_block (pblock, &se.pre);
  2771. info->offset = fold_build2_loc (input_location, PLUS_EXPR,
  2772. gfc_array_index_type,
  2773. info->offset, index);
  2774. info->offset = gfc_evaluate_now (info->offset, pblock);
  2775. }
  2776. /* Generate the code to be executed immediately before entering a
  2777. scalarization loop. */
  2778. static void
  2779. gfc_trans_preloop_setup (gfc_loopinfo * loop, int dim, int flag,
  2780. stmtblock_t * pblock)
  2781. {
  2782. tree stride;
  2783. gfc_ss_info *ss_info;
  2784. gfc_array_info *info;
  2785. gfc_ss_type ss_type;
  2786. gfc_ss *ss, *pss;
  2787. gfc_loopinfo *ploop;
  2788. gfc_array_ref *ar;
  2789. int i;
  2790. /* This code will be executed before entering the scalarization loop
  2791. for this dimension. */
  2792. for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
  2793. {
  2794. ss_info = ss->info;
  2795. if ((ss_info->useflags & flag) == 0)
  2796. continue;
  2797. ss_type = ss_info->type;
  2798. if (ss_type != GFC_SS_SECTION
  2799. && ss_type != GFC_SS_FUNCTION
  2800. && ss_type != GFC_SS_CONSTRUCTOR
  2801. && ss_type != GFC_SS_COMPONENT)
  2802. continue;
  2803. info = &ss_info->data.array;
  2804. gcc_assert (dim < ss->dimen);
  2805. gcc_assert (ss->dimen == loop->dimen);
  2806. if (info->ref)
  2807. ar = &info->ref->u.ar;
  2808. else
  2809. ar = NULL;
  2810. if (dim == loop->dimen - 1 && loop->parent != NULL)
  2811. {
  2812. /* If we are in the outermost dimension of this loop, the previous
  2813. dimension shall be in the parent loop. */
  2814. gcc_assert (ss->parent != NULL);
  2815. pss = ss->parent;
  2816. ploop = loop->parent;
2817. /* ss and ss->parent refer to the same array. */
  2818. gcc_assert (ss_info == pss->info);
  2819. }
  2820. else
  2821. {
  2822. ploop = loop;
  2823. pss = ss;
  2824. }
  2825. if (dim == loop->dimen - 1)
  2826. i = 0;
  2827. else
  2828. i = dim + 1;
  2829. /* For the time being, there is no loop reordering. */
  2830. gcc_assert (i == ploop->order[i]);
  2831. i = ploop->order[i];
  2832. if (dim == loop->dimen - 1 && loop->parent == NULL)
  2833. {
  2834. stride = gfc_conv_array_stride (info->descriptor,
  2835. innermost_ss (ss)->dim[i]);
  2836. /* Calculate the stride of the innermost loop. Hopefully this will
  2837. allow the backend optimizers to do their stuff more effectively.
  2838. */
  2839. info->stride0 = gfc_evaluate_now (stride, pblock);
  2840. /* For the outermost loop calculate the offset due to any
  2841. elemental dimensions. It will have been initialized with the
  2842. base offset of the array. */
  2843. if (info->ref)
  2844. {
  2845. for (i = 0; i < ar->dimen; i++)
  2846. {
  2847. if (ar->dimen_type[i] != DIMEN_ELEMENT)
  2848. continue;
  2849. add_array_offset (pblock, loop, ss, ar, i, /* unused */ -1);
  2850. }
  2851. }
  2852. }
  2853. else
  2854. /* Add the offset for the previous loop dimension. */
  2855. add_array_offset (pblock, ploop, ss, ar, pss->dim[i], i);
  2856. /* Remember this offset for the second loop. */
  2857. if (dim == loop->temp_dim - 1 && loop->parent == NULL)
  2858. info->saved_offset = info->offset;
  2859. }
  2860. }
  2861. /* Start a scalarized expression. Creates a scope and declares loop
  2862. variables. */
  2863. void
  2864. gfc_start_scalarized_body (gfc_loopinfo * loop, stmtblock_t * pbody)
  2865. {
  2866. int dim;
  2867. int n;
  2868. int flags;
  2869. gcc_assert (!loop->array_parameter);
  2870. for (dim = loop->dimen - 1; dim >= 0; dim--)
  2871. {
  2872. n = loop->order[dim];
  2873. gfc_start_block (&loop->code[n]);
  2874. /* Create the loop variable. */
  2875. loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "S");
  2876. if (dim < loop->temp_dim)
  2877. flags = 3;
  2878. else
  2879. flags = 1;
  2880. /* Calculate values that will be constant within this loop. */
  2881. gfc_trans_preloop_setup (loop, dim, flags, &loop->code[n]);
  2882. }
  2883. gfc_start_block (pbody);
  2884. }
  2885. /* Generates the actual loop code for a scalarization loop. */
  2886. void
  2887. gfc_trans_scalarized_loop_end (gfc_loopinfo * loop, int n,
  2888. stmtblock_t * pbody)
  2889. {
  2890. stmtblock_t block;
  2891. tree cond;
  2892. tree tmp;
  2893. tree loopbody;
  2894. tree exit_label;
  2895. tree stmt;
  2896. tree init;
  2897. tree incr;
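/* Inside an OpenMP WORKSHARE construct the outermost scalarized loop is
emitted as an OMP_FOR so its iterations can be divided among the threads;
otherwise a plain LOOP_EXPR with an explicit exit test is built below.  */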
  2898. if ((ompws_flags & (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS))
  2899. == (OMPWS_WORKSHARE_FLAG | OMPWS_SCALARIZER_WS)
  2900. && n == loop->dimen - 1)
  2901. {
  2902. /* We create an OMP_FOR construct for the outermost scalarized loop. */
  2903. init = make_tree_vec (1);
  2904. cond = make_tree_vec (1);
  2905. incr = make_tree_vec (1);
  2906. /* Cycle statement is implemented with a goto. Exit statement must not
  2907. be present for this loop. */
  2908. exit_label = gfc_build_label_decl (NULL_TREE);
  2909. TREE_USED (exit_label) = 1;
  2910. /* Label for cycle statements (if needed). */
  2911. tmp = build1_v (LABEL_EXPR, exit_label);
  2912. gfc_add_expr_to_block (pbody, tmp);
  2913. stmt = make_node (OMP_FOR);
  2914. TREE_TYPE (stmt) = void_type_node;
  2915. OMP_FOR_BODY (stmt) = loopbody = gfc_finish_block (pbody);
  2916. OMP_FOR_CLAUSES (stmt) = build_omp_clause (input_location,
  2917. OMP_CLAUSE_SCHEDULE);
  2918. OMP_CLAUSE_SCHEDULE_KIND (OMP_FOR_CLAUSES (stmt))
  2919. = OMP_CLAUSE_SCHEDULE_STATIC;
  2920. if (ompws_flags & OMPWS_NOWAIT)
  2921. OMP_CLAUSE_CHAIN (OMP_FOR_CLAUSES (stmt))
  2922. = build_omp_clause (input_location, OMP_CLAUSE_NOWAIT);
  2923. /* Initialize the loopvar. */
  2924. TREE_VEC_ELT (init, 0) = build2_v (MODIFY_EXPR, loop->loopvar[n],
  2925. loop->from[n]);
  2926. OMP_FOR_INIT (stmt) = init;
  2927. /* The exit condition. */
  2928. TREE_VEC_ELT (cond, 0) = build2_loc (input_location, LE_EXPR,
  2929. boolean_type_node,
  2930. loop->loopvar[n], loop->to[n]);
  2931. SET_EXPR_LOCATION (TREE_VEC_ELT (cond, 0), input_location);
  2932. OMP_FOR_COND (stmt) = cond;
  2933. /* Increment the loopvar. */
  2934. tmp = build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
  2935. loop->loopvar[n], gfc_index_one_node);
  2936. TREE_VEC_ELT (incr, 0) = fold_build2_loc (input_location, MODIFY_EXPR,
  2937. void_type_node, loop->loopvar[n], tmp);
  2938. OMP_FOR_INCR (stmt) = incr;
  2939. ompws_flags &= ~OMPWS_CURR_SINGLEUNIT;
  2940. gfc_add_expr_to_block (&loop->code[n], stmt);
  2941. }
  2942. else
  2943. {
  2944. bool reverse_loop = (loop->reverse[n] == GFC_REVERSE_SET)
  2945. && (loop->temp_ss == NULL);
  2946. loopbody = gfc_finish_block (pbody);
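/* If loop reversal was requested for this dimension (GFC_REVERSE_SET) and
no temporary is needed, swap the bounds and count downwards.  */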
  2947. if (reverse_loop)
  2948. {
  2949. tmp = loop->from[n];
  2950. loop->from[n] = loop->to[n];
  2951. loop->to[n] = tmp;
  2952. }
  2953. /* Initialize the loopvar. */
  2954. if (loop->loopvar[n] != loop->from[n])
  2955. gfc_add_modify (&loop->code[n], loop->loopvar[n], loop->from[n]);
  2956. exit_label = gfc_build_label_decl (NULL_TREE);
  2957. /* Generate the loop body. */
  2958. gfc_init_block (&block);
  2959. /* The exit condition. */
  2960. cond = fold_build2_loc (input_location, reverse_loop ? LT_EXPR : GT_EXPR,
  2961. boolean_type_node, loop->loopvar[n], loop->to[n]);
  2962. tmp = build1_v (GOTO_EXPR, exit_label);
  2963. TREE_USED (exit_label) = 1;
  2964. tmp = build3_v (COND_EXPR, cond, tmp, build_empty_stmt (input_location));
  2965. gfc_add_expr_to_block (&block, tmp);
  2966. /* The main body. */
  2967. gfc_add_expr_to_block (&block, loopbody);
  2968. /* Increment the loopvar. */
  2969. tmp = fold_build2_loc (input_location,
  2970. reverse_loop ? MINUS_EXPR : PLUS_EXPR,
  2971. gfc_array_index_type, loop->loopvar[n],
  2972. gfc_index_one_node);
  2973. gfc_add_modify (&block, loop->loopvar[n], tmp);
  2974. /* Build the loop. */
  2975. tmp = gfc_finish_block (&block);
  2976. tmp = build1_v (LOOP_EXPR, tmp);
  2977. gfc_add_expr_to_block (&loop->code[n], tmp);
  2978. /* Add the exit label. */
  2979. tmp = build1_v (LABEL_EXPR, exit_label);
  2980. gfc_add_expr_to_block (&loop->code[n], tmp);
  2981. }
  2982. }
  2983. /* Finishes and generates the loops for a scalarized expression. */
  2984. void
  2985. gfc_trans_scalarizing_loops (gfc_loopinfo * loop, stmtblock_t * body)
  2986. {
  2987. int dim;
  2988. int n;
  2989. gfc_ss *ss;
  2990. stmtblock_t *pblock;
  2991. tree tmp;
  2992. pblock = body;
  2993. /* Generate the loops. */
  2994. for (dim = 0; dim < loop->dimen; dim++)
  2995. {
  2996. n = loop->order[dim];
  2997. gfc_trans_scalarized_loop_end (loop, n, pblock);
  2998. loop->loopvar[n] = NULL_TREE;
  2999. pblock = &loop->code[n];
  3000. }
  3001. tmp = gfc_finish_block (pblock);
  3002. gfc_add_expr_to_block (&loop->pre, tmp);
  3003. /* Clear all the used flags. */
  3004. for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
  3005. if (ss->parent == NULL)
  3006. ss->info->useflags = 0;
  3007. }
  3008. /* Finish the main body of a scalarized expression, and start the secondary
  3009. copying body. */
  3010. void
  3011. gfc_trans_scalarized_loop_boundary (gfc_loopinfo * loop, stmtblock_t * body)
  3012. {
  3013. int dim;
  3014. int n;
  3015. stmtblock_t *pblock;
  3016. gfc_ss *ss;
  3017. pblock = body;
  3018. /* We finish as many loops as are used by the temporary. */
  3019. for (dim = 0; dim < loop->temp_dim - 1; dim++)
  3020. {
  3021. n = loop->order[dim];
  3022. gfc_trans_scalarized_loop_end (loop, n, pblock);
  3023. loop->loopvar[n] = NULL_TREE;
  3024. pblock = &loop->code[n];
  3025. }
  3026. /* We don't want to finish the outermost loop entirely. */
  3027. n = loop->order[loop->temp_dim - 1];
  3028. gfc_trans_scalarized_loop_end (loop, n, pblock);
  3029. /* Restore the initial offsets. */
  3030. for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
  3031. {
  3032. gfc_ss_type ss_type;
  3033. gfc_ss_info *ss_info;
  3034. ss_info = ss->info;
  3035. if ((ss_info->useflags & 2) == 0)
  3036. continue;
  3037. ss_type = ss_info->type;
  3038. if (ss_type != GFC_SS_SECTION
  3039. && ss_type != GFC_SS_FUNCTION
  3040. && ss_type != GFC_SS_CONSTRUCTOR
  3041. && ss_type != GFC_SS_COMPONENT)
  3042. continue;
  3043. ss_info->data.array.offset = ss_info->data.array.saved_offset;
  3044. }
  3045. /* Restart all the inner loops we just finished. */
  3046. for (dim = loop->temp_dim - 2; dim >= 0; dim--)
  3047. {
  3048. n = loop->order[dim];
  3049. gfc_start_block (&loop->code[n]);
  3050. loop->loopvar[n] = gfc_create_var (gfc_array_index_type, "Q");
  3051. gfc_trans_preloop_setup (loop, dim, 2, &loop->code[n]);
  3052. }
  3053. /* Start a block for the secondary copying code. */
  3054. gfc_start_block (body);
  3055. }
  3056. /* Precalculate (either lower or upper) bound of an array section.
  3057. BLOCK: Block in which the (pre)calculation code will go.
  3058. BOUNDS[DIM]: Where the bound value will be stored once evaluated.
  3059. VALUES[DIM]: Specified bound (NULL <=> unspecified).
  3060. DESC: Array descriptor from which the bound will be picked if unspecified
  3061. (either lower or upper bound according to LBOUND). */
  3062. static void
  3063. evaluate_bound (stmtblock_t *block, tree *bounds, gfc_expr ** values,
  3064. tree desc, int dim, bool lbound)
  3065. {
  3066. gfc_se se;
  3067. gfc_expr * input_val = values[dim];
  3068. tree *output = &bounds[dim];
  3069. if (input_val)
  3070. {
  3071. /* Specified section bound. */
  3072. gfc_init_se (&se, NULL);
  3073. gfc_conv_expr_type (&se, input_val, gfc_array_index_type);
  3074. gfc_add_block_to_block (block, &se.pre);
  3075. *output = se.expr;
  3076. }
  3077. else
  3078. {
  3079. /* No specific bound specified so use the bound of the array. */
  3080. *output = lbound ? gfc_conv_array_lbound (desc, dim) :
  3081. gfc_conv_array_ubound (desc, dim);
  3082. }
  3083. *output = gfc_evaluate_now (*output, block);
  3084. }
  3085. /* Calculate the lower bound of an array section. */
  3086. static void
  3087. gfc_conv_section_startstride (stmtblock_t * block, gfc_ss * ss, int dim)
  3088. {
  3089. gfc_expr *stride = NULL;
  3090. tree desc;
  3091. gfc_se se;
  3092. gfc_array_info *info;
  3093. gfc_array_ref *ar;
  3094. gcc_assert (ss->info->type == GFC_SS_SECTION);
  3095. info = &ss->info->data.array;
  3096. ar = &info->ref->u.ar;
  3097. if (ar->dimen_type[dim] == DIMEN_VECTOR)
  3098. {
  3099. /* We use a zero-based index to access the vector. */
  3100. info->start[dim] = gfc_index_zero_node;
  3101. info->end[dim] = NULL;
  3102. info->stride[dim] = gfc_index_one_node;
  3103. return;
  3104. }
  3105. gcc_assert (ar->dimen_type[dim] == DIMEN_RANGE
  3106. || ar->dimen_type[dim] == DIMEN_THIS_IMAGE);
  3107. desc = info->descriptor;
  3108. stride = ar->stride[dim];
  3109. /* Calculate the start of the range. For vector subscripts this will
  3110. be the range of the vector. */
  3111. evaluate_bound (block, info->start, ar->start, desc, dim, true);
  3112. /* Similarly calculate the end. Although this is not used in the
  3113. scalarizer, it is needed when checking bounds and where the end
  3114. is an expression with side-effects. */
  3115. evaluate_bound (block, info->end, ar->end, desc, dim, false);
  3116. /* Calculate the stride. */
  3117. if (stride == NULL)
  3118. info->stride[dim] = gfc_index_one_node;
  3119. else
  3120. {
  3121. gfc_init_se (&se, NULL);
  3122. gfc_conv_expr_type (&se, stride, gfc_array_index_type);
  3123. gfc_add_block_to_block (block, &se.pre);
  3124. info->stride[dim] = gfc_evaluate_now (se.expr, block);
  3125. }
  3126. }
  3127. /* Calculates the range start and stride for a SS chain. Also gets the
  3128. descriptor and data pointer. The range of vector subscripts is the size
  3129. of the vector. Array bounds are also checked. */
  3130. void
  3131. gfc_conv_ss_startstride (gfc_loopinfo * loop)
  3132. {
  3133. int n;
  3134. tree tmp;
  3135. gfc_ss *ss;
  3136. tree desc;
  3137. gfc_loopinfo * const outer_loop = outermost_loop (loop);
  3138. loop->dimen = 0;
  3139. /* Determine the rank of the loop. */
  3140. for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
  3141. {
  3142. switch (ss->info->type)
  3143. {
  3144. case GFC_SS_SECTION:
  3145. case GFC_SS_CONSTRUCTOR:
  3146. case GFC_SS_FUNCTION:
  3147. case GFC_SS_COMPONENT:
  3148. loop->dimen = ss->dimen;
  3149. goto done;
3150. /* As usual, lbound and ubound are exceptions! */
  3151. case GFC_SS_INTRINSIC:
  3152. switch (ss->info->expr->value.function.isym->id)
  3153. {
  3154. case GFC_ISYM_LBOUND:
  3155. case GFC_ISYM_UBOUND:
  3156. case GFC_ISYM_LCOBOUND:
  3157. case GFC_ISYM_UCOBOUND:
  3158. case GFC_ISYM_THIS_IMAGE:
  3159. loop->dimen = ss->dimen;
  3160. goto done;
  3161. default:
  3162. break;
  3163. }
  3164. default:
  3165. break;
  3166. }
  3167. }
  3168. /* We should have determined the rank of the expression by now. If
  3169. not, that's bad news. */
  3170. gcc_unreachable ();
  3171. done:
  3172. /* Loop over all the SS in the chain. */
  3173. for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
  3174. {
  3175. gfc_ss_info *ss_info;
  3176. gfc_array_info *info;
  3177. gfc_expr *expr;
  3178. ss_info = ss->info;
  3179. expr = ss_info->expr;
  3180. info = &ss_info->data.array;
  3181. if (expr && expr->shape && !info->shape)
  3182. info->shape = expr->shape;
  3183. switch (ss_info->type)
  3184. {
  3185. case GFC_SS_SECTION:
  3186. /* Get the descriptor for the array. If it is a cross loops array,
  3187. we got the descriptor already in the outermost loop. */
  3188. if (ss->parent == NULL)
  3189. gfc_conv_ss_descriptor (&outer_loop->pre, ss,
  3190. !loop->array_parameter);
  3191. for (n = 0; n < ss->dimen; n++)
  3192. gfc_conv_section_startstride (&outer_loop->pre, ss, ss->dim[n]);
  3193. break;
  3194. case GFC_SS_INTRINSIC:
  3195. switch (expr->value.function.isym->id)
  3196. {
  3197. /* Fall through to supply start and stride. */
  3198. case GFC_ISYM_LBOUND:
  3199. case GFC_ISYM_UBOUND:
  3200. {
  3201. gfc_expr *arg;
  3202. /* This is the variant without DIM=... */
  3203. gcc_assert (expr->value.function.actual->next->expr == NULL);
  3204. arg = expr->value.function.actual->expr;
  3205. if (arg->rank == -1)
  3206. {
  3207. gfc_se se;
  3208. tree rank, tmp;
  3209. /* The rank (hence the return value's shape) is unknown,
  3210. we have to retrieve it. */
  3211. gfc_init_se (&se, NULL);
  3212. se.descriptor_only = 1;
  3213. gfc_conv_expr (&se, arg);
  3214. /* This is a bare variable, so there is no preliminary
  3215. or cleanup code. */
  3216. gcc_assert (se.pre.head == NULL_TREE
  3217. && se.post.head == NULL_TREE);
  3218. rank = gfc_conv_descriptor_rank (se.expr);
  3219. tmp = fold_build2_loc (input_location, MINUS_EXPR,
  3220. gfc_array_index_type,
  3221. fold_convert (gfc_array_index_type,
  3222. rank),
  3223. gfc_index_one_node);
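/* The LBOUND/UBOUND result for an assumed-rank argument is a rank-1
array with one element per dimension, hence the loop range 0 .. rank-1.  */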
  3224. info->end[0] = gfc_evaluate_now (tmp, &outer_loop->pre);
  3225. info->start[0] = gfc_index_zero_node;
  3226. info->stride[0] = gfc_index_one_node;
  3227. continue;
  3228. }
3229. /* Otherwise fall through to the GFC_SS_FUNCTION case. */
  3230. }
  3231. case GFC_ISYM_LCOBOUND:
  3232. case GFC_ISYM_UCOBOUND:
  3233. case GFC_ISYM_THIS_IMAGE:
  3234. break;
  3235. default:
  3236. continue;
  3237. }
  3238. case GFC_SS_CONSTRUCTOR:
  3239. case GFC_SS_FUNCTION:
  3240. for (n = 0; n < ss->dimen; n++)
  3241. {
  3242. int dim = ss->dim[n];
  3243. info->start[dim] = gfc_index_zero_node;
  3244. info->end[dim] = gfc_index_zero_node;
  3245. info->stride[dim] = gfc_index_one_node;
  3246. }
  3247. break;
  3248. default:
  3249. break;
  3250. }
  3251. }
  3252. /* The rest is just runtime bound checking. */
  3253. if (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS)
  3254. {
  3255. stmtblock_t block;
  3256. tree lbound, ubound;
  3257. tree end;
  3258. tree size[GFC_MAX_DIMENSIONS];
  3259. tree stride_pos, stride_neg, non_zerosized, tmp2, tmp3;
  3260. gfc_array_info *info;
  3261. char *msg;
  3262. int dim;
  3263. gfc_start_block (&block);
  3264. for (n = 0; n < loop->dimen; n++)
  3265. size[n] = NULL_TREE;
  3266. for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
  3267. {
  3268. stmtblock_t inner;
  3269. gfc_ss_info *ss_info;
  3270. gfc_expr *expr;
  3271. locus *expr_loc;
  3272. const char *expr_name;
  3273. ss_info = ss->info;
  3274. if (ss_info->type != GFC_SS_SECTION)
  3275. continue;
  3276. /* Catch allocatable lhs in f2003. */
  3277. if (flag_realloc_lhs && ss->is_alloc_lhs)
  3278. continue;
  3279. expr = ss_info->expr;
  3280. expr_loc = &expr->where;
  3281. expr_name = expr->symtree->name;
  3282. gfc_start_block (&inner);
  3283. /* TODO: range checking for mapped dimensions. */
  3284. info = &ss_info->data.array;
  3285. /* This code only checks ranges. Elemental and vector
  3286. dimensions are checked later. */
  3287. for (n = 0; n < loop->dimen; n++)
  3288. {
  3289. bool check_upper;
  3290. dim = ss->dim[n];
  3291. if (info->ref->u.ar.dimen_type[dim] != DIMEN_RANGE)
  3292. continue;
  3293. if (dim == info->ref->u.ar.dimen - 1
  3294. && info->ref->u.ar.as->type == AS_ASSUMED_SIZE)
  3295. check_upper = false;
  3296. else
  3297. check_upper = true;
  3298. /* Zero stride is not allowed. */
  3299. tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
  3300. info->stride[dim], gfc_index_zero_node);
  3301. msg = xasprintf ("Zero stride is not allowed, for dimension %d "
  3302. "of array '%s'", dim + 1, expr_name);
  3303. gfc_trans_runtime_check (true, false, tmp, &inner,
  3304. expr_loc, msg);
  3305. free (msg);
  3306. desc = info->descriptor;
  3307. /* This is the run-time equivalent of resolve.c's
3308. check_dimension(). The logic is more readable there
  3309. than it is here, with all the trees. */
  3310. lbound = gfc_conv_array_lbound (desc, dim);
  3311. end = info->end[dim];
  3312. if (check_upper)
  3313. ubound = gfc_conv_array_ubound (desc, dim);
  3314. else
  3315. ubound = NULL;
  3316. /* non_zerosized is true when the selected range is not
  3317. empty. */
  3318. stride_pos = fold_build2_loc (input_location, GT_EXPR,
  3319. boolean_type_node, info->stride[dim],
  3320. gfc_index_zero_node);
  3321. tmp = fold_build2_loc (input_location, LE_EXPR, boolean_type_node,
  3322. info->start[dim], end);
  3323. stride_pos = fold_build2_loc (input_location, TRUTH_AND_EXPR,
  3324. boolean_type_node, stride_pos, tmp);
  3325. stride_neg = fold_build2_loc (input_location, LT_EXPR,
  3326. boolean_type_node,
  3327. info->stride[dim], gfc_index_zero_node);
  3328. tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
  3329. info->start[dim], end);
  3330. stride_neg = fold_build2_loc (input_location, TRUTH_AND_EXPR,
  3331. boolean_type_node,
  3332. stride_neg, tmp);
  3333. non_zerosized = fold_build2_loc (input_location, TRUTH_OR_EXPR,
  3334. boolean_type_node,
  3335. stride_pos, stride_neg);
  3336. /* Check the start of the range against the lower and upper
  3337. bounds of the array, if the range is not empty.
  3338. If upper bound is present, include both bounds in the
  3339. error message. */
  3340. if (check_upper)
  3341. {
  3342. tmp = fold_build2_loc (input_location, LT_EXPR,
  3343. boolean_type_node,
  3344. info->start[dim], lbound);
  3345. tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
  3346. boolean_type_node,
  3347. non_zerosized, tmp);
  3348. tmp2 = fold_build2_loc (input_location, GT_EXPR,
  3349. boolean_type_node,
  3350. info->start[dim], ubound);
  3351. tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
  3352. boolean_type_node,
  3353. non_zerosized, tmp2);
  3354. msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
  3355. "outside of expected range (%%ld:%%ld)",
  3356. dim + 1, expr_name);
  3357. gfc_trans_runtime_check (true, false, tmp, &inner,
  3358. expr_loc, msg,
  3359. fold_convert (long_integer_type_node, info->start[dim]),
  3360. fold_convert (long_integer_type_node, lbound),
  3361. fold_convert (long_integer_type_node, ubound));
  3362. gfc_trans_runtime_check (true, false, tmp2, &inner,
  3363. expr_loc, msg,
  3364. fold_convert (long_integer_type_node, info->start[dim]),
  3365. fold_convert (long_integer_type_node, lbound),
  3366. fold_convert (long_integer_type_node, ubound));
  3367. free (msg);
  3368. }
  3369. else
  3370. {
  3371. tmp = fold_build2_loc (input_location, LT_EXPR,
  3372. boolean_type_node,
  3373. info->start[dim], lbound);
  3374. tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
  3375. boolean_type_node, non_zerosized, tmp);
  3376. msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
  3377. "below lower bound of %%ld",
  3378. dim + 1, expr_name);
  3379. gfc_trans_runtime_check (true, false, tmp, &inner,
  3380. expr_loc, msg,
  3381. fold_convert (long_integer_type_node, info->start[dim]),
  3382. fold_convert (long_integer_type_node, lbound));
  3383. free (msg);
  3384. }
  3385. /* Compute the last element of the range, which is not
  3386. necessarily "end" (think 0:5:3, which doesn't contain 5)
  3387. and check it against both lower and upper bounds. */
  3388. tmp = fold_build2_loc (input_location, MINUS_EXPR,
  3389. gfc_array_index_type, end,
  3390. info->start[dim]);
  3391. tmp = fold_build2_loc (input_location, TRUNC_MOD_EXPR,
  3392. gfc_array_index_type, tmp,
  3393. info->stride[dim]);
  3394. tmp = fold_build2_loc (input_location, MINUS_EXPR,
  3395. gfc_array_index_type, end, tmp);
  3396. tmp2 = fold_build2_loc (input_location, LT_EXPR,
  3397. boolean_type_node, tmp, lbound);
  3398. tmp2 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
  3399. boolean_type_node, non_zerosized, tmp2);
  3400. if (check_upper)
  3401. {
  3402. tmp3 = fold_build2_loc (input_location, GT_EXPR,
  3403. boolean_type_node, tmp, ubound);
  3404. tmp3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
  3405. boolean_type_node, non_zerosized, tmp3);
  3406. msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
  3407. "outside of expected range (%%ld:%%ld)",
  3408. dim + 1, expr_name);
  3409. gfc_trans_runtime_check (true, false, tmp2, &inner,
  3410. expr_loc, msg,
  3411. fold_convert (long_integer_type_node, tmp),
  3412. fold_convert (long_integer_type_node, ubound),
  3413. fold_convert (long_integer_type_node, lbound));
  3414. gfc_trans_runtime_check (true, false, tmp3, &inner,
  3415. expr_loc, msg,
  3416. fold_convert (long_integer_type_node, tmp),
  3417. fold_convert (long_integer_type_node, ubound),
  3418. fold_convert (long_integer_type_node, lbound));
  3419. free (msg);
  3420. }
  3421. else
  3422. {
  3423. msg = xasprintf ("Index '%%ld' of dimension %d of array '%s' "
  3424. "below lower bound of %%ld",
  3425. dim + 1, expr_name);
  3426. gfc_trans_runtime_check (true, false, tmp2, &inner,
  3427. expr_loc, msg,
  3428. fold_convert (long_integer_type_node, tmp),
  3429. fold_convert (long_integer_type_node, lbound));
  3430. free (msg);
  3431. }
  3432. /* Check the section sizes match. */
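/* The section extent is max ((end - start) / stride + 1, 0); compute it
and compare it with the extent remembered from the first section.  */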
  3433. tmp = fold_build2_loc (input_location, MINUS_EXPR,
  3434. gfc_array_index_type, end,
  3435. info->start[dim]);
  3436. tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
  3437. gfc_array_index_type, tmp,
  3438. info->stride[dim]);
  3439. tmp = fold_build2_loc (input_location, PLUS_EXPR,
  3440. gfc_array_index_type,
  3441. gfc_index_one_node, tmp);
  3442. tmp = fold_build2_loc (input_location, MAX_EXPR,
  3443. gfc_array_index_type, tmp,
  3444. build_int_cst (gfc_array_index_type, 0));
  3445. /* We remember the size of the first section, and check all the
  3446. others against this. */
  3447. if (size[n])
  3448. {
  3449. tmp3 = fold_build2_loc (input_location, NE_EXPR,
  3450. boolean_type_node, tmp, size[n]);
  3451. msg = xasprintf ("Array bound mismatch for dimension %d "
  3452. "of array '%s' (%%ld/%%ld)",
  3453. dim + 1, expr_name);
  3454. gfc_trans_runtime_check (true, false, tmp3, &inner,
  3455. expr_loc, msg,
  3456. fold_convert (long_integer_type_node, tmp),
  3457. fold_convert (long_integer_type_node, size[n]));
  3458. free (msg);
  3459. }
  3460. else
  3461. size[n] = gfc_evaluate_now (tmp, &inner);
  3462. }
  3463. tmp = gfc_finish_block (&inner);
  3464. /* For optional arguments, only check bounds if the argument is
  3465. present. */
  3466. if (expr->symtree->n.sym->attr.optional
  3467. || expr->symtree->n.sym->attr.not_always_present)
  3468. tmp = build3_v (COND_EXPR,
  3469. gfc_conv_expr_present (expr->symtree->n.sym),
  3470. tmp, build_empty_stmt (input_location));
  3471. gfc_add_expr_to_block (&block, tmp);
  3472. }
  3473. tmp = gfc_finish_block (&block);
  3474. gfc_add_expr_to_block (&outer_loop->pre, tmp);
  3475. }
  3476. for (loop = loop->nested; loop; loop = loop->next)
  3477. gfc_conv_ss_startstride (loop);
  3478. }
  3479. /* Return true if both symbols could refer to the same data object. Does
  3480. not take account of aliasing due to equivalence statements. */
  3481. static int
  3482. symbols_could_alias (gfc_symbol *lsym, gfc_symbol *rsym, bool lsym_pointer,
  3483. bool lsym_target, bool rsym_pointer, bool rsym_target)
  3484. {
  3485. /* Aliasing isn't possible if the symbols have different base types. */
  3486. if (gfc_compare_types (&lsym->ts, &rsym->ts) == 0)
  3487. return 0;
  3488. /* Pointers can point to other pointers and target objects. */
  3489. if ((lsym_pointer && (rsym_pointer || rsym_target))
  3490. || (rsym_pointer && (lsym_pointer || lsym_target)))
  3491. return 1;
  3492. /* Special case: Argument association, cf. F90 12.4.1.6, F2003 12.4.1.7
  3493. and F2008 12.5.2.13 items 3b and 4b. The pointer case (a) is already
  3494. checked above. */
  3495. if (lsym_target && rsym_target
  3496. && ((lsym->attr.dummy && !lsym->attr.contiguous
  3497. && (!lsym->attr.dimension || lsym->as->type == AS_ASSUMED_SHAPE))
  3498. || (rsym->attr.dummy && !rsym->attr.contiguous
  3499. && (!rsym->attr.dimension
  3500. || rsym->as->type == AS_ASSUMED_SHAPE))))
  3501. return 1;
  3502. return 0;
  3503. }
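/* For illustration only (hand-written Fortran sketch, not taken from the
   original sources): given
     real, target  :: a(10)
     real, pointer :: p(:)
     p => a
   an assignment such as p(2:6) = a(1:5) pairs a pointer with a target of
   the same base type, so symbols_could_alias must report that the two
   symbols may refer to the same storage.  */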
  3504. /* Return true if the two SS could be aliased, i.e. both point to the same data
  3505. object. */
  3506. /* TODO: resolve aliases based on frontend expressions. */
  3507. static int
  3508. gfc_could_be_alias (gfc_ss * lss, gfc_ss * rss)
  3509. {
  3510. gfc_ref *lref;
  3511. gfc_ref *rref;
  3512. gfc_expr *lexpr, *rexpr;
  3513. gfc_symbol *lsym;
  3514. gfc_symbol *rsym;
  3515. bool lsym_pointer, lsym_target, rsym_pointer, rsym_target;
  3516. lexpr = lss->info->expr;
  3517. rexpr = rss->info->expr;
  3518. lsym = lexpr->symtree->n.sym;
  3519. rsym = rexpr->symtree->n.sym;
  3520. lsym_pointer = lsym->attr.pointer;
  3521. lsym_target = lsym->attr.target;
  3522. rsym_pointer = rsym->attr.pointer;
  3523. rsym_target = rsym->attr.target;
  3524. if (symbols_could_alias (lsym, rsym, lsym_pointer, lsym_target,
  3525. rsym_pointer, rsym_target))
  3526. return 1;
  3527. if (rsym->ts.type != BT_DERIVED && rsym->ts.type != BT_CLASS
  3528. && lsym->ts.type != BT_DERIVED && lsym->ts.type != BT_CLASS)
  3529. return 0;
  3530. /* For derived types we must check all the component types. We can ignore
  3531. array references as these will have the same base type as the previous
  3532. component ref. */
  3533. for (lref = lexpr->ref; lref != lss->info->data.array.ref; lref = lref->next)
  3534. {
  3535. if (lref->type != REF_COMPONENT)
  3536. continue;
  3537. lsym_pointer = lsym_pointer || lref->u.c.sym->attr.pointer;
  3538. lsym_target = lsym_target || lref->u.c.sym->attr.target;
  3539. if (symbols_could_alias (lref->u.c.sym, rsym, lsym_pointer, lsym_target,
  3540. rsym_pointer, rsym_target))
  3541. return 1;
  3542. if ((lsym_pointer && (rsym_pointer || rsym_target))
  3543. || (rsym_pointer && (lsym_pointer || lsym_target)))
  3544. {
  3545. if (gfc_compare_types (&lref->u.c.component->ts,
  3546. &rsym->ts))
  3547. return 1;
  3548. }
  3549. for (rref = rexpr->ref; rref != rss->info->data.array.ref;
  3550. rref = rref->next)
  3551. {
  3552. if (rref->type != REF_COMPONENT)
  3553. continue;
  3554. rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer;
3555. rsym_target = rsym_target || rref->u.c.sym->attr.target;
  3556. if (symbols_could_alias (lref->u.c.sym, rref->u.c.sym,
  3557. lsym_pointer, lsym_target,
  3558. rsym_pointer, rsym_target))
  3559. return 1;
  3560. if ((lsym_pointer && (rsym_pointer || rsym_target))
  3561. || (rsym_pointer && (lsym_pointer || lsym_target)))
  3562. {
  3563. if (gfc_compare_types (&lref->u.c.component->ts,
  3564. &rref->u.c.sym->ts))
  3565. return 1;
  3566. if (gfc_compare_types (&lref->u.c.sym->ts,
  3567. &rref->u.c.component->ts))
  3568. return 1;
  3569. if (gfc_compare_types (&lref->u.c.component->ts,
  3570. &rref->u.c.component->ts))
  3571. return 1;
  3572. }
  3573. }
  3574. }
3575. lsym_pointer = lsym->attr.pointer;
3576. lsym_target = lsym->attr.target;
  3579. for (rref = rexpr->ref; rref != rss->info->data.array.ref; rref = rref->next)
  3580. {
  3581. if (rref->type != REF_COMPONENT)
  3582. break;
  3583. rsym_pointer = rsym_pointer || rref->u.c.sym->attr.pointer;
3584. rsym_target = rsym_target || rref->u.c.sym->attr.target;
  3585. if (symbols_could_alias (rref->u.c.sym, lsym,
  3586. lsym_pointer, lsym_target,
  3587. rsym_pointer, rsym_target))
  3588. return 1;
  3589. if ((lsym_pointer && (rsym_pointer || rsym_target))
  3590. || (rsym_pointer && (lsym_pointer || lsym_target)))
  3591. {
  3592. if (gfc_compare_types (&lsym->ts, &rref->u.c.component->ts))
  3593. return 1;
  3594. }
  3595. }
  3596. return 0;
  3597. }
  3598. /* Resolve array data dependencies. Creates a temporary if required. */
  3599. /* TODO: Calc dependencies with gfc_expr rather than gfc_ss, and move to
  3600. dependency.c. */
  3601. void
  3602. gfc_conv_resolve_dependencies (gfc_loopinfo * loop, gfc_ss * dest,
  3603. gfc_ss * rss)
  3604. {
  3605. gfc_ss *ss;
  3606. gfc_ref *lref;
  3607. gfc_ref *rref;
  3608. gfc_expr *dest_expr;
  3609. gfc_expr *ss_expr;
  3610. int nDepend = 0;
  3611. int i, j;
  3612. loop->temp_ss = NULL;
  3613. dest_expr = dest->info->expr;
  3614. for (ss = rss; ss != gfc_ss_terminator; ss = ss->next)
  3615. {
  3616. ss_expr = ss->info->expr;
  3617. if (ss->info->array_outer_dependency)
  3618. {
  3619. nDepend = 1;
  3620. break;
  3621. }
  3622. if (ss->info->type != GFC_SS_SECTION)
  3623. {
  3624. if (flag_realloc_lhs
  3625. && dest_expr != ss_expr
  3626. && gfc_is_reallocatable_lhs (dest_expr)
  3627. && ss_expr->rank)
  3628. nDepend = gfc_check_dependency (dest_expr, ss_expr, true);
  3629. /* Check for cases like c(:)(1:2) = c(2)(2:3) */
  3630. if (!nDepend && dest_expr->rank > 0
  3631. && dest_expr->ts.type == BT_CHARACTER
  3632. && ss_expr->expr_type == EXPR_VARIABLE)
  3633. nDepend = gfc_check_dependency (dest_expr, ss_expr, false);
  3634. continue;
  3635. }
  3636. if (dest_expr->symtree->n.sym != ss_expr->symtree->n.sym)
  3637. {
  3638. if (gfc_could_be_alias (dest, ss)
  3639. || gfc_are_equivalenced_arrays (dest_expr, ss_expr))
  3640. {
  3641. nDepend = 1;
  3642. break;
  3643. }
  3644. }
  3645. else
  3646. {
  3647. lref = dest_expr->ref;
  3648. rref = ss_expr->ref;
  3649. nDepend = gfc_dep_resolver (lref, rref, &loop->reverse[0]);
  3650. if (nDepend == 1)
  3651. break;
  3652. for (i = 0; i < dest->dimen; i++)
  3653. for (j = 0; j < ss->dimen; j++)
  3654. if (i != j
  3655. && dest->dim[i] == ss->dim[j])
  3656. {
  3657. /* If we don't access array elements in the same order,
  3658. there is a dependency. */
  3659. nDepend = 1;
  3660. goto temporary;
  3661. }
  3662. #if 0
  3663. /* TODO : loop shifting. */
  3664. if (nDepend == 1)
  3665. {
  3666. /* Mark the dimensions for LOOP SHIFTING */
  3667. for (n = 0; n < loop->dimen; n++)
  3668. {
  3669. int dim = dest->data.info.dim[n];
  3670. if (lref->u.ar.dimen_type[dim] == DIMEN_VECTOR)
  3671. depends[n] = 2;
  3672. else if (! gfc_is_same_range (&lref->u.ar,
  3673. &rref->u.ar, dim, 0))
  3674. depends[n] = 1;
  3675. }
  3676. /* Put all the dimensions with dependencies in the
  3677. innermost loops. */
  3678. dim = 0;
  3679. for (n = 0; n < loop->dimen; n++)
  3680. {
  3681. gcc_assert (loop->order[n] == n);
  3682. if (depends[n])
  3683. loop->order[dim++] = n;
  3684. }
  3685. for (n = 0; n < loop->dimen; n++)
  3686. {
  3687. if (! depends[n])
  3688. loop->order[dim++] = n;
  3689. }
  3690. gcc_assert (dim == loop->dimen);
  3691. break;
  3692. }
  3693. #endif
  3694. }
  3695. }
  3696. temporary:
  3697. if (nDepend == 1)
  3698. {
  3699. tree base_type = gfc_typenode_for_spec (&dest_expr->ts);
  3700. if (GFC_ARRAY_TYPE_P (base_type)
  3701. || GFC_DESCRIPTOR_TYPE_P (base_type))
  3702. base_type = gfc_get_element_type (base_type);
  3703. loop->temp_ss = gfc_get_temp_ss (base_type, dest->info->string_length,
  3704. loop->dimen);
  3705. gfc_add_ss_to_loop (loop, loop->temp_ss);
  3706. }
  3707. else
  3708. loop->temp_ss = NULL;
  3709. }
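/* For illustration only (hand-written example, not from the original
   sources): in
     a(2:5) = a(1:4) + a(3:6)
   the left-hand side overlaps one operand "forwards" and the other
   "backwards", so no single loop direction is safe; the dependency is
   detected above and a temporary SS is added so that the right-hand side
   is evaluated completely before being stored.  */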
  3710. /* Browse through each array's information from the scalarizer and set the loop
  3711. bounds according to the "best" one (per dimension), i.e. the one which
  3712. provides the most information (constant bounds, shape, etc.). */
  3713. static void
  3714. set_loop_bounds (gfc_loopinfo *loop)
  3715. {
  3716. int n, dim, spec_dim;
  3717. gfc_array_info *info;
  3718. gfc_array_info *specinfo;
  3719. gfc_ss *ss;
  3720. tree tmp;
  3721. gfc_ss **loopspec;
  3722. bool dynamic[GFC_MAX_DIMENSIONS];
  3723. mpz_t *cshape;
  3724. mpz_t i;
  3725. bool nonoptional_arr;
  3726. gfc_loopinfo * const outer_loop = outermost_loop (loop);
  3727. loopspec = loop->specloop;
  3728. mpz_init (i);
  3729. for (n = 0; n < loop->dimen; n++)
  3730. {
  3731. loopspec[n] = NULL;
  3732. dynamic[n] = false;
  3733. /* If there are both optional and nonoptional array arguments, scalarize
  3734. over the nonoptional; otherwise, it does not matter as then all
3735. (optional) arrays have to be present per F2008, 12.5.2.12p3(6). */
  3736. nonoptional_arr = false;
  3737. for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
  3738. if (ss->info->type != GFC_SS_SCALAR && ss->info->type != GFC_SS_TEMP
  3739. && ss->info->type != GFC_SS_REFERENCE && !ss->info->can_be_null_ref)
  3740. {
  3741. nonoptional_arr = true;
  3742. break;
  3743. }
  3744. /* We use one SS term, and use that to determine the bounds of the
  3745. loop for this dimension. We try to pick the simplest term. */
  3746. for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
  3747. {
  3748. gfc_ss_type ss_type;
  3749. ss_type = ss->info->type;
  3750. if (ss_type == GFC_SS_SCALAR
  3751. || ss_type == GFC_SS_TEMP
  3752. || ss_type == GFC_SS_REFERENCE
  3753. || (ss->info->can_be_null_ref && nonoptional_arr))
  3754. continue;
  3755. info = &ss->info->data.array;
  3756. dim = ss->dim[n];
  3757. if (loopspec[n] != NULL)
  3758. {
  3759. specinfo = &loopspec[n]->info->data.array;
  3760. spec_dim = loopspec[n]->dim[n];
  3761. }
  3762. else
  3763. {
  3764. /* Silence uninitialized warnings. */
  3765. specinfo = NULL;
  3766. spec_dim = 0;
  3767. }
  3768. if (info->shape)
  3769. {
  3770. gcc_assert (info->shape[dim]);
  3771. /* The frontend has worked out the size for us. */
  3772. if (!loopspec[n]
  3773. || !specinfo->shape
  3774. || !integer_zerop (specinfo->start[spec_dim]))
  3775. /* Prefer zero-based descriptors if possible. */
  3776. loopspec[n] = ss;
  3777. continue;
  3778. }
  3779. if (ss_type == GFC_SS_CONSTRUCTOR)
  3780. {
  3781. gfc_constructor_base base;
  3782. /* An unknown size constructor will always be rank one.
  3783. Higher rank constructors will either have known shape,
  3784. or still be wrapped in a call to reshape. */
  3785. gcc_assert (loop->dimen == 1);
  3786. /* Always prefer to use the constructor bounds if the size
  3787. can be determined at compile time. Prefer not to otherwise,
  3788. since the general case involves realloc, and it's better to
  3789. avoid that overhead if possible. */
  3790. base = ss->info->expr->value.constructor;
  3791. dynamic[n] = gfc_get_array_constructor_size (&i, base);
  3792. if (!dynamic[n] || !loopspec[n])
  3793. loopspec[n] = ss;
  3794. continue;
  3795. }
  3796. /* Avoid using an allocatable lhs in an assignment, since
  3797. there might be a reallocation coming. */
  3798. if (loopspec[n] && ss->is_alloc_lhs)
  3799. continue;
  3800. if (!loopspec[n])
  3801. loopspec[n] = ss;
  3802. /* Criteria for choosing a loop specifier (most important first):
  3803. doesn't need realloc
  3804. stride of one
  3805. known stride
  3806. known lower bound
  3807. known upper bound
  3808. */
  3809. else if (loopspec[n]->info->type == GFC_SS_CONSTRUCTOR && dynamic[n])
  3810. loopspec[n] = ss;
  3811. else if (integer_onep (info->stride[dim])
  3812. && !integer_onep (specinfo->stride[spec_dim]))
  3813. loopspec[n] = ss;
  3814. else if (INTEGER_CST_P (info->stride[dim])
  3815. && !INTEGER_CST_P (specinfo->stride[spec_dim]))
  3816. loopspec[n] = ss;
  3817. else if (INTEGER_CST_P (info->start[dim])
  3818. && !INTEGER_CST_P (specinfo->start[spec_dim])
  3819. && integer_onep (info->stride[dim])
  3820. == integer_onep (specinfo->stride[spec_dim])
  3821. && INTEGER_CST_P (info->stride[dim])
  3822. == INTEGER_CST_P (specinfo->stride[spec_dim]))
  3823. loopspec[n] = ss;
  3824. /* We don't work out the upper bound.
  3825. else if (INTEGER_CST_P (info->finish[n])
  3826. && ! INTEGER_CST_P (specinfo->finish[n]))
  3827. loopspec[n] = ss; */
  3828. }
  3829. /* We should have found the scalarization loop specifier. If not,
  3830. that's bad news. */
  3831. gcc_assert (loopspec[n]);
  3832. info = &loopspec[n]->info->data.array;
  3833. dim = loopspec[n]->dim[n];
  3834. /* Set the extents of this range. */
  3835. cshape = info->shape;
  3836. if (cshape && INTEGER_CST_P (info->start[dim])
  3837. && INTEGER_CST_P (info->stride[dim]))
  3838. {
  3839. loop->from[n] = info->start[dim];
  3840. mpz_set (i, cshape[get_array_ref_dim_for_loop_dim (loopspec[n], n)]);
  3841. mpz_sub_ui (i, i, 1);
  3842. /* To = from + (size - 1) * stride. */
  3843. tmp = gfc_conv_mpz_to_tree (i, gfc_index_integer_kind);
  3844. if (!integer_onep (info->stride[dim]))
  3845. tmp = fold_build2_loc (input_location, MULT_EXPR,
  3846. gfc_array_index_type, tmp,
  3847. info->stride[dim]);
  3848. loop->to[n] = fold_build2_loc (input_location, PLUS_EXPR,
  3849. gfc_array_index_type,
  3850. loop->from[n], tmp);
  3851. }
  3852. else
  3853. {
  3854. loop->from[n] = info->start[dim];
  3855. switch (loopspec[n]->info->type)
  3856. {
  3857. case GFC_SS_CONSTRUCTOR:
  3858. /* The upper bound is calculated when we expand the
  3859. constructor. */
  3860. gcc_assert (loop->to[n] == NULL_TREE);
  3861. break;
  3862. case GFC_SS_SECTION:
  3863. /* Use the end expression if it exists and is not constant,
  3864. so that it is only evaluated once. */
  3865. loop->to[n] = info->end[dim];
  3866. break;
  3867. case GFC_SS_FUNCTION:
  3868. /* The loop bound will be set when we generate the call. */
  3869. gcc_assert (loop->to[n] == NULL_TREE);
  3870. break;
  3871. case GFC_SS_INTRINSIC:
  3872. {
  3873. gfc_expr *expr = loopspec[n]->info->expr;
  3874. /* The {l,u}bound of an assumed rank. */
  3875. gcc_assert ((expr->value.function.isym->id == GFC_ISYM_LBOUND
  3876. || expr->value.function.isym->id == GFC_ISYM_UBOUND)
  3877. && expr->value.function.actual->next->expr == NULL
  3878. && expr->value.function.actual->expr->rank == -1);
  3879. loop->to[n] = info->end[dim];
  3880. break;
  3881. }
  3882. default:
  3883. gcc_unreachable ();
  3884. }
  3885. }
  3886. /* Transform everything so we have a simple incrementing variable. */
  3887. if (integer_onep (info->stride[dim]))
  3888. info->delta[dim] = gfc_index_zero_node;
  3889. else
  3890. {
  3891. /* Set the delta for this section. */
  3892. info->delta[dim] = gfc_evaluate_now (loop->from[n], &outer_loop->pre);
3893. /* The number of iterations is (end - start + step) / step;
3894. with start = 0 this simplifies to
3895. last = end / step;
3896. for (i = 0; i <= last; i++) {...}; */
  3897. tmp = fold_build2_loc (input_location, MINUS_EXPR,
  3898. gfc_array_index_type, loop->to[n],
  3899. loop->from[n]);
  3900. tmp = fold_build2_loc (input_location, FLOOR_DIV_EXPR,
  3901. gfc_array_index_type, tmp, info->stride[dim]);
  3902. tmp = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
  3903. tmp, build_int_cst (gfc_array_index_type, -1));
  3904. loop->to[n] = gfc_evaluate_now (tmp, &outer_loop->pre);
  3905. /* Make the loop variable start at 0. */
  3906. loop->from[n] = gfc_index_zero_node;
  3907. }
  3908. }
  3909. mpz_clear (i);
  3910. for (loop = loop->nested; loop; loop = loop->next)
  3911. set_loop_bounds (loop);
  3912. }
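/* For illustration only (hand-written example, not from the original
   sources): in
     x(1:n) = y(1:2*n:2)
   both sections are scalarized by the same loop; assuming x is not an
   allocatable left-hand side (which the code above avoids choosing), the
   unit-stride section x(1:n) wins the criteria above and supplies the
   loop bounds, while y is then accessed through its own stride and
   delta.  */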
  3913. /* Initialize the scalarization loop. Creates the loop variables. Determines
  3914. the range of the loop variables. Creates a temporary if required.
  3915. Also generates code for scalar expressions which have been
  3916. moved outside the loop. */
  3917. void
  3918. gfc_conv_loop_setup (gfc_loopinfo * loop, locus * where)
  3919. {
  3920. gfc_ss *tmp_ss;
  3921. tree tmp;
  3922. set_loop_bounds (loop);
  3923. /* Add all the scalar code that can be taken out of the loops.
  3924. This may include calculating the loop bounds, so do it before
  3925. allocating the temporary. */
  3926. gfc_add_loop_ss_code (loop, loop->ss, false, where);
  3927. tmp_ss = loop->temp_ss;
  3928. /* If we want a temporary then create it. */
  3929. if (tmp_ss != NULL)
  3930. {
  3931. gfc_ss_info *tmp_ss_info;
  3932. tmp_ss_info = tmp_ss->info;
  3933. gcc_assert (tmp_ss_info->type == GFC_SS_TEMP);
  3934. gcc_assert (loop->parent == NULL);
  3935. /* Make absolutely sure that this is a complete type. */
  3936. if (tmp_ss_info->string_length)
  3937. tmp_ss_info->data.temp.type
  3938. = gfc_get_character_type_len_for_eltype
  3939. (TREE_TYPE (tmp_ss_info->data.temp.type),
  3940. tmp_ss_info->string_length);
  3941. tmp = tmp_ss_info->data.temp.type;
  3942. memset (&tmp_ss_info->data.array, 0, sizeof (gfc_array_info));
  3943. tmp_ss_info->type = GFC_SS_SECTION;
  3944. gcc_assert (tmp_ss->dimen != 0);
  3945. gfc_trans_create_temp_array (&loop->pre, &loop->post, tmp_ss, tmp,
  3946. NULL_TREE, false, true, false, where);
  3947. }
  3948. /* For array parameters we don't have loop variables, so don't calculate the
  3949. translations. */
  3950. if (!loop->array_parameter)
  3951. gfc_set_delta (loop);
  3952. }
  3953. /* Calculates how to transform from loop variables to array indices for each
  3954. array: once loop bounds are chosen, sets the difference (DELTA field) between
  3955. loop bounds and array reference bounds, for each array info. */
  3956. void
  3957. gfc_set_delta (gfc_loopinfo *loop)
  3958. {
  3959. gfc_ss *ss, **loopspec;
  3960. gfc_array_info *info;
  3961. tree tmp;
  3962. int n, dim;
  3963. gfc_loopinfo * const outer_loop = outermost_loop (loop);
  3964. loopspec = loop->specloop;
  3965. /* Calculate the translation from loop variables to array indices. */
  3966. for (ss = loop->ss; ss != gfc_ss_terminator; ss = ss->loop_chain)
  3967. {
  3968. gfc_ss_type ss_type;
  3969. ss_type = ss->info->type;
  3970. if (ss_type != GFC_SS_SECTION
  3971. && ss_type != GFC_SS_COMPONENT
  3972. && ss_type != GFC_SS_CONSTRUCTOR)
  3973. continue;
  3974. info = &ss->info->data.array;
  3975. for (n = 0; n < ss->dimen; n++)
  3976. {
  3977. /* If we are specifying the range the delta is already set. */
  3978. if (loopspec[n] != ss)
  3979. {
  3980. dim = ss->dim[n];
  3981. /* Calculate the offset relative to the loop variable.
  3982. First multiply by the stride. */
  3983. tmp = loop->from[n];
  3984. if (!integer_onep (info->stride[dim]))
  3985. tmp = fold_build2_loc (input_location, MULT_EXPR,
  3986. gfc_array_index_type,
  3987. tmp, info->stride[dim]);
  3988. /* Then subtract this from our starting value. */
  3989. tmp = fold_build2_loc (input_location, MINUS_EXPR,
  3990. gfc_array_index_type,
  3991. info->start[dim], tmp);
  3992. info->delta[dim] = gfc_evaluate_now (tmp, &outer_loop->pre);
  3993. }
  3994. }
  3995. }
  3996. for (loop = loop->nested; loop; loop = loop->next)
  3997. gfc_set_delta (loop);
  3998. }
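/* Worked example (editorial illustration with made-up numbers): if the
   loop bounds were taken from a section starting at index 5, while this
   array's section starts at index 1 with stride 1, then
     delta = start - from = 1 - 5 = -4,
   so adding delta when the loop variable is turned back into an array
   index maps loop value 5 onto array index 1, as intended.  */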
  3999. /* Calculate the size of a given array dimension from the bounds. This
  4000. is simply (ubound - lbound + 1) if this expression is positive
  4001. or 0 if it is negative (pick either one if it is zero). Optionally
4002. (if or_expr is present) OR the (extent <= 0) condition to it. */
  4003. tree
  4004. gfc_conv_array_extent_dim (tree lbound, tree ubound, tree* or_expr)
  4005. {
  4006. tree res;
  4007. tree cond;
  4008. /* Calculate (ubound - lbound + 1). */
  4009. res = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
  4010. ubound, lbound);
  4011. res = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type, res,
  4012. gfc_index_one_node);
  4013. /* Check whether the size for this dimension is negative. */
  4014. cond = fold_build2_loc (input_location, LE_EXPR, boolean_type_node, res,
  4015. gfc_index_zero_node);
  4016. res = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type, cond,
  4017. gfc_index_zero_node, res);
  4018. /* Build OR expression. */
  4019. if (or_expr)
  4020. *or_expr = fold_build2_loc (input_location, TRUTH_OR_EXPR,
  4021. boolean_type_node, *or_expr, cond);
  4022. return res;
  4023. }
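/* Worked example (editorial illustration): for bounds 2:5 the extent is
   5 - 2 + 1 = 4; for bounds 5:2 the raw value is -2, which the COND_EXPR
   above clamps to 0, and the (extent <= 0) condition is ORed into
   *or_expr so that callers can detect an empty array.  */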
  4024. /* For an array descriptor, get the total number of elements. This is just
  4025. the product of the extents along from_dim to to_dim. */
  4026. static tree
  4027. gfc_conv_descriptor_size_1 (tree desc, int from_dim, int to_dim)
  4028. {
  4029. tree res;
  4030. int dim;
  4031. res = gfc_index_one_node;
  4032. for (dim = from_dim; dim < to_dim; ++dim)
  4033. {
  4034. tree lbound;
  4035. tree ubound;
  4036. tree extent;
  4037. lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[dim]);
  4038. ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[dim]);
  4039. extent = gfc_conv_array_extent_dim (lbound, ubound, NULL);
  4040. res = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
  4041. res, extent);
  4042. }
  4043. return res;
  4044. }
  4045. /* Full size of an array. */
  4046. tree
  4047. gfc_conv_descriptor_size (tree desc, int rank)
  4048. {
  4049. return gfc_conv_descriptor_size_1 (desc, 0, rank);
  4050. }
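/* Worked example (editorial illustration): for a descriptor describing
   a(2:4, 0:1) the per-dimension extents are 3 and 2, so the expression
   returned by gfc_conv_descriptor_size evaluates to 3 * 2 = 6.  */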
  4051. /* Size of a coarray for all dimensions but the last. */
  4052. tree
  4053. gfc_conv_descriptor_cosize (tree desc, int rank, int corank)
  4054. {
  4055. return gfc_conv_descriptor_size_1 (desc, rank, rank + corank - 1);
  4056. }
4057. /* Fills in an array descriptor and returns the size of the array.
4058. The size will be a simple_val, i.e. a variable or a constant. Also
4059. calculates the offset of the base. The pointer argument overflow,
4060. which should be of integer type, will increase in value if overflow
4061. occurs during the size calculation.
  4062. {
  4063. stride = 1;
  4064. offset = 0;
  4065. for (n = 0; n < rank; n++)
  4066. {
  4067. a.lbound[n] = specified_lower_bound;
4068. offset = offset - a.lbound[n] * stride;
  4069. size = 1 - lbound;
  4070. a.ubound[n] = specified_upper_bound;
  4071. a.stride[n] = stride;
  4072. size = size >= 0 ? ubound + size : 0; //size = ubound + 1 - lbound
  4073. overflow += size == 0 ? 0: (MAX/size < stride ? 1: 0);
  4074. stride = stride * size;
  4075. }
  4076. for (n = rank; n < rank+corank; n++)
  4077. (Set lcobound/ucobound as above.)
  4078. element_size = sizeof (array element);
  4079. if (!rank)
  4080. return element_size
  4081. stride = (size_t) stride;
  4082. overflow += element_size == 0 ? 0: (MAX/element_size < stride ? 1: 0);
  4083. stride = stride * element_size;
  4084. return (stride);
  4085. } */
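/* Worked example of the pseudocode above (editorial illustration):
   allocating a(0:2, 1:4) with an 8-byte element type gives
     n = 0:  lbound = 0, offset = 0,  size = 3, stride -> 3
     n = 1:  lbound = 1, offset = -3, size = 4, stride -> 12
   so 12 elements (96 bytes) are requested, and the stored offset of -3
   makes a(0,1) land on element 0 of the data block.  */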
  4086. /*GCC ARRAYS*/
  4087. static tree
  4088. gfc_array_init_size (tree descriptor, int rank, int corank, tree * poffset,
  4089. gfc_expr ** lower, gfc_expr ** upper, stmtblock_t * pblock,
  4090. stmtblock_t * descriptor_block, tree * overflow,
  4091. tree expr3_elem_size, tree *nelems, gfc_expr *expr3)
  4092. {
  4093. tree type;
  4094. tree tmp;
  4095. tree size;
  4096. tree offset;
  4097. tree stride;
  4098. tree element_size;
  4099. tree or_expr;
  4100. tree thencase;
  4101. tree elsecase;
  4102. tree cond;
  4103. tree var;
  4104. stmtblock_t thenblock;
  4105. stmtblock_t elseblock;
  4106. gfc_expr *ubound;
  4107. gfc_se se;
  4108. int n;
  4109. type = TREE_TYPE (descriptor);
  4110. stride = gfc_index_one_node;
  4111. offset = gfc_index_zero_node;
  4112. /* Set the dtype. */
  4113. tmp = gfc_conv_descriptor_dtype (descriptor);
  4114. gfc_add_modify (descriptor_block, tmp, gfc_get_dtype (type));
  4115. or_expr = boolean_false_node;
  4116. for (n = 0; n < rank; n++)
  4117. {
  4118. tree conv_lbound;
  4119. tree conv_ubound;
  4120. /* We have 3 possibilities for determining the size of the array:
  4121. lower == NULL => lbound = 1, ubound = upper[n]
  4122. upper[n] = NULL => lbound = 1, ubound = lower[n]
  4123. upper[n] != NULL => lbound = lower[n], ubound = upper[n] */
  4124. ubound = upper[n];
  4125. /* Set lower bound. */
  4126. gfc_init_se (&se, NULL);
  4127. if (lower == NULL)
  4128. se.expr = gfc_index_one_node;
  4129. else
  4130. {
  4131. gcc_assert (lower[n]);
  4132. if (ubound)
  4133. {
  4134. gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
  4135. gfc_add_block_to_block (pblock, &se.pre);
  4136. }
  4137. else
  4138. {
  4139. se.expr = gfc_index_one_node;
  4140. ubound = lower[n];
  4141. }
  4142. }
  4143. gfc_conv_descriptor_lbound_set (descriptor_block, descriptor,
  4144. gfc_rank_cst[n], se.expr);
  4145. conv_lbound = se.expr;
  4146. /* Work out the offset for this component. */
  4147. tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
  4148. se.expr, stride);
  4149. offset = fold_build2_loc (input_location, MINUS_EXPR,
  4150. gfc_array_index_type, offset, tmp);
  4151. /* Set upper bound. */
  4152. gfc_init_se (&se, NULL);
  4153. gcc_assert (ubound);
  4154. gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
  4155. gfc_add_block_to_block (pblock, &se.pre);
  4156. gfc_conv_descriptor_ubound_set (descriptor_block, descriptor,
  4157. gfc_rank_cst[n], se.expr);
  4158. conv_ubound = se.expr;
  4159. /* Store the stride. */
  4160. gfc_conv_descriptor_stride_set (descriptor_block, descriptor,
  4161. gfc_rank_cst[n], stride);
  4162. /* Calculate size and check whether extent is negative. */
  4163. size = gfc_conv_array_extent_dim (conv_lbound, conv_ubound, &or_expr);
  4164. size = gfc_evaluate_now (size, pblock);
  4165. /* Check whether multiplying the stride by the number of
  4166. elements in this dimension would overflow. We must also check
  4167. whether the current dimension has zero size in order to avoid
  4168. division by zero.
  4169. */
  4170. tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
  4171. gfc_array_index_type,
  4172. fold_convert (gfc_array_index_type,
  4173. TYPE_MAX_VALUE (gfc_array_index_type)),
  4174. size);
  4175. cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
  4176. boolean_type_node, tmp, stride),
  4177. PRED_FORTRAN_OVERFLOW);
  4178. tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
  4179. integer_one_node, integer_zero_node);
  4180. cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
  4181. boolean_type_node, size,
  4182. gfc_index_zero_node),
  4183. PRED_FORTRAN_SIZE_ZERO);
  4184. tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
  4185. integer_zero_node, tmp);
  4186. tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
  4187. *overflow, tmp);
  4188. *overflow = gfc_evaluate_now (tmp, pblock);
  4189. /* Multiply the stride by the number of elements in this dimension. */
  4190. stride = fold_build2_loc (input_location, MULT_EXPR,
  4191. gfc_array_index_type, stride, size);
  4192. stride = gfc_evaluate_now (stride, pblock);
  4193. }
  4194. for (n = rank; n < rank + corank; n++)
  4195. {
  4196. ubound = upper[n];
  4197. /* Set lower bound. */
  4198. gfc_init_se (&se, NULL);
  4199. if (lower == NULL || lower[n] == NULL)
  4200. {
  4201. gcc_assert (n == rank + corank - 1);
  4202. se.expr = gfc_index_one_node;
  4203. }
  4204. else
  4205. {
  4206. if (ubound || n == rank + corank - 1)
  4207. {
  4208. gfc_conv_expr_type (&se, lower[n], gfc_array_index_type);
  4209. gfc_add_block_to_block (pblock, &se.pre);
  4210. }
  4211. else
  4212. {
  4213. se.expr = gfc_index_one_node;
  4214. ubound = lower[n];
  4215. }
  4216. }
  4217. gfc_conv_descriptor_lbound_set (descriptor_block, descriptor,
  4218. gfc_rank_cst[n], se.expr);
  4219. if (n < rank + corank - 1)
  4220. {
  4221. gfc_init_se (&se, NULL);
  4222. gcc_assert (ubound);
  4223. gfc_conv_expr_type (&se, ubound, gfc_array_index_type);
  4224. gfc_add_block_to_block (pblock, &se.pre);
  4225. gfc_conv_descriptor_ubound_set (descriptor_block, descriptor,
  4226. gfc_rank_cst[n], se.expr);
  4227. }
  4228. }
  4229. /* The stride is the number of elements in the array, so multiply by the
  4230. size of an element to get the total size. Obviously, if there is a
  4231. SOURCE expression (expr3) we must use its element size. */
  4232. if (expr3_elem_size != NULL_TREE)
  4233. tmp = expr3_elem_size;
  4234. else if (expr3 != NULL)
  4235. {
  4236. if (expr3->ts.type == BT_CLASS)
  4237. {
  4238. gfc_se se_sz;
  4239. gfc_expr *sz = gfc_copy_expr (expr3);
  4240. gfc_add_vptr_component (sz);
  4241. gfc_add_size_component (sz);
  4242. gfc_init_se (&se_sz, NULL);
  4243. gfc_conv_expr (&se_sz, sz);
  4244. gfc_free_expr (sz);
  4245. tmp = se_sz.expr;
  4246. }
  4247. else
  4248. {
  4249. tmp = gfc_typenode_for_spec (&expr3->ts);
  4250. tmp = TYPE_SIZE_UNIT (tmp);
  4251. }
  4252. }
  4253. else
  4254. tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
  4255. /* Convert to size_t. */
  4256. element_size = fold_convert (size_type_node, tmp);
  4257. if (rank == 0)
  4258. return element_size;
  4259. *nelems = gfc_evaluate_now (stride, pblock);
  4260. stride = fold_convert (size_type_node, stride);
  4261. /* First check for overflow. Since an array of type character can
  4262. have zero element_size, we must check for that before
  4263. dividing. */
  4264. tmp = fold_build2_loc (input_location, TRUNC_DIV_EXPR,
  4265. size_type_node,
  4266. TYPE_MAX_VALUE (size_type_node), element_size);
  4267. cond = gfc_unlikely (fold_build2_loc (input_location, LT_EXPR,
  4268. boolean_type_node, tmp, stride),
  4269. PRED_FORTRAN_OVERFLOW);
  4270. tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
  4271. integer_one_node, integer_zero_node);
  4272. cond = gfc_unlikely (fold_build2_loc (input_location, EQ_EXPR,
  4273. boolean_type_node, element_size,
  4274. build_int_cst (size_type_node, 0)),
  4275. PRED_FORTRAN_SIZE_ZERO);
  4276. tmp = fold_build3_loc (input_location, COND_EXPR, integer_type_node, cond,
  4277. integer_zero_node, tmp);
  4278. tmp = fold_build2_loc (input_location, PLUS_EXPR, integer_type_node,
  4279. *overflow, tmp);
  4280. *overflow = gfc_evaluate_now (tmp, pblock);
  4281. size = fold_build2_loc (input_location, MULT_EXPR, size_type_node,
  4282. stride, element_size);
  4283. if (poffset != NULL)
  4284. {
  4285. offset = gfc_evaluate_now (offset, pblock);
  4286. *poffset = offset;
  4287. }
  4288. if (integer_zerop (or_expr))
  4289. return size;
  4290. if (integer_onep (or_expr))
  4291. return build_int_cst (size_type_node, 0);
  4292. var = gfc_create_var (TREE_TYPE (size), "size");
  4293. gfc_start_block (&thenblock);
  4294. gfc_add_modify (&thenblock, var, build_int_cst (size_type_node, 0));
  4295. thencase = gfc_finish_block (&thenblock);
  4296. gfc_start_block (&elseblock);
  4297. gfc_add_modify (&elseblock, var, size);
  4298. elsecase = gfc_finish_block (&elseblock);
  4299. tmp = gfc_evaluate_now (or_expr, pblock);
  4300. tmp = build3_v (COND_EXPR, tmp, thencase, elsecase);
  4301. gfc_add_expr_to_block (pblock, tmp);
  4302. return var;
  4303. }
  4304. /* Initializes the descriptor and generates a call to _gfor_allocate. Does
  4305. the work for an ALLOCATE statement. */
  4306. /*GCC ARRAYS*/
  4307. bool
  4308. gfc_array_allocate (gfc_se * se, gfc_expr * expr, tree status, tree errmsg,
  4309. tree errlen, tree label_finish, tree expr3_elem_size,
  4310. tree *nelems, gfc_expr *expr3)
  4311. {
  4312. tree tmp;
  4313. tree pointer;
  4314. tree offset = NULL_TREE;
  4315. tree token = NULL_TREE;
  4316. tree size;
  4317. tree msg;
  4318. tree error = NULL_TREE;
  4319. tree overflow; /* Boolean storing whether size calculation overflows. */
  4320. tree var_overflow = NULL_TREE;
  4321. tree cond;
  4322. tree set_descriptor;
  4323. stmtblock_t set_descriptor_block;
  4324. stmtblock_t elseblock;
  4325. gfc_expr **lower;
  4326. gfc_expr **upper;
  4327. gfc_ref *ref, *prev_ref = NULL;
  4328. bool allocatable, coarray, dimension;
  4329. ref = expr->ref;
  4330. /* Find the last reference in the chain. */
  4331. while (ref && ref->next != NULL)
  4332. {
  4333. gcc_assert (ref->type != REF_ARRAY || ref->u.ar.type == AR_ELEMENT
  4334. || (ref->u.ar.dimen == 0 && ref->u.ar.codimen > 0));
  4335. prev_ref = ref;
  4336. ref = ref->next;
  4337. }
  4338. if (ref == NULL || ref->type != REF_ARRAY)
  4339. return false;
  4340. if (!prev_ref)
  4341. {
  4342. allocatable = expr->symtree->n.sym->attr.allocatable;
  4343. coarray = expr->symtree->n.sym->attr.codimension;
  4344. dimension = expr->symtree->n.sym->attr.dimension;
  4345. }
  4346. else
  4347. {
  4348. allocatable = prev_ref->u.c.component->attr.allocatable;
  4349. coarray = prev_ref->u.c.component->attr.codimension;
  4350. dimension = prev_ref->u.c.component->attr.dimension;
  4351. }
  4352. if (!dimension)
  4353. gcc_assert (coarray);
  4354. /* Figure out the size of the array. */
  4355. switch (ref->u.ar.type)
  4356. {
  4357. case AR_ELEMENT:
  4358. if (!coarray)
  4359. {
  4360. lower = NULL;
  4361. upper = ref->u.ar.start;
  4362. break;
  4363. }
  4364. /* Fall through. */
  4365. case AR_SECTION:
  4366. lower = ref->u.ar.start;
  4367. upper = ref->u.ar.end;
  4368. break;
  4369. case AR_FULL:
  4370. gcc_assert (ref->u.ar.as->type == AS_EXPLICIT);
  4371. lower = ref->u.ar.as->lower;
  4372. upper = ref->u.ar.as->upper;
  4373. break;
  4374. default:
  4375. gcc_unreachable ();
  4376. break;
  4377. }
  4378. overflow = integer_zero_node;
  4379. gfc_init_block (&set_descriptor_block);
  4380. size = gfc_array_init_size (se->expr, ref->u.ar.as->rank,
  4381. ref->u.ar.as->corank, &offset, lower, upper,
  4382. &se->pre, &set_descriptor_block, &overflow,
  4383. expr3_elem_size, nelems, expr3);
  4384. if (dimension)
  4385. {
  4386. var_overflow = gfc_create_var (integer_type_node, "overflow");
  4387. gfc_add_modify (&se->pre, var_overflow, overflow);
  4388. if (status == NULL_TREE)
  4389. {
  4390. /* Generate the block of code handling overflow. */
  4391. msg = gfc_build_addr_expr (pchar_type_node,
  4392. gfc_build_localized_cstring_const
  4393. ("Integer overflow when calculating the amount of "
  4394. "memory to allocate"));
  4395. error = build_call_expr_loc (input_location,
  4396. gfor_fndecl_runtime_error, 1, msg);
  4397. }
  4398. else
  4399. {
  4400. tree status_type = TREE_TYPE (status);
  4401. stmtblock_t set_status_block;
  4402. gfc_start_block (&set_status_block);
  4403. gfc_add_modify (&set_status_block, status,
  4404. build_int_cst (status_type, LIBERROR_ALLOCATION));
  4405. error = gfc_finish_block (&set_status_block);
  4406. }
  4407. }
  4408. gfc_start_block (&elseblock);
  4409. /* Allocate memory to store the data. */
  4410. if (POINTER_TYPE_P (TREE_TYPE (se->expr)))
  4411. se->expr = build_fold_indirect_ref_loc (input_location, se->expr);
  4412. pointer = gfc_conv_descriptor_data_get (se->expr);
  4413. STRIP_NOPS (pointer);
  4414. if (coarray && flag_coarray == GFC_FCOARRAY_LIB)
  4415. token = gfc_build_addr_expr (NULL_TREE,
  4416. gfc_conv_descriptor_token (se->expr));
  4417. /* The allocatable variant takes the old pointer as first argument. */
  4418. if (allocatable)
  4419. gfc_allocate_allocatable (&elseblock, pointer, size, token,
  4420. status, errmsg, errlen, label_finish, expr);
  4421. else
  4422. gfc_allocate_using_malloc (&elseblock, pointer, size, status);
  4423. if (dimension)
  4424. {
  4425. cond = gfc_unlikely (fold_build2_loc (input_location, NE_EXPR,
  4426. boolean_type_node, var_overflow, integer_zero_node),
  4427. PRED_FORTRAN_OVERFLOW);
  4428. tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node, cond,
  4429. error, gfc_finish_block (&elseblock));
  4430. }
  4431. else
  4432. tmp = gfc_finish_block (&elseblock);
  4433. gfc_add_expr_to_block (&se->pre, tmp);
  4434. /* Update the array descriptors. */
  4435. if (dimension)
  4436. gfc_conv_descriptor_offset_set (&set_descriptor_block, se->expr, offset);
  4437. set_descriptor = gfc_finish_block (&set_descriptor_block);
  4438. if (status != NULL_TREE)
  4439. {
  4440. cond = fold_build2_loc (input_location, EQ_EXPR,
  4441. boolean_type_node, status,
  4442. build_int_cst (TREE_TYPE (status), 0));
  4443. gfc_add_expr_to_block (&se->pre,
  4444. fold_build3_loc (input_location, COND_EXPR, void_type_node,
  4445. gfc_likely (cond, PRED_FORTRAN_FAIL_ALLOC),
  4446. set_descriptor,
  4447. build_empty_stmt (input_location)));
  4448. }
  4449. else
  4450. gfc_add_expr_to_block (&se->pre, set_descriptor);
  4451. if ((expr->ts.type == BT_DERIVED)
  4452. && expr->ts.u.derived->attr.alloc_comp)
  4453. {
  4454. tmp = gfc_nullify_alloc_comp (expr->ts.u.derived, se->expr,
  4455. ref->u.ar.as->rank);
  4456. gfc_add_expr_to_block (&se->pre, tmp);
  4457. }
  4458. return true;
  4459. }
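/* For illustration only (hand-written Fortran, not from the original
   sources): for
     allocate (a(n, m), stat=ierr)
   the code built above evaluates the bounds, accumulates the overflow
   flag while computing the allocation size, either reports the overflow
   (or stores it in IERR) or calls the allocation routine, and updates
   the descriptor fields only when IERR indicates success.  */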
  4460. /* Deallocate an array variable. Also used when an allocated variable goes
  4461. out of scope. */
  4462. /*GCC ARRAYS*/
  4463. tree
  4464. gfc_array_deallocate (tree descriptor, tree pstat, tree errmsg, tree errlen,
  4465. tree label_finish, gfc_expr* expr)
  4466. {
  4467. tree var;
  4468. tree tmp;
  4469. stmtblock_t block;
  4470. bool coarray = gfc_is_coarray (expr);
  4471. gfc_start_block (&block);
  4472. /* Get a pointer to the data. */
  4473. var = gfc_conv_descriptor_data_get (descriptor);
  4474. STRIP_NOPS (var);
  4475. /* Parameter is the address of the data component. */
  4476. tmp = gfc_deallocate_with_status (coarray ? descriptor : var, pstat, errmsg,
  4477. errlen, label_finish, false, expr, coarray);
  4478. gfc_add_expr_to_block (&block, tmp);
4479. /* Zero the data pointer; only for coarrays can an error occur here,
4480. in which case the allocation status must remain unchanged. */
  4481. tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
  4482. var, build_int_cst (TREE_TYPE (var), 0));
  4483. if (pstat != NULL_TREE && coarray && flag_coarray == GFC_FCOARRAY_LIB)
  4484. {
  4485. tree cond;
  4486. tree stat = build_fold_indirect_ref_loc (input_location, pstat);
  4487. cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
  4488. stat, build_int_cst (TREE_TYPE (stat), 0));
  4489. tmp = fold_build3_loc (input_location, COND_EXPR, void_type_node,
  4490. cond, tmp, build_empty_stmt (input_location));
  4491. }
  4492. gfc_add_expr_to_block (&block, tmp);
  4493. return gfc_finish_block (&block);
  4494. }
  4495. /* Create an array constructor from an initialization expression.
  4496. We assume the frontend already did any expansions and conversions. */
  4497. tree
  4498. gfc_conv_array_initializer (tree type, gfc_expr * expr)
  4499. {
  4500. gfc_constructor *c;
  4501. tree tmp;
  4502. offset_int wtmp;
  4503. gfc_se se;
  4504. tree index, range;
  4505. vec<constructor_elt, va_gc> *v = NULL;
  4506. if (expr->expr_type == EXPR_VARIABLE
  4507. && expr->symtree->n.sym->attr.flavor == FL_PARAMETER
  4508. && expr->symtree->n.sym->value)
  4509. expr = expr->symtree->n.sym->value;
  4510. switch (expr->expr_type)
  4511. {
  4512. case EXPR_CONSTANT:
  4513. case EXPR_STRUCTURE:
  4514. /* A single scalar or derived type value. Create an array with all
  4515. elements equal to that value. */
  4516. gfc_init_se (&se, NULL);
  4517. if (expr->expr_type == EXPR_CONSTANT)
  4518. gfc_conv_constant (&se, expr);
  4519. else
  4520. gfc_conv_structure (&se, expr, 1);
  4521. wtmp = wi::to_offset (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) + 1;
  4522. /* This will probably eat buckets of memory for large arrays. */
  4523. while (wtmp != 0)
  4524. {
  4525. CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, se.expr);
  4526. wtmp -= 1;
  4527. }
  4528. break;
  4529. case EXPR_ARRAY:
  4530. /* Create a vector of all the elements. */
  4531. for (c = gfc_constructor_first (expr->value.constructor);
  4532. c; c = gfc_constructor_next (c))
  4533. {
  4534. if (c->iterator)
  4535. {
  4536. /* Problems occur when we get something like
  4537. integer :: a(lots) = (/(i, i=1, lots)/) */
  4538. gfc_fatal_error ("The number of elements in the array "
  4539. "constructor at %L requires an increase of "
  4540. "the allowed %d upper limit. See "
  4541. "%<-fmax-array-constructor%> option",
  4542. &expr->where, flag_max_array_constructor);
  4543. return NULL_TREE;
  4544. }
  4545. if (mpz_cmp_si (c->offset, 0) != 0)
  4546. index = gfc_conv_mpz_to_tree (c->offset, gfc_index_integer_kind);
  4547. else
  4548. index = NULL_TREE;
  4549. if (mpz_cmp_si (c->repeat, 1) > 0)
  4550. {
  4551. tree tmp1, tmp2;
  4552. mpz_t maxval;
  4553. mpz_init (maxval);
  4554. mpz_add (maxval, c->offset, c->repeat);
  4555. mpz_sub_ui (maxval, maxval, 1);
  4556. tmp2 = gfc_conv_mpz_to_tree (maxval, gfc_index_integer_kind);
  4557. if (mpz_cmp_si (c->offset, 0) != 0)
  4558. {
  4559. mpz_add_ui (maxval, c->offset, 1);
  4560. tmp1 = gfc_conv_mpz_to_tree (maxval, gfc_index_integer_kind);
  4561. }
  4562. else
  4563. tmp1 = gfc_conv_mpz_to_tree (c->offset, gfc_index_integer_kind);
  4564. range = fold_build2 (RANGE_EXPR, gfc_array_index_type, tmp1, tmp2);
  4565. mpz_clear (maxval);
  4566. }
  4567. else
  4568. range = NULL;
  4569. gfc_init_se (&se, NULL);
  4570. switch (c->expr->expr_type)
  4571. {
  4572. case EXPR_CONSTANT:
  4573. gfc_conv_constant (&se, c->expr);
  4574. break;
  4575. case EXPR_STRUCTURE:
  4576. gfc_conv_structure (&se, c->expr, 1);
  4577. break;
  4578. default:
  4579. /* Catch those occasional beasts that do not simplify
  4580. for one reason or another, assuming that if they are
4581. standard-defying the frontend will catch them. */
  4582. gfc_conv_expr (&se, c->expr);
  4583. break;
  4584. }
  4585. if (range == NULL_TREE)
  4586. CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
  4587. else
  4588. {
  4589. if (index != NULL_TREE)
  4590. CONSTRUCTOR_APPEND_ELT (v, index, se.expr);
  4591. CONSTRUCTOR_APPEND_ELT (v, range, se.expr);
  4592. }
  4593. }
  4594. break;
  4595. case EXPR_NULL:
  4596. return gfc_build_null_descriptor (type);
  4597. default:
  4598. gcc_unreachable ();
  4599. }
  4600. /* Create a constructor from the list of elements. */
  4601. tmp = build_constructor (type, v);
  4602. TREE_CONSTANT (tmp) = 1;
  4603. return tmp;
  4604. }
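/* For illustration only (hand-written example, not from the original
   sources): an initializer such as
     integer :: v(3) = 7
   takes the EXPR_CONSTANT path above, so the returned constructor simply
   repeats the converted value 7 for all three elements; an explicit
   array constructor takes the EXPR_ARRAY path instead and appends one
   constructor element (or a RANGE_EXPR for repeated values) per entry.  */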
  4605. /* Generate code to evaluate non-constant coarray cobounds. */
  4606. void
  4607. gfc_trans_array_cobounds (tree type, stmtblock_t * pblock,
  4608. const gfc_symbol *sym)
  4609. {
  4610. int dim;
  4611. tree ubound;
  4612. tree lbound;
  4613. gfc_se se;
  4614. gfc_array_spec *as;
  4615. as = sym->as;
  4616. for (dim = as->rank; dim < as->rank + as->corank; dim++)
  4617. {
  4618. /* Evaluate non-constant array bound expressions. */
  4619. lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
  4620. if (as->lower[dim] && !INTEGER_CST_P (lbound))
  4621. {
  4622. gfc_init_se (&se, NULL);
  4623. gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
  4624. gfc_add_block_to_block (pblock, &se.pre);
  4625. gfc_add_modify (pblock, lbound, se.expr);
  4626. }
  4627. ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
  4628. if (as->upper[dim] && !INTEGER_CST_P (ubound))
  4629. {
  4630. gfc_init_se (&se, NULL);
  4631. gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
  4632. gfc_add_block_to_block (pblock, &se.pre);
  4633. gfc_add_modify (pblock, ubound, se.expr);
  4634. }
  4635. }
  4636. }
  4637. /* Generate code to evaluate non-constant array bounds. Sets *poffset and
  4638. returns the size (in elements) of the array. */
  4639. static tree
  4640. gfc_trans_array_bounds (tree type, gfc_symbol * sym, tree * poffset,
  4641. stmtblock_t * pblock)
  4642. {
  4643. gfc_array_spec *as;
  4644. tree size;
  4645. tree stride;
  4646. tree offset;
  4647. tree ubound;
  4648. tree lbound;
  4649. tree tmp;
  4650. gfc_se se;
  4651. int dim;
  4652. as = sym->as;
  4653. size = gfc_index_one_node;
  4654. offset = gfc_index_zero_node;
  4655. for (dim = 0; dim < as->rank; dim++)
  4656. {
  4657. /* Evaluate non-constant array bound expressions. */
  4658. lbound = GFC_TYPE_ARRAY_LBOUND (type, dim);
  4659. if (as->lower[dim] && !INTEGER_CST_P (lbound))
  4660. {
  4661. gfc_init_se (&se, NULL);
  4662. gfc_conv_expr_type (&se, as->lower[dim], gfc_array_index_type);
  4663. gfc_add_block_to_block (pblock, &se.pre);
  4664. gfc_add_modify (pblock, lbound, se.expr);
  4665. }
  4666. ubound = GFC_TYPE_ARRAY_UBOUND (type, dim);
  4667. if (as->upper[dim] && !INTEGER_CST_P (ubound))
  4668. {
  4669. gfc_init_se (&se, NULL);
  4670. gfc_conv_expr_type (&se, as->upper[dim], gfc_array_index_type);
  4671. gfc_add_block_to_block (pblock, &se.pre);
  4672. gfc_add_modify (pblock, ubound, se.expr);
  4673. }
  4674. /* The offset of this dimension. offset = offset - lbound * stride. */
  4675. tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
  4676. lbound, size);
  4677. offset = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
  4678. offset, tmp);
  4679. /* The size of this dimension, and the stride of the next. */
  4680. if (dim + 1 < as->rank)
  4681. stride = GFC_TYPE_ARRAY_STRIDE (type, dim + 1);
  4682. else
  4683. stride = GFC_TYPE_ARRAY_SIZE (type);
  4684. if (ubound != NULL_TREE && !(stride && INTEGER_CST_P (stride)))
  4685. {
  4686. /* Calculate stride = size * (ubound + 1 - lbound). */
  4687. tmp = fold_build2_loc (input_location, MINUS_EXPR,
  4688. gfc_array_index_type,
  4689. gfc_index_one_node, lbound);
  4690. tmp = fold_build2_loc (input_location, PLUS_EXPR,
  4691. gfc_array_index_type, ubound, tmp);
  4692. tmp = fold_build2_loc (input_location, MULT_EXPR,
  4693. gfc_array_index_type, size, tmp);
  4694. if (stride)
  4695. gfc_add_modify (pblock, stride, tmp);
  4696. else
  4697. stride = gfc_evaluate_now (tmp, pblock);
  4698. /* Make sure that negative size arrays are translated
  4699. to being zero size. */
  4700. tmp = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
  4701. stride, gfc_index_zero_node);
  4702. tmp = fold_build3_loc (input_location, COND_EXPR,
  4703. gfc_array_index_type, tmp,
  4704. stride, gfc_index_zero_node);
  4705. gfc_add_modify (pblock, stride, tmp);
  4706. }
  4707. size = stride;
  4708. }
  4709. gfc_trans_array_cobounds (type, pblock, sym);
  4710. gfc_trans_vla_type_sizes (sym, pblock);
  4711. *poffset = offset;
  4712. return size;
  4713. }
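/* Worked example (editorial illustration, using constant bounds purely
   for clarity): for a(2:4, 3:5) the loop above computes
     dim 0:  offset = 0 - 2*1 = -2,   next stride = 1 * (4 + 1 - 2) = 3
     dim 1:  offset = -2 - 3*3 = -11, size = 3 * (5 + 1 - 3) = 9
   so 9 elements are needed, and a(2,3) maps to linear element
   -11 + 2*1 + 3*3 = 0.  */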
  4714. /* Generate code to initialize/allocate an array variable. */
  4715. void
  4716. gfc_trans_auto_array_allocation (tree decl, gfc_symbol * sym,
  4717. gfc_wrapped_block * block)
  4718. {
  4719. stmtblock_t init;
  4720. tree type;
  4721. tree tmp = NULL_TREE;
  4722. tree size;
  4723. tree offset;
  4724. tree space;
  4725. tree inittree;
  4726. bool onstack;
  4727. gcc_assert (!(sym->attr.pointer || sym->attr.allocatable));
  4728. /* Do nothing for USEd variables. */
  4729. if (sym->attr.use_assoc)
  4730. return;
  4731. type = TREE_TYPE (decl);
  4732. gcc_assert (GFC_ARRAY_TYPE_P (type));
  4733. onstack = TREE_CODE (type) != POINTER_TYPE;
  4734. gfc_init_block (&init);
  4735. /* Evaluate character string length. */
  4736. if (sym->ts.type == BT_CHARACTER
  4737. && onstack && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
  4738. {
  4739. gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
  4740. gfc_trans_vla_type_sizes (sym, &init);
  4741. /* Emit a DECL_EXPR for this variable, which will cause the
  4742. gimplifier to allocate storage, and all that good stuff. */
  4743. tmp = fold_build1_loc (input_location, DECL_EXPR, TREE_TYPE (decl), decl);
  4744. gfc_add_expr_to_block (&init, tmp);
  4745. }
  4746. if (onstack)
  4747. {
  4748. gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
  4749. return;
  4750. }
  4751. type = TREE_TYPE (type);
  4752. gcc_assert (!sym->attr.use_assoc);
  4753. gcc_assert (!TREE_STATIC (decl));
  4754. gcc_assert (!sym->module);
  4755. if (sym->ts.type == BT_CHARACTER
  4756. && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
  4757. gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
  4758. size = gfc_trans_array_bounds (type, sym, &offset, &init);
  4759. /* Don't actually allocate space for Cray Pointees. */
  4760. if (sym->attr.cray_pointee)
  4761. {
  4762. if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
  4763. gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
  4764. gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
  4765. return;
  4766. }
  4767. if (flag_stack_arrays)
  4768. {
  4769. gcc_assert (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE);
  4770. space = build_decl (sym->declared_at.lb->location,
  4771. VAR_DECL, create_tmp_var_name ("A"),
  4772. TREE_TYPE (TREE_TYPE (decl)));
  4773. gfc_trans_vla_type_sizes (sym, &init);
  4774. }
  4775. else
  4776. {
  4777. /* The size is the number of elements in the array, so multiply by the
  4778. size of an element to get the total size. */
  4779. tmp = TYPE_SIZE_UNIT (gfc_get_element_type (type));
  4780. size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
  4781. size, fold_convert (gfc_array_index_type, tmp));
  4782. /* Allocate memory to hold the data. */
  4783. tmp = gfc_call_malloc (&init, TREE_TYPE (decl), size);
  4784. gfc_add_modify (&init, decl, tmp);
  4785. /* Free the temporary. */
  4786. tmp = gfc_call_free (convert (pvoid_type_node, decl));
  4787. space = NULL_TREE;
  4788. }
  4789. /* Set offset of the array. */
  4790. if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
  4791. gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
  4792. /* Automatic arrays should not have initializers. */
  4793. gcc_assert (!sym->value);
  4794. inittree = gfc_finish_block (&init);
  4795. if (space)
  4796. {
  4797. tree addr;
  4798. pushdecl (space);
4799. /* Don't create a new scope; emit the DECL_EXPR in exactly the scope
4800. where space itself is located. */
  4801. gfc_init_block (&init);
  4802. tmp = fold_build1_loc (input_location, DECL_EXPR,
  4803. TREE_TYPE (space), space);
  4804. gfc_add_expr_to_block (&init, tmp);
  4805. addr = fold_build1_loc (sym->declared_at.lb->location,
  4806. ADDR_EXPR, TREE_TYPE (decl), space);
  4807. gfc_add_modify (&init, decl, addr);
  4808. gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
  4809. tmp = NULL_TREE;
  4810. }
  4811. gfc_add_init_cleanup (block, inittree, tmp);
  4812. }
  4813. /* Generate entry and exit code for g77 calling convention arrays. */
  4814. void
  4815. gfc_trans_g77_array (gfc_symbol * sym, gfc_wrapped_block * block)
  4816. {
  4817. tree parm;
  4818. tree type;
  4819. locus loc;
  4820. tree offset;
  4821. tree tmp;
  4822. tree stmt;
  4823. stmtblock_t init;
  4824. gfc_save_backend_locus (&loc);
  4825. gfc_set_backend_locus (&sym->declared_at);
  4826. /* Descriptor type. */
  4827. parm = sym->backend_decl;
  4828. type = TREE_TYPE (parm);
  4829. gcc_assert (GFC_ARRAY_TYPE_P (type));
  4830. gfc_start_block (&init);
  4831. if (sym->ts.type == BT_CHARACTER
  4832. && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
  4833. gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
  4834. /* Evaluate the bounds of the array. */
  4835. gfc_trans_array_bounds (type, sym, &offset, &init);
  4836. /* Set the offset. */
  4837. if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
  4838. gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
  4839. /* Set the pointer itself if we aren't using the parameter directly. */
  4840. if (TREE_CODE (parm) != PARM_DECL)
  4841. {
  4842. tmp = convert (TREE_TYPE (parm), GFC_DECL_SAVED_DESCRIPTOR (parm));
  4843. gfc_add_modify (&init, parm, tmp);
  4844. }
  4845. stmt = gfc_finish_block (&init);
  4846. gfc_restore_backend_locus (&loc);
  4847. /* Add the initialization code to the start of the function. */
  4848. if (sym->attr.optional || sym->attr.not_always_present)
  4849. {
  4850. tmp = gfc_conv_expr_present (sym);
  4851. stmt = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
  4852. }
  4853. gfc_add_init_cleanup (block, stmt, NULL_TREE);
  4854. }
  4855. /* Modify the descriptor of an array parameter so that it has the
  4856. correct lower bound. Also move the upper bound accordingly.
  4857. If the array is not packed, it will be copied into a temporary.
  4858. For each dimension we set the new lower and upper bounds. Then we copy the
  4859. stride and calculate the offset for this dimension. We also work out
4860. what the stride of a packed array would be, and see if the two match.
4861. If the array needs repacking, we set the stride to the values we just
  4862. calculated, recalculate the offset and copy the array data.
  4863. Code is also added to copy the data back at the end of the function.
  4864. */
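/* For illustration only (hand-written Fortran, not from the original
   sources): when a strided section such as x(1:10:2) is passed to an
   assumed-shape dummy real :: d(:), the incoming descriptor has a first
   stride of 2; the code below then either uses the descriptor's data
   directly or calls the pack routine (gfor_fndecl_in_pack) to obtain a
   contiguous copy, which is copied back at the end of the procedure, as
   described above.  */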
  4865. void
  4866. gfc_trans_dummy_array_bias (gfc_symbol * sym, tree tmpdesc,
  4867. gfc_wrapped_block * block)
  4868. {
  4869. tree size;
  4870. tree type;
  4871. tree offset;
  4872. locus loc;
  4873. stmtblock_t init;
  4874. tree stmtInit, stmtCleanup;
  4875. tree lbound;
  4876. tree ubound;
  4877. tree dubound;
  4878. tree dlbound;
  4879. tree dumdesc;
  4880. tree tmp;
  4881. tree stride, stride2;
  4882. tree stmt_packed;
  4883. tree stmt_unpacked;
  4884. tree partial;
  4885. gfc_se se;
  4886. int n;
  4887. int checkparm;
  4888. int no_repack;
  4889. bool optional_arg;
  4890. /* Do nothing for pointer and allocatable arrays. */
  4891. if (sym->attr.pointer || sym->attr.allocatable)
  4892. return;
  4893. if (sym->attr.dummy && gfc_is_nodesc_array (sym))
  4894. {
  4895. gfc_trans_g77_array (sym, block);
  4896. return;
  4897. }
  4898. gfc_save_backend_locus (&loc);
  4899. gfc_set_backend_locus (&sym->declared_at);
  4900. /* Descriptor type. */
  4901. type = TREE_TYPE (tmpdesc);
  4902. gcc_assert (GFC_ARRAY_TYPE_P (type));
  4903. dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
  4904. dumdesc = build_fold_indirect_ref_loc (input_location, dumdesc);
  4905. gfc_start_block (&init);
  4906. if (sym->ts.type == BT_CHARACTER
  4907. && TREE_CODE (sym->ts.u.cl->backend_decl) == VAR_DECL)
  4908. gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
  4909. checkparm = (sym->as->type == AS_EXPLICIT
  4910. && (gfc_option.rtcheck & GFC_RTCHECK_BOUNDS));
  4911. no_repack = !(GFC_DECL_PACKED_ARRAY (tmpdesc)
  4912. || GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc));
  4913. if (GFC_DECL_PARTIAL_PACKED_ARRAY (tmpdesc))
  4914. {
  4915. /* For non-constant shape arrays we only check if the first dimension
  4916. is contiguous. Repacking higher dimensions wouldn't gain us
  4917. anything as we still don't know the array stride. */
  4918. partial = gfc_create_var (boolean_type_node, "partial");
  4919. TREE_USED (partial) = 1;
  4920. tmp = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
  4921. tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node, tmp,
  4922. gfc_index_one_node);
  4923. gfc_add_modify (&init, partial, tmp);
  4924. }
  4925. else
  4926. partial = NULL_TREE;
  4927. /* The naming of stmt_unpacked and stmt_packed may be counter-intuitive
4928. here, but I think it does the right thing. */
  4929. if (no_repack)
  4930. {
  4931. /* Set the first stride. */
  4932. stride = gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[0]);
  4933. stride = gfc_evaluate_now (stride, &init);
  4934. tmp = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
  4935. stride, gfc_index_zero_node);
  4936. tmp = fold_build3_loc (input_location, COND_EXPR, gfc_array_index_type,
  4937. tmp, gfc_index_one_node, stride);
  4938. stride = GFC_TYPE_ARRAY_STRIDE (type, 0);
  4939. gfc_add_modify (&init, stride, tmp);
  4940. /* Allow the user to disable array repacking. */
  4941. stmt_unpacked = NULL_TREE;
  4942. }
  4943. else
  4944. {
  4945. gcc_assert (integer_onep (GFC_TYPE_ARRAY_STRIDE (type, 0)));
  4946. /* A library call to repack the array if necessary. */
  4947. tmp = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
  4948. stmt_unpacked = build_call_expr_loc (input_location,
  4949. gfor_fndecl_in_pack, 1, tmp);
  4950. stride = gfc_index_one_node;
  4951. if (warn_array_temporaries)
  4952. gfc_warning (OPT_Warray_temporaries,
  4953. "Creating array temporary at %L", &loc);
  4954. }
  4955. /* This is for the case where the array data is used directly without
  4956. calling the repack function. */
  4957. if (no_repack || partial != NULL_TREE)
  4958. stmt_packed = gfc_conv_descriptor_data_get (dumdesc);
  4959. else
  4960. stmt_packed = NULL_TREE;
  4961. /* Assign the data pointer. */
  4962. if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
  4963. {
  4964. /* Don't repack unknown shape arrays when the first stride is 1. */
  4965. tmp = fold_build3_loc (input_location, COND_EXPR, TREE_TYPE (stmt_packed),
  4966. partial, stmt_packed, stmt_unpacked);
  4967. }
  4968. else
  4969. tmp = stmt_packed != NULL_TREE ? stmt_packed : stmt_unpacked;
  4970. gfc_add_modify (&init, tmpdesc, fold_convert (type, tmp));
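/* Rough sketch (illustrative pseudo-code, added for exposition, not
   generated verbatim) of the data-pointer assignment built above for the
   partially packed case:

     tmpdesc = (dumdesc->dim[0].stride == 1)
               ? dumdesc->data
               : internal_pack (dumdesc);   <- via gfor_fndecl_in_pack

   In the no_repack case only the plain data pointer is used, and when
   full repacking is requested the internal_pack call is used
   unconditionally.  */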
  4971. offset = gfc_index_zero_node;
  4972. size = gfc_index_one_node;
  4973. /* Evaluate the bounds of the array. */
  4974. for (n = 0; n < sym->as->rank; n++)
  4975. {
  4976. if (checkparm || !sym->as->upper[n])
  4977. {
  4978. /* Get the bounds of the actual parameter. */
  4979. dubound = gfc_conv_descriptor_ubound_get (dumdesc, gfc_rank_cst[n]);
  4980. dlbound = gfc_conv_descriptor_lbound_get (dumdesc, gfc_rank_cst[n]);
  4981. }
  4982. else
  4983. {
  4984. dubound = NULL_TREE;
  4985. dlbound = NULL_TREE;
  4986. }
  4987. lbound = GFC_TYPE_ARRAY_LBOUND (type, n);
  4988. if (!INTEGER_CST_P (lbound))
  4989. {
  4990. gfc_init_se (&se, NULL);
  4991. gfc_conv_expr_type (&se, sym->as->lower[n],
  4992. gfc_array_index_type);
  4993. gfc_add_block_to_block (&init, &se.pre);
  4994. gfc_add_modify (&init, lbound, se.expr);
  4995. }
  4996. ubound = GFC_TYPE_ARRAY_UBOUND (type, n);
  4997. /* Set the desired upper bound. */
  4998. if (sym->as->upper[n])
  4999. {
  5000. /* We know what we want the upper bound to be. */
  5001. if (!INTEGER_CST_P (ubound))
  5002. {
  5003. gfc_init_se (&se, NULL);
  5004. gfc_conv_expr_type (&se, sym->as->upper[n],
  5005. gfc_array_index_type);
  5006. gfc_add_block_to_block (&init, &se.pre);
  5007. gfc_add_modify (&init, ubound, se.expr);
  5008. }
  5009. /* Check the sizes match. */
  5010. if (checkparm)
  5011. {
  5012. /* Check (ubound(a) - lbound(a) == ubound(b) - lbound(b)). */
  5013. char * msg;
  5014. tree temp;
  5015. temp = fold_build2_loc (input_location, MINUS_EXPR,
  5016. gfc_array_index_type, ubound, lbound);
  5017. temp = fold_build2_loc (input_location, PLUS_EXPR,
  5018. gfc_array_index_type,
  5019. gfc_index_one_node, temp);
  5020. stride2 = fold_build2_loc (input_location, MINUS_EXPR,
  5021. gfc_array_index_type, dubound,
  5022. dlbound);
  5023. stride2 = fold_build2_loc (input_location, PLUS_EXPR,
  5024. gfc_array_index_type,
  5025. gfc_index_one_node, stride2);
  5026. tmp = fold_build2_loc (input_location, NE_EXPR,
  5027. gfc_array_index_type, temp, stride2);
  5028. msg = xasprintf ("Dimension %d of array '%s' has extent "
  5029. "%%ld instead of %%ld", n+1, sym->name);
  5030. gfc_trans_runtime_check (true, false, tmp, &init, &loc, msg,
  5031. fold_convert (long_integer_type_node, temp),
  5032. fold_convert (long_integer_type_node, stride2));
  5033. free (msg);
  5034. }
  5035. }
  5036. else
  5037. {
/* For assumed-shape arrays, move the upper bound by the same amount
as the lower bound.  */
  5040. tmp = fold_build2_loc (input_location, MINUS_EXPR,
  5041. gfc_array_index_type, dubound, dlbound);
  5042. tmp = fold_build2_loc (input_location, PLUS_EXPR,
  5043. gfc_array_index_type, tmp, lbound);
  5044. gfc_add_modify (&init, ubound, tmp);
  5045. }
  5046. /* The offset of this dimension. offset = offset - lbound * stride. */
  5047. tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
  5048. lbound, stride);
  5049. offset = fold_build2_loc (input_location, MINUS_EXPR,
  5050. gfc_array_index_type, offset, tmp);
  5051. /* The size of this dimension, and the stride of the next. */
  5052. if (n + 1 < sym->as->rank)
  5053. {
  5054. stride = GFC_TYPE_ARRAY_STRIDE (type, n + 1);
  5055. if (no_repack || partial != NULL_TREE)
  5056. stmt_unpacked =
  5057. gfc_conv_descriptor_stride_get (dumdesc, gfc_rank_cst[n+1]);
  5058. /* Figure out the stride if not a known constant. */
  5059. if (!INTEGER_CST_P (stride))
  5060. {
  5061. if (no_repack)
  5062. stmt_packed = NULL_TREE;
  5063. else
  5064. {
  5065. /* Calculate stride = size * (ubound + 1 - lbound). */
  5066. tmp = fold_build2_loc (input_location, MINUS_EXPR,
  5067. gfc_array_index_type,
  5068. gfc_index_one_node, lbound);
  5069. tmp = fold_build2_loc (input_location, PLUS_EXPR,
  5070. gfc_array_index_type, ubound, tmp);
  5071. size = fold_build2_loc (input_location, MULT_EXPR,
  5072. gfc_array_index_type, size, tmp);
  5073. stmt_packed = size;
  5074. }
  5075. /* Assign the stride. */
  5076. if (stmt_packed != NULL_TREE && stmt_unpacked != NULL_TREE)
  5077. tmp = fold_build3_loc (input_location, COND_EXPR,
  5078. gfc_array_index_type, partial,
  5079. stmt_unpacked, stmt_packed);
  5080. else
  5081. tmp = (stmt_packed != NULL_TREE) ? stmt_packed : stmt_unpacked;
  5082. gfc_add_modify (&init, stride, tmp);
  5083. }
  5084. }
  5085. else
  5086. {
  5087. stride = GFC_TYPE_ARRAY_SIZE (type);
  5088. if (stride && !INTEGER_CST_P (stride))
  5089. {
  5090. /* Calculate size = stride * (ubound + 1 - lbound). */
  5091. tmp = fold_build2_loc (input_location, MINUS_EXPR,
  5092. gfc_array_index_type,
  5093. gfc_index_one_node, lbound);
  5094. tmp = fold_build2_loc (input_location, PLUS_EXPR,
  5095. gfc_array_index_type,
  5096. ubound, tmp);
  5097. tmp = fold_build2_loc (input_location, MULT_EXPR,
  5098. gfc_array_index_type,
  5099. GFC_TYPE_ARRAY_STRIDE (type, n), tmp);
  5100. gfc_add_modify (&init, stride, tmp);
  5101. }
  5102. }
  5103. }
  5104. gfc_trans_array_cobounds (type, &init, sym);
  5105. /* Set the offset. */
  5106. if (TREE_CODE (GFC_TYPE_ARRAY_OFFSET (type)) == VAR_DECL)
  5107. gfc_add_modify (&init, GFC_TYPE_ARRAY_OFFSET (type), offset);
  5108. gfc_trans_vla_type_sizes (sym, &init);
  5109. stmtInit = gfc_finish_block (&init);
  5110. /* Only do the entry/initialization code if the arg is present. */
  5111. dumdesc = GFC_DECL_SAVED_DESCRIPTOR (tmpdesc);
  5112. optional_arg = (sym->attr.optional
  5113. || (sym->ns->proc_name->attr.entry_master
  5114. && sym->attr.dummy));
  5115. if (optional_arg)
  5116. {
  5117. tmp = gfc_conv_expr_present (sym);
  5118. stmtInit = build3_v (COND_EXPR, tmp, stmtInit,
  5119. build_empty_stmt (input_location));
  5120. }
  5121. /* Cleanup code. */
  5122. if (no_repack)
  5123. stmtCleanup = NULL_TREE;
  5124. else
  5125. {
  5126. stmtblock_t cleanup;
  5127. gfc_start_block (&cleanup);
  5128. if (sym->attr.intent != INTENT_IN)
  5129. {
  5130. /* Copy the data back. */
  5131. tmp = build_call_expr_loc (input_location,
  5132. gfor_fndecl_in_unpack, 2, dumdesc, tmpdesc);
  5133. gfc_add_expr_to_block (&cleanup, tmp);
  5134. }
  5135. /* Free the temporary. */
  5136. tmp = gfc_call_free (tmpdesc);
  5137. gfc_add_expr_to_block (&cleanup, tmp);
  5138. stmtCleanup = gfc_finish_block (&cleanup);
  5139. /* Only do the cleanup if the array was repacked. */
  5140. tmp = build_fold_indirect_ref_loc (input_location, dumdesc);
  5141. tmp = gfc_conv_descriptor_data_get (tmp);
  5142. tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
  5143. tmp, tmpdesc);
  5144. stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
  5145. build_empty_stmt (input_location));
  5146. if (optional_arg)
  5147. {
  5148. tmp = gfc_conv_expr_present (sym);
  5149. stmtCleanup = build3_v (COND_EXPR, tmp, stmtCleanup,
  5150. build_empty_stmt (input_location));
  5151. }
  5152. }
  5153. /* We don't need to free any memory allocated by internal_pack as it will
  5154. be freed at the end of the function by pop_context. */
  5155. gfc_add_init_cleanup (block, stmtInit, stmtCleanup);
  5156. gfc_restore_backend_locus (&loc);
  5157. }
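/* Illustrative Fortran example (added for exposition, not from the
   original sources) of the situation handled above: with

       subroutine s (a)
         integer :: a(:)          ! assumed-shape dummy
       end subroutine
       call s (b(1:9:2))          ! non-unit-stride actual argument

   and repacking enabled for A, the incoming descriptor has stride 2, so
   internal_pack builds a contiguous temporary on entry; on exit the data
   is copied back via internal_unpack (unless A is INTENT(IN)) and the
   temporary is freed, guarded by the "was it actually repacked?" check.  */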
  5158. /* Calculate the overall offset, including subreferences. */
  5159. static void
  5160. gfc_get_dataptr_offset (stmtblock_t *block, tree parm, tree desc, tree offset,
  5161. bool subref, gfc_expr *expr)
  5162. {
  5163. tree tmp;
  5164. tree field;
  5165. tree stride;
  5166. tree index;
  5167. gfc_ref *ref;
  5168. gfc_se start;
  5169. int n;
  5170. /* If offset is NULL and this is not a subreferenced array, there is
  5171. nothing to do. */
  5172. if (offset == NULL_TREE)
  5173. {
  5174. if (subref)
  5175. offset = gfc_index_zero_node;
  5176. else
  5177. return;
  5178. }
  5179. tmp = build_array_ref (desc, offset, NULL);
  5180. /* Offset the data pointer for pointer assignments from arrays with
  5181. subreferences; e.g. my_integer => my_type(:)%integer_component. */
  5182. if (subref)
  5183. {
  5184. /* Go past the array reference. */
  5185. for (ref = expr->ref; ref; ref = ref->next)
  5186. if (ref->type == REF_ARRAY &&
  5187. ref->u.ar.type != AR_ELEMENT)
  5188. {
  5189. ref = ref->next;
  5190. break;
  5191. }
  5192. /* Calculate the offset for each subsequent subreference. */
  5193. for (; ref; ref = ref->next)
  5194. {
  5195. switch (ref->type)
  5196. {
  5197. case REF_COMPONENT:
  5198. field = ref->u.c.component->backend_decl;
  5199. gcc_assert (field && TREE_CODE (field) == FIELD_DECL);
  5200. tmp = fold_build3_loc (input_location, COMPONENT_REF,
  5201. TREE_TYPE (field),
  5202. tmp, field, NULL_TREE);
  5203. break;
  5204. case REF_SUBSTRING:
  5205. gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE);
  5206. gfc_init_se (&start, NULL);
  5207. gfc_conv_expr_type (&start, ref->u.ss.start, gfc_charlen_type_node);
  5208. gfc_add_block_to_block (block, &start.pre);
  5209. tmp = gfc_build_array_ref (tmp, start.expr, NULL);
  5210. break;
  5211. case REF_ARRAY:
  5212. gcc_assert (TREE_CODE (TREE_TYPE (tmp)) == ARRAY_TYPE
  5213. && ref->u.ar.type == AR_ELEMENT);
  5214. /* TODO - Add bounds checking. */
  5215. stride = gfc_index_one_node;
  5216. index = gfc_index_zero_node;
  5217. for (n = 0; n < ref->u.ar.dimen; n++)
  5218. {
  5219. tree itmp;
  5220. tree jtmp;
  5221. /* Update the index. */
  5222. gfc_init_se (&start, NULL);
  5223. gfc_conv_expr_type (&start, ref->u.ar.start[n], gfc_array_index_type);
  5224. itmp = gfc_evaluate_now (start.expr, block);
  5225. gfc_init_se (&start, NULL);
  5226. gfc_conv_expr_type (&start, ref->u.ar.as->lower[n], gfc_array_index_type);
  5227. jtmp = gfc_evaluate_now (start.expr, block);
  5228. itmp = fold_build2_loc (input_location, MINUS_EXPR,
  5229. gfc_array_index_type, itmp, jtmp);
  5230. itmp = fold_build2_loc (input_location, MULT_EXPR,
  5231. gfc_array_index_type, itmp, stride);
  5232. index = fold_build2_loc (input_location, PLUS_EXPR,
  5233. gfc_array_index_type, itmp, index);
  5234. index = gfc_evaluate_now (index, block);
  5235. /* Update the stride. */
  5236. gfc_init_se (&start, NULL);
  5237. gfc_conv_expr_type (&start, ref->u.ar.as->upper[n], gfc_array_index_type);
  5238. itmp = fold_build2_loc (input_location, MINUS_EXPR,
  5239. gfc_array_index_type, start.expr,
  5240. jtmp);
  5241. itmp = fold_build2_loc (input_location, PLUS_EXPR,
  5242. gfc_array_index_type,
  5243. gfc_index_one_node, itmp);
  5244. stride = fold_build2_loc (input_location, MULT_EXPR,
  5245. gfc_array_index_type, stride, itmp);
  5246. stride = gfc_evaluate_now (stride, block);
  5247. }
  5248. /* Apply the index to obtain the array element. */
  5249. tmp = gfc_build_array_ref (tmp, index, NULL);
  5250. break;
  5251. default:
  5252. gcc_unreachable ();
  5253. break;
  5254. }
  5255. }
  5256. }
  5257. /* Set the target data pointer. */
  5258. offset = gfc_build_addr_expr (gfc_array_dataptr_type (desc), tmp);
  5259. gfc_conv_descriptor_data_set (block, parm, offset);
  5260. }
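/* Example added for exposition (not from the original sources): for the
   pointer assignment
       my_integer => my_type(:)%integer_component
   the code above steps past the array reference, applies a COMPONENT_REF
   for INTEGER_COMPONENT and finally stores the address of that component
   within the element selected by OFFSET as the data pointer of the
   result descriptor.  */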
/* gfc_conv_expr_descriptor needs the string length of an expression
so that the size of the temporary can be obtained.  This is done by
adding up the string lengths of all the elements in the expression.
Functions with non-constant string lengths have their lengths mapped
onto the actual arguments using the interface mapping machinery in
trans-expr.c.  */
  5267. static void
  5268. get_array_charlen (gfc_expr *expr, gfc_se *se)
  5269. {
  5270. gfc_interface_mapping mapping;
  5271. gfc_formal_arglist *formal;
  5272. gfc_actual_arglist *arg;
  5273. gfc_se tse;
  5274. if (expr->ts.u.cl->length
  5275. && gfc_is_constant_expr (expr->ts.u.cl->length))
  5276. {
  5277. if (!expr->ts.u.cl->backend_decl)
  5278. gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
  5279. return;
  5280. }
  5281. switch (expr->expr_type)
  5282. {
  5283. case EXPR_OP:
  5284. get_array_charlen (expr->value.op.op1, se);
  5285. /* For parentheses the expression ts.u.cl is identical. */
  5286. if (expr->value.op.op == INTRINSIC_PARENTHESES)
  5287. return;
  5288. expr->ts.u.cl->backend_decl =
  5289. gfc_create_var (gfc_charlen_type_node, "sln");
  5290. if (expr->value.op.op2)
  5291. {
  5292. get_array_charlen (expr->value.op.op2, se);
  5293. gcc_assert (expr->value.op.op == INTRINSIC_CONCAT);
  5294. /* Add the string lengths and assign them to the expression
  5295. string length backend declaration. */
  5296. gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
  5297. fold_build2_loc (input_location, PLUS_EXPR,
  5298. gfc_charlen_type_node,
  5299. expr->value.op.op1->ts.u.cl->backend_decl,
  5300. expr->value.op.op2->ts.u.cl->backend_decl));
  5301. }
  5302. else
  5303. gfc_add_modify (&se->pre, expr->ts.u.cl->backend_decl,
  5304. expr->value.op.op1->ts.u.cl->backend_decl);
  5305. break;
  5306. case EXPR_FUNCTION:
  5307. if (expr->value.function.esym == NULL
  5308. || expr->ts.u.cl->length->expr_type == EXPR_CONSTANT)
  5309. {
  5310. gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
  5311. break;
  5312. }
  5313. /* Map expressions involving the dummy arguments onto the actual
  5314. argument expressions. */
  5315. gfc_init_interface_mapping (&mapping);
  5316. formal = gfc_sym_get_dummy_args (expr->symtree->n.sym);
  5317. arg = expr->value.function.actual;
  5318. /* Set se = NULL in the calls to the interface mapping, to suppress any
  5319. backend stuff. */
  5320. for (; arg != NULL; arg = arg->next, formal = formal ? formal->next : NULL)
  5321. {
  5322. if (!arg->expr)
  5323. continue;
  5324. if (formal->sym)
  5325. gfc_add_interface_mapping (&mapping, formal->sym, NULL, arg->expr);
  5326. }
  5327. gfc_init_se (&tse, NULL);
  5328. /* Build the expression for the character length and convert it. */
  5329. gfc_apply_interface_mapping (&mapping, &tse, expr->ts.u.cl->length);
  5330. gfc_add_block_to_block (&se->pre, &tse.pre);
  5331. gfc_add_block_to_block (&se->post, &tse.post);
  5332. tse.expr = fold_convert (gfc_charlen_type_node, tse.expr);
  5333. tse.expr = fold_build2_loc (input_location, MAX_EXPR,
  5334. gfc_charlen_type_node, tse.expr,
  5335. build_int_cst (gfc_charlen_type_node, 0));
  5336. expr->ts.u.cl->backend_decl = tse.expr;
  5337. gfc_free_interface_mapping (&mapping);
  5338. break;
  5339. default:
  5340. gfc_conv_string_length (expr->ts.u.cl, expr, &se->pre);
  5341. break;
  5342. }
  5343. }
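/* Examples added for exposition (not from the original sources): for a
   concatenation (a // b) the EXPR_OP branch above emits
       sln = len (a) + len (b);
   for a character function whose length depends on a dummy argument, the
   EXPR_FUNCTION branch substitutes the actual arguments into the length
   expression via the interface mapping and clamps the result to >= 0.  */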
  5344. /* Helper function to check dimensions. */
  5345. static bool
  5346. transposed_dims (gfc_ss *ss)
  5347. {
  5348. int n;
  5349. for (n = 0; n < ss->dimen; n++)
  5350. if (ss->dim[n] != n)
  5351. return true;
  5352. return false;
  5353. }
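/* For example (added for exposition): after the scalarizer has absorbed
   a TRANSPOSE, a rank-2 SS has dim[0] == 1 and dim[1] == 0, so the
   function above returns true; for an ordinary section dim[n] == n and
   it returns false.  */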
  5354. /* Convert the last ref of a scalar coarray from an AR_ELEMENT to an
  5355. AR_FULL, suitable for the scalarizer. */
  5356. static gfc_ss *
  5357. walk_coarray (gfc_expr *e)
  5358. {
  5359. gfc_ss *ss;
  5360. gcc_assert (gfc_get_corank (e) > 0);
  5361. ss = gfc_walk_expr (e);
  5362. /* Fix scalar coarray. */
  5363. if (ss == gfc_ss_terminator)
  5364. {
  5365. gfc_ref *ref;
  5366. ref = e->ref;
  5367. while (ref)
  5368. {
  5369. if (ref->type == REF_ARRAY
  5370. && ref->u.ar.codimen > 0)
  5371. break;
  5372. ref = ref->next;
  5373. }
  5374. gcc_assert (ref != NULL);
  5375. if (ref->u.ar.type == AR_ELEMENT)
  5376. ref->u.ar.type = AR_SECTION;
  5377. ss = gfc_reverse_ss (gfc_walk_array_ref (ss, e, ref));
  5378. }
  5379. return ss;
  5380. }
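/* Example added for exposition (not from the original sources): for a
   scalar coarray reference such as X[2], gfc_walk_expr returns the
   terminator, so the AR_ELEMENT reference is turned into an AR_SECTION
   above and the array reference is re-walked so that the scalarizer
   sees a proper array SS.  */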
  5381. /* Convert an array for passing as an actual argument. Expressions and
  5382. vector subscripts are evaluated and stored in a temporary, which is then
  5383. passed. For whole arrays the descriptor is passed. For array sections
  5384. a modified copy of the descriptor is passed, but using the original data.
  5385. This function is also used for array pointer assignments, and there
  5386. are three cases:
  5387. - se->want_pointer && !se->direct_byref
  5388. EXPR is an actual argument. On exit, se->expr contains a
  5389. pointer to the array descriptor.
  5390. - !se->want_pointer && !se->direct_byref
  5391. EXPR is an actual argument to an intrinsic function or the
  5392. left-hand side of a pointer assignment. On exit, se->expr
  5393. contains the descriptor for EXPR.
  5394. - !se->want_pointer && se->direct_byref
  5395. EXPR is the right-hand side of a pointer assignment and
  5396. se->expr is the descriptor for the previously-evaluated
  5397. left-hand side. The function creates an assignment from
  5398. EXPR to se->expr.
  5399. The se->force_tmp flag disables the non-copying descriptor optimization
  5400. that is used for transpose. It may be used in cases where there is an
  5401. alias between the transpose argument and another argument in the same
  5402. function call. */
  5403. void
  5404. gfc_conv_expr_descriptor (gfc_se *se, gfc_expr *expr)
  5405. {
  5406. gfc_ss *ss;
  5407. gfc_ss_type ss_type;
  5408. gfc_ss_info *ss_info;
  5409. gfc_loopinfo loop;
  5410. gfc_array_info *info;
  5411. int need_tmp;
  5412. int n;
  5413. tree tmp;
  5414. tree desc;
  5415. stmtblock_t block;
  5416. tree start;
  5417. tree offset;
  5418. int full;
  5419. bool subref_array_target = false;
  5420. gfc_expr *arg, *ss_expr;
  5421. if (se->want_coarray)
  5422. ss = walk_coarray (expr);
  5423. else
  5424. ss = gfc_walk_expr (expr);
  5425. gcc_assert (ss != NULL);
  5426. gcc_assert (ss != gfc_ss_terminator);
  5427. ss_info = ss->info;
  5428. ss_type = ss_info->type;
  5429. ss_expr = ss_info->expr;
  5430. /* Special case: TRANSPOSE which needs no temporary. */
  5431. while (expr->expr_type == EXPR_FUNCTION && expr->value.function.isym
  5432. && NULL != (arg = gfc_get_noncopying_intrinsic_argument (expr)))
  5433. {
  5434. /* This is a call to transpose which has already been handled by the
  5435. scalarizer, so that we just need to get its argument's descriptor. */
  5436. gcc_assert (expr->value.function.isym->id == GFC_ISYM_TRANSPOSE);
  5437. expr = expr->value.function.actual->expr;
  5438. }
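/* Example added for exposition (not from the original sources): for
       call sub (transpose (a))
   no temporary is normally created (unless se->force_tmp is set); the
   descriptor of A itself is used, and the swapped dimensions are handled
   when the argument descriptor is built further down.  */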
  5439. /* Special case things we know we can pass easily. */
  5440. switch (expr->expr_type)
  5441. {
  5442. case EXPR_VARIABLE:
  5443. /* If we have a linear array section, we can pass it directly.
  5444. Otherwise we need to copy it into a temporary. */
  5445. gcc_assert (ss_type == GFC_SS_SECTION);
  5446. gcc_assert (ss_expr == expr);
  5447. info = &ss_info->data.array;
  5448. /* Get the descriptor for the array. */
  5449. gfc_conv_ss_descriptor (&se->pre, ss, 0);
  5450. desc = info->descriptor;
  5451. subref_array_target = se->direct_byref && is_subref_array (expr);
  5452. need_tmp = gfc_ref_needs_temporary_p (expr->ref)
  5453. && !subref_array_target;
  5454. if (se->force_tmp)
  5455. need_tmp = 1;
  5456. if (need_tmp)
  5457. full = 0;
  5458. else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
  5459. {
  5460. /* Create a new descriptor if the array doesn't have one. */
  5461. full = 0;
  5462. }
  5463. else if (info->ref->u.ar.type == AR_FULL || se->descriptor_only)
  5464. full = 1;
  5465. else if (se->direct_byref)
  5466. full = 0;
  5467. else
  5468. full = gfc_full_array_ref_p (info->ref, NULL);
  5469. if (full && !transposed_dims (ss))
  5470. {
  5471. if (se->direct_byref && !se->byref_noassign)
  5472. {
  5473. /* Copy the descriptor for pointer assignments. */
  5474. gfc_add_modify (&se->pre, se->expr, desc);
  5475. /* Add any offsets from subreferences. */
  5476. gfc_get_dataptr_offset (&se->pre, se->expr, desc, NULL_TREE,
  5477. subref_array_target, expr);
  5478. }
  5479. else if (se->want_pointer)
  5480. {
  5481. /* We pass full arrays directly. This means that pointers and
  5482. allocatable arrays should also work. */
  5483. se->expr = gfc_build_addr_expr (NULL_TREE, desc);
  5484. }
  5485. else
  5486. {
  5487. se->expr = desc;
  5488. }
  5489. if (expr->ts.type == BT_CHARACTER)
  5490. se->string_length = gfc_get_expr_charlen (expr);
  5491. gfc_free_ss_chain (ss);
  5492. return;
  5493. }
  5494. break;
  5495. case EXPR_FUNCTION:
  5496. /* A transformational function return value will be a temporary
  5497. array descriptor. We still need to go through the scalarizer
  5498. to create the descriptor. Elemental functions are handled as
  5499. arbitrary expressions, i.e. copy to a temporary. */
  5500. if (se->direct_byref)
  5501. {
  5502. gcc_assert (ss_type == GFC_SS_FUNCTION && ss_expr == expr);
  5503. /* For pointer assignments pass the descriptor directly. */
  5504. if (se->ss == NULL)
  5505. se->ss = ss;
  5506. else
  5507. gcc_assert (se->ss == ss);
  5508. se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
  5509. gfc_conv_expr (se, expr);
  5510. gfc_free_ss_chain (ss);
  5511. return;
  5512. }
  5513. if (ss_expr != expr || ss_type != GFC_SS_FUNCTION)
  5514. {
  5515. if (ss_expr != expr)
  5516. /* Elemental function. */
  5517. gcc_assert ((expr->value.function.esym != NULL
  5518. && expr->value.function.esym->attr.elemental)
  5519. || (expr->value.function.isym != NULL
  5520. && expr->value.function.isym->elemental)
  5521. || gfc_inline_intrinsic_function_p (expr));
  5522. else
  5523. gcc_assert (ss_type == GFC_SS_INTRINSIC);
  5524. need_tmp = 1;
  5525. if (expr->ts.type == BT_CHARACTER
  5526. && expr->ts.u.cl->length->expr_type != EXPR_CONSTANT)
  5527. get_array_charlen (expr, se);
  5528. info = NULL;
  5529. }
  5530. else
  5531. {
  5532. /* Transformational function. */
  5533. info = &ss_info->data.array;
  5534. need_tmp = 0;
  5535. }
  5536. break;
  5537. case EXPR_ARRAY:
  5538. /* Constant array constructors don't need a temporary. */
  5539. if (ss_type == GFC_SS_CONSTRUCTOR
  5540. && expr->ts.type != BT_CHARACTER
  5541. && gfc_constant_array_constructor_p (expr->value.constructor))
  5542. {
  5543. need_tmp = 0;
  5544. info = &ss_info->data.array;
  5545. }
  5546. else
  5547. {
  5548. need_tmp = 1;
  5549. info = NULL;
  5550. }
  5551. break;
  5552. default:
  5553. /* Something complicated. Copy it into a temporary. */
  5554. need_tmp = 1;
  5555. info = NULL;
  5556. break;
  5557. }
  5558. /* If we are creating a temporary, we don't need to bother about aliases
  5559. anymore. */
  5560. if (need_tmp)
  5561. se->force_tmp = 0;
  5562. gfc_init_loopinfo (&loop);
  5563. /* Associate the SS with the loop. */
  5564. gfc_add_ss_to_loop (&loop, ss);
  5565. /* Tell the scalarizer not to bother creating loop variables, etc. */
  5566. if (!need_tmp)
  5567. loop.array_parameter = 1;
  5568. else
  5569. /* The right-hand side of a pointer assignment mustn't use a temporary. */
  5570. gcc_assert (!se->direct_byref);
  5571. /* Setup the scalarizing loops and bounds. */
  5572. gfc_conv_ss_startstride (&loop);
  5573. if (need_tmp)
  5574. {
  5575. if (expr->ts.type == BT_CHARACTER && !expr->ts.u.cl->backend_decl)
  5576. get_array_charlen (expr, se);
  5577. /* Tell the scalarizer to make a temporary. */
  5578. loop.temp_ss = gfc_get_temp_ss (gfc_typenode_for_spec (&expr->ts),
  5579. ((expr->ts.type == BT_CHARACTER)
  5580. ? expr->ts.u.cl->backend_decl
  5581. : NULL),
  5582. loop.dimen);
  5583. se->string_length = loop.temp_ss->info->string_length;
  5584. gcc_assert (loop.temp_ss->dimen == loop.dimen);
  5585. gfc_add_ss_to_loop (&loop, loop.temp_ss);
  5586. }
  5587. gfc_conv_loop_setup (&loop, & expr->where);
  5588. if (need_tmp)
  5589. {
  5590. /* Copy into a temporary and pass that. We don't need to copy the data
  5591. back because expressions and vector subscripts must be INTENT_IN. */
  5592. /* TODO: Optimize passing function return values. */
  5593. gfc_se lse;
  5594. gfc_se rse;
  5595. /* Start the copying loops. */
  5596. gfc_mark_ss_chain_used (loop.temp_ss, 1);
  5597. gfc_mark_ss_chain_used (ss, 1);
  5598. gfc_start_scalarized_body (&loop, &block);
  5599. /* Copy each data element. */
  5600. gfc_init_se (&lse, NULL);
  5601. gfc_copy_loopinfo_to_se (&lse, &loop);
  5602. gfc_init_se (&rse, NULL);
  5603. gfc_copy_loopinfo_to_se (&rse, &loop);
  5604. lse.ss = loop.temp_ss;
  5605. rse.ss = ss;
  5606. gfc_conv_scalarized_array_ref (&lse, NULL);
  5607. if (expr->ts.type == BT_CHARACTER)
  5608. {
  5609. gfc_conv_expr (&rse, expr);
  5610. if (POINTER_TYPE_P (TREE_TYPE (rse.expr)))
  5611. rse.expr = build_fold_indirect_ref_loc (input_location,
  5612. rse.expr);
  5613. }
  5614. else
  5615. gfc_conv_expr_val (&rse, expr);
  5616. gfc_add_block_to_block (&block, &rse.pre);
  5617. gfc_add_block_to_block (&block, &lse.pre);
  5618. lse.string_length = rse.string_length;
  5619. tmp = gfc_trans_scalar_assign (&lse, &rse, expr->ts, true,
  5620. expr->expr_type == EXPR_VARIABLE
  5621. || expr->expr_type == EXPR_ARRAY, true);
  5622. gfc_add_expr_to_block (&block, tmp);
  5623. /* Finish the copying loops. */
  5624. gfc_trans_scalarizing_loops (&loop, &block);
  5625. desc = loop.temp_ss->info->data.array.descriptor;
  5626. }
  5627. else if (expr->expr_type == EXPR_FUNCTION && !transposed_dims (ss))
  5628. {
  5629. desc = info->descriptor;
  5630. se->string_length = ss_info->string_length;
  5631. }
  5632. else
  5633. {
  5634. /* We pass sections without copying to a temporary. Make a new
  5635. descriptor and point it at the section we want. The loop variable
  5636. limits will be the limits of the section.
  5637. A function may decide to repack the array to speed up access, but
  5638. we're not bothered about that here. */
  5639. int dim, ndim, codim;
  5640. tree parm;
  5641. tree parmtype;
  5642. tree stride;
  5643. tree from;
  5644. tree to;
  5645. tree base;
  5646. ndim = info->ref ? info->ref->u.ar.dimen : ss->dimen;
  5647. if (se->want_coarray)
  5648. {
  5649. gfc_array_ref *ar = &info->ref->u.ar;
  5650. codim = gfc_get_corank (expr);
  5651. for (n = 0; n < codim - 1; n++)
  5652. {
  5653. /* Make sure we are not lost somehow. */
  5654. gcc_assert (ar->dimen_type[n + ndim] == DIMEN_THIS_IMAGE);
  5655. /* Make sure the call to gfc_conv_section_startstride won't
  5656. generate unnecessary code to calculate stride. */
  5657. gcc_assert (ar->stride[n + ndim] == NULL);
  5658. gfc_conv_section_startstride (&loop.pre, ss, n + ndim);
  5659. loop.from[n + loop.dimen] = info->start[n + ndim];
  5660. loop.to[n + loop.dimen] = info->end[n + ndim];
  5661. }
  5662. gcc_assert (n == codim - 1);
  5663. evaluate_bound (&loop.pre, info->start, ar->start,
  5664. info->descriptor, n + ndim, true);
  5665. loop.from[n + loop.dimen] = info->start[n + ndim];
  5666. }
  5667. else
  5668. codim = 0;
  5669. /* Set the string_length for a character array. */
  5670. if (expr->ts.type == BT_CHARACTER)
  5671. se->string_length = gfc_get_expr_charlen (expr);
  5672. desc = info->descriptor;
  5673. if (se->direct_byref && !se->byref_noassign)
  5674. {
  5675. /* For pointer assignments we fill in the destination. */
  5676. parm = se->expr;
  5677. parmtype = TREE_TYPE (parm);
  5678. }
  5679. else
  5680. {
  5681. /* Otherwise make a new one. */
  5682. parmtype = gfc_get_element_type (TREE_TYPE (desc));
  5683. parmtype = gfc_get_array_type_bounds (parmtype, loop.dimen, codim,
  5684. loop.from, loop.to, 0,
  5685. GFC_ARRAY_UNKNOWN, false);
  5686. parm = gfc_create_var (parmtype, "parm");
  5687. }
  5688. offset = gfc_index_zero_node;
  5689. /* The following can be somewhat confusing. We have two
  5690. descriptors, a new one and the original array.
  5691. {parm, parmtype, dim} refer to the new one.
{desc, type, n, loop} refer to the original, which may be
a descriptorless array.
  5694. The bounds of the scalarization are the bounds of the section.
  5695. We don't have to worry about numeric overflows when calculating
  5696. the offsets because all elements are within the array data. */
  5697. /* Set the dtype. */
  5698. tmp = gfc_conv_descriptor_dtype (parm);
  5699. gfc_add_modify (&loop.pre, tmp, gfc_get_dtype (parmtype));
/* For assignments to a pointer, set the offset to zero only if this
is not the full array.  */
  5702. if ((se->direct_byref || se->use_offset)
  5703. && ((info->ref && info->ref->u.ar.type != AR_FULL)
  5704. || (expr->expr_type == EXPR_ARRAY && se->use_offset)))
  5705. base = gfc_index_zero_node;
  5706. else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
  5707. base = gfc_evaluate_now (gfc_conv_array_offset (desc), &loop.pre);
  5708. else
  5709. base = NULL_TREE;
  5710. for (n = 0; n < ndim; n++)
  5711. {
  5712. stride = gfc_conv_array_stride (desc, n);
  5713. /* Work out the offset. */
  5714. if (info->ref
  5715. && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
  5716. {
  5717. gcc_assert (info->subscript[n]
  5718. && info->subscript[n]->info->type == GFC_SS_SCALAR);
  5719. start = info->subscript[n]->info->data.scalar.value;
  5720. }
  5721. else
  5722. {
  5723. /* Evaluate and remember the start of the section. */
  5724. start = info->start[n];
  5725. stride = gfc_evaluate_now (stride, &loop.pre);
  5726. }
  5727. tmp = gfc_conv_array_lbound (desc, n);
  5728. tmp = fold_build2_loc (input_location, MINUS_EXPR, TREE_TYPE (tmp),
  5729. start, tmp);
  5730. tmp = fold_build2_loc (input_location, MULT_EXPR, TREE_TYPE (tmp),
  5731. tmp, stride);
  5732. offset = fold_build2_loc (input_location, PLUS_EXPR, TREE_TYPE (tmp),
  5733. offset, tmp);
  5734. if (info->ref
  5735. && info->ref->u.ar.dimen_type[n] == DIMEN_ELEMENT)
  5736. {
  5737. /* For elemental dimensions, we only need the offset. */
  5738. continue;
  5739. }
  5740. /* Vector subscripts need copying and are handled elsewhere. */
  5741. if (info->ref)
  5742. gcc_assert (info->ref->u.ar.dimen_type[n] == DIMEN_RANGE);
/* Look for the corresponding scalarizer dimension: dim.  */
  5744. for (dim = 0; dim < ndim; dim++)
  5745. if (ss->dim[dim] == n)
  5746. break;
/* Loop exited early: the DIM being looked for has been found.  */
  5748. gcc_assert (dim < ndim);
  5749. /* Set the new lower bound. */
  5750. from = loop.from[dim];
  5751. to = loop.to[dim];
/* If we have an array section or are assigning, make sure that
the lower bound is 1.  References to the full
array should otherwise keep the original bounds.  */
  5755. if ((!info->ref
  5756. || info->ref->u.ar.type != AR_FULL)
  5757. && !integer_onep (from))
  5758. {
  5759. tmp = fold_build2_loc (input_location, MINUS_EXPR,
  5760. gfc_array_index_type, gfc_index_one_node,
  5761. from);
  5762. to = fold_build2_loc (input_location, PLUS_EXPR,
  5763. gfc_array_index_type, to, tmp);
  5764. from = gfc_index_one_node;
  5765. }
  5766. gfc_conv_descriptor_lbound_set (&loop.pre, parm,
  5767. gfc_rank_cst[dim], from);
  5768. /* Set the new upper bound. */
  5769. gfc_conv_descriptor_ubound_set (&loop.pre, parm,
  5770. gfc_rank_cst[dim], to);
  5771. /* Multiply the stride by the section stride to get the
  5772. total stride. */
  5773. stride = fold_build2_loc (input_location, MULT_EXPR,
  5774. gfc_array_index_type,
  5775. stride, info->stride[n]);
  5776. if (se->direct_byref
  5777. && ((info->ref && info->ref->u.ar.type != AR_FULL)
  5778. || (expr->expr_type == EXPR_ARRAY && se->use_offset)))
  5779. {
  5780. base = fold_build2_loc (input_location, MINUS_EXPR,
  5781. TREE_TYPE (base), base, stride);
  5782. }
  5783. else if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)) || se->use_offset)
  5784. {
  5785. tmp = gfc_conv_array_lbound (desc, n);
  5786. tmp = fold_build2_loc (input_location, MINUS_EXPR,
  5787. TREE_TYPE (base), tmp, loop.from[dim]);
  5788. tmp = fold_build2_loc (input_location, MULT_EXPR,
  5789. TREE_TYPE (base), tmp,
  5790. gfc_conv_array_stride (desc, n));
  5791. base = fold_build2_loc (input_location, PLUS_EXPR,
  5792. TREE_TYPE (base), tmp, base);
  5793. }
  5794. /* Store the new stride. */
  5795. gfc_conv_descriptor_stride_set (&loop.pre, parm,
  5796. gfc_rank_cst[dim], stride);
  5797. }
  5798. for (n = loop.dimen; n < loop.dimen + codim; n++)
  5799. {
  5800. from = loop.from[n];
  5801. to = loop.to[n];
  5802. gfc_conv_descriptor_lbound_set (&loop.pre, parm,
  5803. gfc_rank_cst[n], from);
  5804. if (n < loop.dimen + codim - 1)
  5805. gfc_conv_descriptor_ubound_set (&loop.pre, parm,
  5806. gfc_rank_cst[n], to);
  5807. }
  5808. if (se->data_not_needed)
  5809. gfc_conv_descriptor_data_set (&loop.pre, parm,
  5810. gfc_index_zero_node);
  5811. else
  5812. /* Point the data pointer at the 1st element in the section. */
  5813. gfc_get_dataptr_offset (&loop.pre, parm, desc, offset,
  5814. subref_array_target, expr);
  5815. if (((se->direct_byref || GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
  5816. && !se->data_not_needed)
  5817. || (se->use_offset && base != NULL_TREE))
  5818. {
  5819. /* Set the offset. */
  5820. gfc_conv_descriptor_offset_set (&loop.pre, parm, base);
  5821. }
  5822. else
  5823. {
/* Only the callee knows what the correct offset is, so just set
it to zero here.  */
  5826. gfc_conv_descriptor_offset_set (&loop.pre, parm, gfc_index_zero_node);
  5827. }
  5828. desc = parm;
  5829. }
  5830. if (!se->direct_byref || se->byref_noassign)
  5831. {
  5832. /* Get a pointer to the new descriptor. */
  5833. if (se->want_pointer)
  5834. se->expr = gfc_build_addr_expr (NULL_TREE, desc);
  5835. else
  5836. se->expr = desc;
  5837. }
  5838. gfc_add_block_to_block (&se->pre, &loop.pre);
  5839. gfc_add_block_to_block (&se->post, &loop.post);
  5840. /* Cleanup the scalarizer. */
  5841. gfc_cleanup_loop (&loop);
  5842. }
  5843. /* Helper function for gfc_conv_array_parameter if array size needs to be
  5844. computed. */
  5845. static void
  5846. array_parameter_size (tree desc, gfc_expr *expr, tree *size)
  5847. {
  5848. tree elem;
  5849. if (GFC_ARRAY_TYPE_P (TREE_TYPE (desc)))
  5850. *size = GFC_TYPE_ARRAY_SIZE (TREE_TYPE (desc));
  5851. else if (expr->rank > 1)
  5852. *size = build_call_expr_loc (input_location,
  5853. gfor_fndecl_size0, 1,
  5854. gfc_build_addr_expr (NULL, desc));
  5855. else
  5856. {
  5857. tree ubound = gfc_conv_descriptor_ubound_get (desc, gfc_index_zero_node);
  5858. tree lbound = gfc_conv_descriptor_lbound_get (desc, gfc_index_zero_node);
  5859. *size = fold_build2_loc (input_location, MINUS_EXPR,
  5860. gfc_array_index_type, ubound, lbound);
  5861. *size = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
  5862. *size, gfc_index_one_node);
  5863. *size = fold_build2_loc (input_location, MAX_EXPR, gfc_array_index_type,
  5864. *size, gfc_index_zero_node);
  5865. }
  5866. elem = TYPE_SIZE_UNIT (gfc_get_element_type (TREE_TYPE (desc)));
  5867. *size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
  5868. *size, fold_convert (gfc_array_index_type, elem));
  5869. }
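/* Note added for exposition (not part of the original sources): for a
   rank-1 descriptor the size computed above is
       max (ubound - lbound + 1, 0) * element_size
   in bytes; for higher rank the library size function referenced by
   gfor_fndecl_size0 is called on the descriptor instead, and for
   descriptorless arrays the size recorded in the type is used, likewise
   scaled by the element size.  */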
  5870. /* Convert an array for passing as an actual parameter. */
  5871. /* TODO: Optimize passing g77 arrays. */
  5872. void
  5873. gfc_conv_array_parameter (gfc_se * se, gfc_expr * expr, bool g77,
  5874. const gfc_symbol *fsym, const char *proc_name,
  5875. tree *size)
  5876. {
  5877. tree ptr;
  5878. tree desc;
  5879. tree tmp = NULL_TREE;
  5880. tree stmt;
  5881. tree parent = DECL_CONTEXT (current_function_decl);
  5882. bool full_array_var;
  5883. bool this_array_result;
  5884. bool contiguous;
  5885. bool no_pack;
  5886. bool array_constructor;
  5887. bool good_allocatable;
  5888. bool ultimate_ptr_comp;
  5889. bool ultimate_alloc_comp;
  5890. gfc_symbol *sym;
  5891. stmtblock_t block;
  5892. gfc_ref *ref;
  5893. ultimate_ptr_comp = false;
  5894. ultimate_alloc_comp = false;
  5895. for (ref = expr->ref; ref; ref = ref->next)
  5896. {
  5897. if (ref->next == NULL)
  5898. break;
  5899. if (ref->type == REF_COMPONENT)
  5900. {
  5901. ultimate_ptr_comp = ref->u.c.component->attr.pointer;
  5902. ultimate_alloc_comp = ref->u.c.component->attr.allocatable;
  5903. }
  5904. }
  5905. full_array_var = false;
  5906. contiguous = false;
  5907. if (expr->expr_type == EXPR_VARIABLE && ref && !ultimate_ptr_comp)
  5908. full_array_var = gfc_full_array_ref_p (ref, &contiguous);
  5909. sym = full_array_var ? expr->symtree->n.sym : NULL;
  5910. /* The symbol should have an array specification. */
  5911. gcc_assert (!sym || sym->as || ref->u.ar.as);
  5912. if (expr->expr_type == EXPR_ARRAY && expr->ts.type == BT_CHARACTER)
  5913. {
  5914. get_array_ctor_strlen (&se->pre, expr->value.constructor, &tmp);
  5915. expr->ts.u.cl->backend_decl = tmp;
  5916. se->string_length = tmp;
  5917. }
  5918. /* Is this the result of the enclosing procedure? */
  5919. this_array_result = (full_array_var && sym->attr.flavor == FL_PROCEDURE);
  5920. if (this_array_result
  5921. && (sym->backend_decl != current_function_decl)
  5922. && (sym->backend_decl != parent))
  5923. this_array_result = false;
/* Pass the address of the array if it is not a pointer or assumed-shape array.  */
  5925. if (full_array_var && g77 && !this_array_result
  5926. && sym->ts.type != BT_DERIVED && sym->ts.type != BT_CLASS)
  5927. {
  5928. tmp = gfc_get_symbol_decl (sym);
  5929. if (sym->ts.type == BT_CHARACTER)
  5930. se->string_length = sym->ts.u.cl->backend_decl;
  5931. if (!sym->attr.pointer
  5932. && sym->as
  5933. && sym->as->type != AS_ASSUMED_SHAPE
  5934. && sym->as->type != AS_DEFERRED
  5935. && sym->as->type != AS_ASSUMED_RANK
  5936. && !sym->attr.allocatable)
  5937. {
  5938. /* Some variables are declared directly, others are declared as
  5939. pointers and allocated on the heap. */
  5940. if (sym->attr.dummy || POINTER_TYPE_P (TREE_TYPE (tmp)))
  5941. se->expr = tmp;
  5942. else
  5943. se->expr = gfc_build_addr_expr (NULL_TREE, tmp);
  5944. if (size)
  5945. array_parameter_size (tmp, expr, size);
  5946. return;
  5947. }
  5948. if (sym->attr.allocatable)
  5949. {
  5950. if (sym->attr.dummy || sym->attr.result)
  5951. {
  5952. gfc_conv_expr_descriptor (se, expr);
  5953. tmp = se->expr;
  5954. }
  5955. if (size)
  5956. array_parameter_size (tmp, expr, size);
  5957. se->expr = gfc_conv_array_data (tmp);
  5958. return;
  5959. }
  5960. }
  5961. /* A convenient reduction in scope. */
  5962. contiguous = g77 && !this_array_result && contiguous;
/* There is no need to pack and unpack the array if it is contiguous
and not a deferred- or assumed-shape array, or if it is simply
contiguous.  */
  5966. no_pack = ((sym && sym->as
  5967. && !sym->attr.pointer
  5968. && sym->as->type != AS_DEFERRED
  5969. && sym->as->type != AS_ASSUMED_RANK
  5970. && sym->as->type != AS_ASSUMED_SHAPE)
  5971. ||
  5972. (ref && ref->u.ar.as
  5973. && ref->u.ar.as->type != AS_DEFERRED
  5974. && ref->u.ar.as->type != AS_ASSUMED_RANK
  5975. && ref->u.ar.as->type != AS_ASSUMED_SHAPE)
  5976. ||
  5977. gfc_is_simply_contiguous (expr, false));
  5978. no_pack = contiguous && no_pack;
  5979. /* Array constructors are always contiguous and do not need packing. */
  5980. array_constructor = g77 && !this_array_result && expr->expr_type == EXPR_ARRAY;
  5981. /* Same is true of contiguous sections from allocatable variables. */
  5982. good_allocatable = contiguous
  5983. && expr->symtree
  5984. && expr->symtree->n.sym->attr.allocatable;
  5985. /* Or ultimate allocatable components. */
  5986. ultimate_alloc_comp = contiguous && ultimate_alloc_comp;
  5987. if (no_pack || array_constructor || good_allocatable || ultimate_alloc_comp)
  5988. {
  5989. gfc_conv_expr_descriptor (se, expr);
  5990. if (expr->ts.type == BT_CHARACTER)
  5991. se->string_length = expr->ts.u.cl->backend_decl;
  5992. if (size)
  5993. array_parameter_size (se->expr, expr, size);
  5994. se->expr = gfc_conv_array_data (se->expr);
  5995. return;
  5996. }
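/* Note added for exposition (not part of the original sources): array
   constructors and contiguous allocatable actuals take the early return
   above and are passed by their data pointer without packing; a
   non-contiguous actual such as a strided section falls through to the
   pack/unpack code further down.  */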
  5997. if (this_array_result)
  5998. {
  5999. /* Result of the enclosing function. */
  6000. gfc_conv_expr_descriptor (se, expr);
  6001. if (size)
  6002. array_parameter_size (se->expr, expr, size);
  6003. se->expr = gfc_build_addr_expr (NULL_TREE, se->expr);
  6004. if (g77 && TREE_TYPE (TREE_TYPE (se->expr)) != NULL_TREE
  6005. && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (TREE_TYPE (se->expr))))
  6006. se->expr = gfc_conv_array_data (build_fold_indirect_ref_loc (input_location,
  6007. se->expr));
  6008. return;
  6009. }
  6010. else
  6011. {
  6012. /* Every other type of array. */
  6013. se->want_pointer = 1;
  6014. gfc_conv_expr_descriptor (se, expr);
  6015. if (size)
  6016. array_parameter_size (build_fold_indirect_ref_loc (input_location,
  6017. se->expr),
  6018. expr, size);
  6019. }
/* Deallocate the allocatable components of structures that are
not variables.  */
  6022. if ((expr->ts.type == BT_DERIVED || expr->ts.type == BT_CLASS)
  6023. && expr->ts.u.derived->attr.alloc_comp
  6024. && expr->expr_type != EXPR_VARIABLE)
  6025. {
  6026. tmp = build_fold_indirect_ref_loc (input_location, se->expr);
  6027. tmp = gfc_deallocate_alloc_comp (expr->ts.u.derived, tmp, expr->rank);
  6028. /* The components shall be deallocated before their containing entity. */
  6029. gfc_prepend_expr_to_block (&se->post, tmp);
  6030. }
  6031. if (g77 || (fsym && fsym->attr.contiguous
  6032. && !gfc_is_simply_contiguous (expr, false)))
  6033. {
  6034. tree origptr = NULL_TREE;
  6035. desc = se->expr;
  6036. /* For contiguous arrays, save the original value of the descriptor. */
  6037. if (!g77)
  6038. {
  6039. origptr = gfc_create_var (pvoid_type_node, "origptr");
  6040. tmp = build_fold_indirect_ref_loc (input_location, desc);
  6041. tmp = gfc_conv_array_data (tmp);
  6042. tmp = fold_build2_loc (input_location, MODIFY_EXPR,
  6043. TREE_TYPE (origptr), origptr,
  6044. fold_convert (TREE_TYPE (origptr), tmp));
  6045. gfc_add_expr_to_block (&se->pre, tmp);
  6046. }
  6047. /* Repack the array. */
  6048. if (warn_array_temporaries)
  6049. {
  6050. if (fsym)
  6051. gfc_warning (OPT_Warray_temporaries,
  6052. "Creating array temporary at %L for argument %qs",
  6053. &expr->where, fsym->name);
  6054. else
  6055. gfc_warning (OPT_Warray_temporaries,
  6056. "Creating array temporary at %L", &expr->where);
  6057. }
  6058. ptr = build_call_expr_loc (input_location,
  6059. gfor_fndecl_in_pack, 1, desc);
  6060. if (fsym && fsym->attr.optional && sym && sym->attr.optional)
  6061. {
  6062. tmp = gfc_conv_expr_present (sym);
  6063. ptr = build3_loc (input_location, COND_EXPR, TREE_TYPE (se->expr),
  6064. tmp, fold_convert (TREE_TYPE (se->expr), ptr),
  6065. fold_convert (TREE_TYPE (se->expr), null_pointer_node));
  6066. }
  6067. ptr = gfc_evaluate_now (ptr, &se->pre);
  6068. /* Use the packed data for the actual argument, except for contiguous arrays,
  6069. where the descriptor's data component is set. */
  6070. if (g77)
  6071. se->expr = ptr;
  6072. else
  6073. {
  6074. tmp = build_fold_indirect_ref_loc (input_location, desc);
  6075. gfc_ss * ss = gfc_walk_expr (expr);
  6076. if (!transposed_dims (ss))
  6077. gfc_conv_descriptor_data_set (&se->pre, tmp, ptr);
  6078. else
  6079. {
  6080. tree old_field, new_field;
  6081. /* The original descriptor has transposed dims so we can't reuse
  6082. it directly; we have to create a new one. */
  6083. tree old_desc = tmp;
  6084. tree new_desc = gfc_create_var (TREE_TYPE (old_desc), "arg_desc");
  6085. old_field = gfc_conv_descriptor_dtype (old_desc);
  6086. new_field = gfc_conv_descriptor_dtype (new_desc);
  6087. gfc_add_modify (&se->pre, new_field, old_field);
  6088. old_field = gfc_conv_descriptor_offset (old_desc);
  6089. new_field = gfc_conv_descriptor_offset (new_desc);
  6090. gfc_add_modify (&se->pre, new_field, old_field);
  6091. for (int i = 0; i < expr->rank; i++)
  6092. {
  6093. old_field = gfc_conv_descriptor_dimension (old_desc,
  6094. gfc_rank_cst[get_array_ref_dim_for_loop_dim (ss, i)]);
  6095. new_field = gfc_conv_descriptor_dimension (new_desc,
  6096. gfc_rank_cst[i]);
  6097. gfc_add_modify (&se->pre, new_field, old_field);
  6098. }
  6099. if (flag_coarray == GFC_FCOARRAY_LIB
  6100. && GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (old_desc))
  6101. && GFC_TYPE_ARRAY_AKIND (TREE_TYPE (old_desc))
  6102. == GFC_ARRAY_ALLOCATABLE)
  6103. {
  6104. old_field = gfc_conv_descriptor_token (old_desc);
  6105. new_field = gfc_conv_descriptor_token (new_desc);
  6106. gfc_add_modify (&se->pre, new_field, old_field);
  6107. }
  6108. gfc_conv_descriptor_data_set (&se->pre, new_desc, ptr);
  6109. se->expr = gfc_build_addr_expr (NULL_TREE, new_desc);
  6110. }
  6111. gfc_free_ss (ss);
  6112. }
  6113. if (gfc_option.rtcheck & GFC_RTCHECK_ARRAY_TEMPS)
  6114. {
  6115. char * msg;
  6116. if (fsym && proc_name)
  6117. msg = xasprintf ("An array temporary was created for argument "
  6118. "'%s' of procedure '%s'", fsym->name, proc_name);
  6119. else
  6120. msg = xasprintf ("An array temporary was created");
  6121. tmp = build_fold_indirect_ref_loc (input_location,
  6122. desc);
  6123. tmp = gfc_conv_array_data (tmp);
  6124. tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
  6125. fold_convert (TREE_TYPE (tmp), ptr), tmp);
  6126. if (fsym && fsym->attr.optional && sym && sym->attr.optional)
  6127. tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
  6128. boolean_type_node,
  6129. gfc_conv_expr_present (sym), tmp);
  6130. gfc_trans_runtime_check (false, true, tmp, &se->pre,
  6131. &expr->where, msg);
  6132. free (msg);
  6133. }
  6134. gfc_start_block (&block);
  6135. /* Copy the data back. */
  6136. if (fsym == NULL || fsym->attr.intent != INTENT_IN)
  6137. {
  6138. tmp = build_call_expr_loc (input_location,
  6139. gfor_fndecl_in_unpack, 2, desc, ptr);
  6140. gfc_add_expr_to_block (&block, tmp);
  6141. }
  6142. /* Free the temporary. */
  6143. tmp = gfc_call_free (convert (pvoid_type_node, ptr));
  6144. gfc_add_expr_to_block (&block, tmp);
  6145. stmt = gfc_finish_block (&block);
  6146. gfc_init_block (&block);
  6147. /* Only if it was repacked. This code needs to be executed before the
  6148. loop cleanup code. */
  6149. tmp = build_fold_indirect_ref_loc (input_location,
  6150. desc);
  6151. tmp = gfc_conv_array_data (tmp);
  6152. tmp = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
  6153. fold_convert (TREE_TYPE (tmp), ptr), tmp);
  6154. if (fsym && fsym->attr.optional && sym && sym->attr.optional)
  6155. tmp = fold_build2_loc (input_location, TRUTH_AND_EXPR,
  6156. boolean_type_node,
  6157. gfc_conv_expr_present (sym), tmp);
  6158. tmp = build3_v (COND_EXPR, tmp, stmt, build_empty_stmt (input_location));
  6159. gfc_add_expr_to_block (&block, tmp);
  6160. gfc_add_block_to_block (&block, &se->post);
  6161. gfc_init_block (&se->post);
  6162. /* Reset the descriptor pointer. */
  6163. if (!g77)
  6164. {
  6165. tmp = build_fold_indirect_ref_loc (input_location, desc);
  6166. gfc_conv_descriptor_data_set (&se->post, tmp, origptr);
  6167. }
  6168. gfc_add_block_to_block (&se->post, &block);
  6169. }
  6170. }
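/* Rough sketch (illustrative pseudo-code only, added for exposition) of
   what the repacking branch above generates for a g77-style dummy:

     ptr = internal_pack (&desc);
     ... pass ptr to the callee ...
     if (desc.data != ptr)
       {
         internal_unpack (&desc, ptr);   <- skipped for INTENT(IN) dummies
         free (ptr);
       }
 */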
  6171. /* Generate code to deallocate an array, if it is allocated. */
  6172. tree
  6173. gfc_trans_dealloc_allocated (tree descriptor, bool coarray, gfc_expr *expr)
  6174. {
  6175. tree tmp;
  6176. tree var;
  6177. stmtblock_t block;
  6178. gfc_start_block (&block);
  6179. var = gfc_conv_descriptor_data_get (descriptor);
  6180. STRIP_NOPS (var);
/* Call array_deallocate with an int * present in the second argument.
Although it is ignored here, its presence ensures that arrays that
are already deallocated are ignored.  */
  6184. tmp = gfc_deallocate_with_status (coarray ? descriptor : var, NULL_TREE,
  6185. NULL_TREE, NULL_TREE, NULL_TREE, true,
  6186. expr, coarray);
  6187. gfc_add_expr_to_block (&block, tmp);
  6188. /* Zero the data pointer. */
  6189. tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
  6190. var, build_int_cst (TREE_TYPE (var), 0));
  6191. gfc_add_expr_to_block (&block, tmp);
  6192. return gfc_finish_block (&block);
  6193. }
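/* Rough sketch (illustrative pseudo-code only, added for exposition) of
   the net effect of the function above for a non-coarray argument:

     if (desc.data is allocated)
       deallocate (desc.data);
     desc.data = NULL;
 */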
  6194. /* This helper function calculates the size in words of a full array. */
  6195. tree
  6196. gfc_full_array_size (stmtblock_t *block, tree decl, int rank)
  6197. {
  6198. tree idx;
  6199. tree nelems;
  6200. tree tmp;
  6201. idx = gfc_rank_cst[rank - 1];
  6202. nelems = gfc_conv_descriptor_ubound_get (decl, idx);
  6203. tmp = gfc_conv_descriptor_lbound_get (decl, idx);
  6204. tmp = fold_build2_loc (input_location, MINUS_EXPR, gfc_array_index_type,
  6205. nelems, tmp);
  6206. tmp = fold_build2_loc (input_location, PLUS_EXPR, gfc_array_index_type,
  6207. tmp, gfc_index_one_node);
  6208. tmp = gfc_evaluate_now (tmp, block);
  6209. nelems = gfc_conv_descriptor_stride_get (decl, idx);
  6210. tmp = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
  6211. nelems, tmp);
  6212. return gfc_evaluate_now (tmp, block);
  6213. }
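/* Note added for exposition (not part of the original sources): the
   value computed above is
       (ubound(rank) - lbound(rank) + 1) * stride(rank),
   i.e. the extent of the last dimension times its stride, which for a
   full array equals the total number of elements.  */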
  6214. /* Allocate dest to the same size as src, and copy src -> dest.
  6215. If no_malloc is set, only the copy is done. */
  6216. static tree
  6217. duplicate_allocatable (tree dest, tree src, tree type, int rank,
  6218. bool no_malloc, bool no_memcpy, tree str_sz)
  6219. {
  6220. tree tmp;
  6221. tree size;
  6222. tree nelems;
  6223. tree null_cond;
  6224. tree null_data;
  6225. stmtblock_t block;
  6226. /* If the source is null, set the destination to null. Then,
  6227. allocate memory to the destination. */
  6228. gfc_init_block (&block);
  6229. if (!GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (dest)))
  6230. {
  6231. tmp = null_pointer_node;
  6232. tmp = fold_build2_loc (input_location, MODIFY_EXPR, type, dest, tmp);
  6233. gfc_add_expr_to_block (&block, tmp);
  6234. null_data = gfc_finish_block (&block);
  6235. gfc_init_block (&block);
  6236. if (str_sz != NULL_TREE)
  6237. size = str_sz;
  6238. else
  6239. size = TYPE_SIZE_UNIT (TREE_TYPE (type));
  6240. if (!no_malloc)
  6241. {
  6242. tmp = gfc_call_malloc (&block, type, size);
  6243. tmp = fold_build2_loc (input_location, MODIFY_EXPR, void_type_node,
  6244. dest, fold_convert (type, tmp));
  6245. gfc_add_expr_to_block (&block, tmp);
  6246. }
  6247. if (!no_memcpy)
  6248. {
  6249. tmp = builtin_decl_explicit (BUILT_IN_MEMCPY);
  6250. tmp = build_call_expr_loc (input_location, tmp, 3, dest, src,
  6251. fold_convert (size_type_node, size));
  6252. gfc_add_expr_to_block (&block, tmp);
  6253. }
  6254. }
  6255. else
  6256. {
  6257. gfc_conv_descriptor_data_set (&block, dest, null_pointer_node);
  6258. null_data = gfc_finish_block (&block);
  6259. gfc_init_block (&block);
  6260. if (rank)
  6261. nelems = gfc_full_array_size (&block, src, rank);
  6262. else
  6263. nelems = gfc_index_one_node;
  6264. if (str_sz != NULL_TREE)
  6265. tmp = fold_convert (gfc_array_index_type, str_sz);
  6266. else
  6267. tmp = fold_convert (gfc_array_index_type,
  6268. TYPE_SIZE_UNIT (gfc_get_element_type (type)));
  6269. size = fold_build2_loc (input_location, MULT_EXPR, gfc_array_index_type,
  6270. nelems, tmp);
  6271. if (!no_malloc)
  6272. {
  6273. tmp = TREE_TYPE (gfc_conv_descriptor_data_get (src));
  6274. tmp = gfc_call_malloc (&block, tmp, size);
  6275. gfc_conv_descriptor_data_set (&block, dest, tmp);
  6276. }
/* We know the temporary and the value will be the same length,
so we can use memcpy.  */
  6279. if (!no_memcpy)
  6280. {
  6281. tmp = builtin_decl_explicit (BUILT_IN_MEMCPY);
  6282. tmp = build_call_expr_loc (input_location, tmp, 3,
  6283. gfc_conv_descriptor_data_get (dest),
  6284. gfc_conv_descriptor_data_get (src),
  6285. fold_convert (size_type_node, size));
  6286. gfc_add_expr_to_block (&block, tmp);
  6287. }
  6288. }
  6289. tmp = gfc_finish_block (&block);
  6290. /* Null the destination if the source is null; otherwise do
  6291. the allocate and copy. */
  6292. if (!GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (src)))
  6293. null_cond = src;
  6294. else
  6295. null_cond = gfc_conv_descriptor_data_get (src);
  6296. null_cond = convert (pvoid_type_node, null_cond);
  6297. null_cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
  6298. null_cond, null_pointer_node);
  6299. return build3_v (COND_EXPR, null_cond, tmp, null_data);
  6300. }
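/* Rough sketch (illustrative pseudo-code only, added for exposition) of
   what duplicate_allocatable emits for a descriptor argument:

     if (src.data == NULL)
       dest.data = NULL;
     else
       {
         size = full_array_size (src) * element_size;
         dest.data = malloc (size);            <- skipped if no_malloc
         memcpy (dest.data, src.data, size);   <- skipped if no_memcpy
       }
 */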
  6301. /* Allocate dest to the same size as src, and copy data src -> dest. */
  6302. tree
  6303. gfc_duplicate_allocatable (tree dest, tree src, tree type, int rank)
  6304. {
  6305. return duplicate_allocatable (dest, src, type, rank, false, false,
  6306. NULL_TREE);
  6307. }
  6308. /* Copy data src -> dest. */
  6309. tree
  6310. gfc_copy_allocatable_data (tree dest, tree src, tree type, int rank)
  6311. {
  6312. return duplicate_allocatable (dest, src, type, rank, true, false,
  6313. NULL_TREE);
  6314. }
  6315. /* Allocate dest to the same size as src, but don't copy anything. */
  6316. tree
  6317. gfc_duplicate_allocatable_nocopy (tree dest, tree src, tree type, int rank)
  6318. {
  6319. return duplicate_allocatable (dest, src, type, rank, false, true, NULL_TREE);
  6320. }
/* Recursively traverse an object of derived type, generating code to
deallocate, nullify or copy allocatable components.  This is the
workhorse function for the functions named in this enum.  */
  6324. enum {DEALLOCATE_ALLOC_COMP = 1, DEALLOCATE_ALLOC_COMP_NO_CAF,
  6325. NULLIFY_ALLOC_COMP, COPY_ALLOC_COMP, COPY_ONLY_ALLOC_COMP,
  6326. COPY_ALLOC_COMP_CAF};
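/* Illustrative Fortran example (added for exposition, not from the
   original sources) of what structure_alloc_comps has to handle:

       type :: t
         integer, allocatable :: a(:)
       end type
       type(t), allocatable :: x(:)

   Deallocating, nullifying or copying X means visiting X(i)%A for every
   element I, which is why the function builds a scalarized loop when it
   is handed an array of derived type.  */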
  6327. static tree
  6328. structure_alloc_comps (gfc_symbol * der_type, tree decl,
  6329. tree dest, int rank, int purpose)
  6330. {
  6331. gfc_component *c;
  6332. gfc_loopinfo loop;
  6333. stmtblock_t fnblock;
  6334. stmtblock_t loopbody;
  6335. stmtblock_t tmpblock;
  6336. tree decl_type;
  6337. tree tmp;
  6338. tree comp;
  6339. tree dcmp;
  6340. tree nelems;
  6341. tree index;
  6342. tree var;
  6343. tree cdecl;
  6344. tree ctype;
  6345. tree vref, dref;
  6346. tree null_cond = NULL_TREE;
  6347. bool called_dealloc_with_status;
  6348. gfc_init_block (&fnblock);
  6349. decl_type = TREE_TYPE (decl);
  6350. if ((POINTER_TYPE_P (decl_type) && rank != 0)
  6351. || (TREE_CODE (decl_type) == REFERENCE_TYPE && rank == 0))
  6352. decl = build_fold_indirect_ref_loc (input_location, decl);
/* Just in case it gets dereferenced.  */
  6354. decl_type = TREE_TYPE (decl);
/* If this is an array of derived types with allocatable components,
build a loop and recursively call this function.  */
  6357. if (TREE_CODE (decl_type) == ARRAY_TYPE
  6358. || (GFC_DESCRIPTOR_TYPE_P (decl_type) && rank != 0))
  6359. {
  6360. tmp = gfc_conv_array_data (decl);
  6361. var = build_fold_indirect_ref_loc (input_location,
  6362. tmp);
  6363. /* Get the number of elements - 1 and set the counter. */
  6364. if (GFC_DESCRIPTOR_TYPE_P (decl_type))
  6365. {
  6366. /* Use the descriptor for an allocatable array. Since this
  6367. is a full array reference, we only need the descriptor
  6368. information from dimension = rank. */
  6369. tmp = gfc_full_array_size (&fnblock, decl, rank);
  6370. tmp = fold_build2_loc (input_location, MINUS_EXPR,
  6371. gfc_array_index_type, tmp,
  6372. gfc_index_one_node);
  6373. null_cond = gfc_conv_descriptor_data_get (decl);
  6374. null_cond = fold_build2_loc (input_location, NE_EXPR,
  6375. boolean_type_node, null_cond,
  6376. build_int_cst (TREE_TYPE (null_cond), 0));
  6377. }
  6378. else
  6379. {
  6380. /* Otherwise use the TYPE_DOMAIN information. */
  6381. tmp = array_type_nelts (decl_type);
  6382. tmp = fold_convert (gfc_array_index_type, tmp);
  6383. }
  6384. /* Remember that this is, in fact, the no. of elements - 1. */
  6385. nelems = gfc_evaluate_now (tmp, &fnblock);
  6386. index = gfc_create_var (gfc_array_index_type, "S");
  6387. /* Build the body of the loop. */
  6388. gfc_init_block (&loopbody);
  6389. vref = gfc_build_array_ref (var, index, NULL);
  6390. if (purpose == COPY_ALLOC_COMP)
  6391. {
  6392. if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (dest)))
  6393. {
  6394. tmp = gfc_duplicate_allocatable (dest, decl, decl_type, rank);
  6395. gfc_add_expr_to_block (&fnblock, tmp);
  6396. }
  6397. tmp = build_fold_indirect_ref_loc (input_location,
  6398. gfc_conv_array_data (dest));
  6399. dref = gfc_build_array_ref (tmp, index, NULL);
  6400. tmp = structure_alloc_comps (der_type, vref, dref, rank, purpose);
  6401. }
  6402. else if (purpose == COPY_ONLY_ALLOC_COMP)
  6403. {
  6404. tmp = build_fold_indirect_ref_loc (input_location,
  6405. gfc_conv_array_data (dest));
  6406. dref = gfc_build_array_ref (tmp, index, NULL);
  6407. tmp = structure_alloc_comps (der_type, vref, dref, rank,
  6408. COPY_ALLOC_COMP);
  6409. }
  6410. else
  6411. tmp = structure_alloc_comps (der_type, vref, NULL_TREE, rank, purpose);
  6412. gfc_add_expr_to_block (&loopbody, tmp);
  6413. /* Build the loop and return. */
  6414. gfc_init_loopinfo (&loop);
  6415. loop.dimen = 1;
  6416. loop.from[0] = gfc_index_zero_node;
  6417. loop.loopvar[0] = index;
  6418. loop.to[0] = nelems;
  6419. gfc_trans_scalarizing_loops (&loop, &loopbody);
  6420. gfc_add_block_to_block (&fnblock, &loop.pre);
  6421. tmp = gfc_finish_block (&fnblock);
  6422. if (null_cond != NULL_TREE)
  6423. tmp = build3_v (COND_EXPR, null_cond, tmp,
  6424. build_empty_stmt (input_location));
  6425. return tmp;
  6426. }

  /* Otherwise, act on the components or recursively call self to
     act on a chain of components.  */
  for (c = der_type->components; c; c = c->next)
    {
      bool cmp_has_alloc_comps = (c->ts.type == BT_DERIVED
                                  || c->ts.type == BT_CLASS)
                                   && c->ts.u.derived->attr.alloc_comp;

      cdecl = c->backend_decl;
      ctype = TREE_TYPE (cdecl);

      switch (purpose)
        {
        case DEALLOCATE_ALLOC_COMP:
        case DEALLOCATE_ALLOC_COMP_NO_CAF:
          /* gfc_deallocate_scalar_with_status calls gfc_deallocate_alloc_comp
             (i.e. this function) so generate all the calls and suppress the
             recursion from here, if necessary.  */
          called_dealloc_with_status = false;
          gfc_init_block (&tmpblock);

          if ((c->ts.type == BT_DERIVED && !c->attr.pointer)
              || (c->ts.type == BT_CLASS && !CLASS_DATA (c)->attr.class_pointer))
            {
              comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
                                      decl, cdecl, NULL_TREE);

              /* The finalizer frees allocatable components.  */
              called_dealloc_with_status
                = gfc_add_comp_finalizer_call (&tmpblock, comp, c,
                                               purpose == DEALLOCATE_ALLOC_COMP);
            }
          else
            comp = NULL_TREE;

          if (c->attr.allocatable && !c->attr.proc_pointer
              && (c->attr.dimension
                  || (c->attr.codimension
                      && purpose != DEALLOCATE_ALLOC_COMP_NO_CAF)))
            {
              if (comp == NULL_TREE)
                comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
                                        decl, cdecl, NULL_TREE);
              tmp = gfc_trans_dealloc_allocated (comp, c->attr.codimension, NULL);
              gfc_add_expr_to_block (&tmpblock, tmp);
            }
          else if (c->attr.allocatable && !c->attr.codimension)
            {
              /* Allocatable scalar components.  */
              if (comp == NULL_TREE)
                comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
                                        decl, cdecl, NULL_TREE);

              tmp = gfc_deallocate_scalar_with_status (comp, NULL, true, NULL,
                                                       c->ts);
              gfc_add_expr_to_block (&tmpblock, tmp);
              called_dealloc_with_status = true;

              tmp = fold_build2_loc (input_location, MODIFY_EXPR,
                                     void_type_node, comp,
                                     build_int_cst (TREE_TYPE (comp), 0));
              gfc_add_expr_to_block (&tmpblock, tmp);
            }
          else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable
                   && (!CLASS_DATA (c)->attr.codimension
                       || purpose != DEALLOCATE_ALLOC_COMP_NO_CAF))
            {
              /* Allocatable CLASS components.  */

              /* Add reference to '_data' component.  */
              tmp = CLASS_DATA (c)->backend_decl;
              comp = fold_build3_loc (input_location, COMPONENT_REF,
                                      TREE_TYPE (tmp), comp, tmp, NULL_TREE);

              if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (comp)))
                tmp = gfc_trans_dealloc_allocated (comp,
                                                   CLASS_DATA (c)->attr.codimension, NULL);
              else
                {
                  tmp = gfc_deallocate_scalar_with_status (comp, NULL_TREE, true, NULL,
                                                           CLASS_DATA (c)->ts);
                  gfc_add_expr_to_block (&tmpblock, tmp);
                  called_dealloc_with_status = true;

                  tmp = fold_build2_loc (input_location, MODIFY_EXPR,
                                         void_type_node, comp,
                                         build_int_cst (TREE_TYPE (comp), 0));
                }
              gfc_add_expr_to_block (&tmpblock, tmp);
            }

          if (cmp_has_alloc_comps
              && !c->attr.pointer
              && !called_dealloc_with_status)
            {
              /* Do not deallocate the components of ultimate pointer
                 components or iteratively call self if a call has been made
                 to gfc_trans_dealloc_allocated.  */
              comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
                                      decl, cdecl, NULL_TREE);
              rank = c->as ? c->as->rank : 0;
              tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
                                           rank, purpose);
              gfc_add_expr_to_block (&fnblock, tmp);
            }

          /* Now add the deallocation of this component.  */
          gfc_add_block_to_block (&fnblock, &tmpblock);
          break;

        case NULLIFY_ALLOC_COMP:
          if (c->attr.pointer || c->attr.proc_pointer)
            continue;
          else if (c->attr.allocatable
                   && (c->attr.dimension || c->attr.codimension))
            {
              comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
                                      decl, cdecl, NULL_TREE);
              gfc_conv_descriptor_data_set (&fnblock, comp, null_pointer_node);
            }
          else if (c->attr.allocatable)
            {
              /* Allocatable scalar components.  */
              comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
                                      decl, cdecl, NULL_TREE);
              tmp = fold_build2_loc (input_location, MODIFY_EXPR,
                                     void_type_node, comp,
                                     build_int_cst (TREE_TYPE (comp), 0));
              gfc_add_expr_to_block (&fnblock, tmp);
              if (gfc_deferred_strlen (c, &comp))
                {
                  comp = fold_build3_loc (input_location, COMPONENT_REF,
                                          TREE_TYPE (comp),
                                          decl, comp, NULL_TREE);
                  tmp = fold_build2_loc (input_location, MODIFY_EXPR,
                                         TREE_TYPE (comp), comp,
                                         build_int_cst (TREE_TYPE (comp), 0));
                  gfc_add_expr_to_block (&fnblock, tmp);
                }
            }
          else if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
            {
              /* Allocatable CLASS components.  */
              comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
                                      decl, cdecl, NULL_TREE);

              /* Add reference to '_data' component.  */
              tmp = CLASS_DATA (c)->backend_decl;
              comp = fold_build3_loc (input_location, COMPONENT_REF,
                                      TREE_TYPE (tmp), comp, tmp, NULL_TREE);

              if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (comp)))
                gfc_conv_descriptor_data_set (&fnblock, comp, null_pointer_node);
              else
                {
                  tmp = fold_build2_loc (input_location, MODIFY_EXPR,
                                         void_type_node, comp,
                                         build_int_cst (TREE_TYPE (comp), 0));
                  gfc_add_expr_to_block (&fnblock, tmp);
                }
            }
          else if (cmp_has_alloc_comps)
            {
              comp = fold_build3_loc (input_location, COMPONENT_REF, ctype,
                                      decl, cdecl, NULL_TREE);
              rank = c->as ? c->as->rank : 0;
              tmp = structure_alloc_comps (c->ts.u.derived, comp, NULL_TREE,
                                           rank, purpose);
              gfc_add_expr_to_block (&fnblock, tmp);
            }
          break;

        case COPY_ALLOC_COMP_CAF:
          if (!c->attr.codimension
              && (c->ts.type != BT_CLASS || CLASS_DATA (c)->attr.coarray_comp)
              && (c->ts.type != BT_DERIVED
                  || !c->ts.u.derived->attr.coarray_comp))
            continue;

          comp = fold_build3_loc (input_location, COMPONENT_REF, ctype, decl,
                                  cdecl, NULL_TREE);
          dcmp = fold_build3_loc (input_location, COMPONENT_REF, ctype, dest,
                                  cdecl, NULL_TREE);

          if (c->attr.codimension)
            {
              if (c->ts.type == BT_CLASS)
                {
                  comp = gfc_class_data_get (comp);
                  dcmp = gfc_class_data_get (dcmp);
                }
              gfc_conv_descriptor_data_set (&fnblock, dcmp,
                                            gfc_conv_descriptor_data_get (comp));
            }
          else
            {
              tmp = structure_alloc_comps (c->ts.u.derived, comp, dcmp,
                                           rank, purpose);
              gfc_add_expr_to_block (&fnblock, tmp);
            }
          break;

        case COPY_ALLOC_COMP:
          if (c->attr.pointer)
            continue;

          /* We need source and destination components.  */
          comp = fold_build3_loc (input_location, COMPONENT_REF, ctype, decl,
                                  cdecl, NULL_TREE);
          dcmp = fold_build3_loc (input_location, COMPONENT_REF, ctype, dest,
                                  cdecl, NULL_TREE);
          dcmp = fold_convert (TREE_TYPE (comp), dcmp);

          if (c->ts.type == BT_CLASS && CLASS_DATA (c)->attr.allocatable)
            {
              tree ftn_tree;
              tree size;
              tree dst_data;
              tree src_data;
              tree null_data;

              dst_data = gfc_class_data_get (dcmp);
              src_data = gfc_class_data_get (comp);
              size = fold_convert (size_type_node,
                                   gfc_class_vtab_size_get (comp));

              if (CLASS_DATA (c)->attr.dimension)
                {
                  nelems = gfc_conv_descriptor_size (src_data,
                                                     CLASS_DATA (c)->as->rank);
                  size = fold_build2_loc (input_location, MULT_EXPR,
                                          size_type_node, size,
                                          fold_convert (size_type_node,
                                                        nelems));
                }
              else
                nelems = build_int_cst (size_type_node, 1);

              if (CLASS_DATA (c)->attr.dimension
                  || CLASS_DATA (c)->attr.codimension)
                {
                  src_data = gfc_conv_descriptor_data_get (src_data);
                  dst_data = gfc_conv_descriptor_data_get (dst_data);
                }

              gfc_init_block (&tmpblock);

              /* Coarray components have to have the same allocation status and
                 shape/type-parameter/effective-type on the LHS and RHS of an
                 intrinsic assignment.  Hence, we did not deallocate them - and
                 do not allocate them here.  */
              if (!CLASS_DATA (c)->attr.codimension)
                {
                  ftn_tree = builtin_decl_explicit (BUILT_IN_MALLOC);
                  tmp = build_call_expr_loc (input_location, ftn_tree, 1, size);
                  gfc_add_modify (&tmpblock, dst_data,
                                  fold_convert (TREE_TYPE (dst_data), tmp));
                }

              tmp = gfc_copy_class_to_class (comp, dcmp, nelems,
                                             UNLIMITED_POLY (c));
              gfc_add_expr_to_block (&tmpblock, tmp);
              tmp = gfc_finish_block (&tmpblock);

              gfc_init_block (&tmpblock);
              gfc_add_modify (&tmpblock, dst_data,
                              fold_convert (TREE_TYPE (dst_data),
                                            null_pointer_node));
              null_data = gfc_finish_block (&tmpblock);

              null_cond = fold_build2_loc (input_location, NE_EXPR,
                                           boolean_type_node, src_data,
                                           null_pointer_node);

              gfc_add_expr_to_block (&fnblock, build3_v (COND_EXPR, null_cond,
                                                         tmp, null_data));
              continue;
            }

          if (gfc_deferred_strlen (c, &tmp))
            {
              tree len, size;
              len = tmp;
              tmp = fold_build3_loc (input_location, COMPONENT_REF,
                                     TREE_TYPE (len),
                                     decl, len, NULL_TREE);
              len = fold_build3_loc (input_location, COMPONENT_REF,
                                     TREE_TYPE (len),
                                     dest, len, NULL_TREE);
              tmp = fold_build2_loc (input_location, MODIFY_EXPR,
                                     TREE_TYPE (len), len, tmp);
              gfc_add_expr_to_block (&fnblock, tmp);
              size = size_of_string_in_bytes (c->ts.kind, len);
              tmp = duplicate_allocatable (dcmp, comp, ctype, rank,
                                           false, false, size);
              gfc_add_expr_to_block (&fnblock, tmp);
            }
          else if (c->attr.allocatable && !c->attr.proc_pointer
                   && !cmp_has_alloc_comps)
            {
              rank = c->as ? c->as->rank : 0;
              if (c->attr.codimension)
                tmp = gfc_copy_allocatable_data (dcmp, comp, ctype, rank);
              else
                tmp = gfc_duplicate_allocatable (dcmp, comp, ctype, rank);
              gfc_add_expr_to_block (&fnblock, tmp);
            }

          if (cmp_has_alloc_comps)
            {
              rank = c->as ? c->as->rank : 0;
              tmp = fold_convert (TREE_TYPE (dcmp), comp);
              gfc_add_modify (&fnblock, dcmp, tmp);
              tmp = structure_alloc_comps (c->ts.u.derived, comp, dcmp,
                                           rank, purpose);
              gfc_add_expr_to_block (&fnblock, tmp);
            }
          break;

        default:
          gcc_unreachable ();
          break;
        }
    }

  return gfc_finish_block (&fnblock);
}

/* Recursively traverse an object of derived type, generating code to
   nullify allocatable components.  */

tree
gfc_nullify_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
{
  return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
                                NULLIFY_ALLOC_COMP);
}

/* Recursively traverse an object of derived type, generating code to
   deallocate allocatable components.  */

tree
gfc_deallocate_alloc_comp (gfc_symbol * der_type, tree decl, int rank)
{
  return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
                                DEALLOCATE_ALLOC_COMP);
}

/* Recursively traverse an object of derived type, generating code to
   deallocate allocatable components.  But do not deallocate coarrays.
   To be used for intrinsic assignment, which may not change the allocation
   status of coarrays.  */

tree
gfc_deallocate_alloc_comp_no_caf (gfc_symbol * der_type, tree decl, int rank)
{
  return structure_alloc_comps (der_type, decl, NULL_TREE, rank,
                                DEALLOCATE_ALLOC_COMP_NO_CAF);
}
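
/* Recursively traverse an object of derived type, generating code to
   reassign the data pointers of its coarray components from DECL to DEST.
   Used for intrinsic assignment, where the allocation status of coarray
   components must not change (see also gfc_deallocate_alloc_comp_no_caf
   above).  */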

tree
gfc_reassign_alloc_comp_caf (gfc_symbol *der_type, tree decl, tree dest)
{
  return structure_alloc_comps (der_type, decl, dest, 0, COPY_ALLOC_COMP_CAF);
}

/* Recursively traverse an object of derived type, generating code to
   copy it and its allocatable components.  */

tree
gfc_copy_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
{
  return structure_alloc_comps (der_type, decl, dest, rank, COPY_ALLOC_COMP);
}

/* Recursively traverse an object of derived type, generating code to
   copy only its allocatable components.  */

tree
gfc_copy_only_alloc_comp (gfc_symbol * der_type, tree decl, tree dest, int rank)
{
  return structure_alloc_comps (der_type, decl, dest, rank, COPY_ONLY_ALLOC_COMP);
}

/* Returns the value of LBOUND for an expression.  This could be broken out
   from gfc_conv_intrinsic_bound but this seemed to be simpler.  This is
   called by gfc_alloc_allocatable_for_assignment.  */

static tree
get_std_lbound (gfc_expr *expr, tree desc, int dim, bool assumed_size)
{
  tree lbound;
  tree ubound;
  tree stride;
  tree cond, cond1, cond3, cond4;
  tree tmp;
  gfc_ref *ref;

  if (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)))
    {
      tmp = gfc_rank_cst[dim];
      lbound = gfc_conv_descriptor_lbound_get (desc, tmp);
      ubound = gfc_conv_descriptor_ubound_get (desc, tmp);
      stride = gfc_conv_descriptor_stride_get (desc, tmp);
      cond1 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
                               ubound, lbound);
      cond3 = fold_build2_loc (input_location, GE_EXPR, boolean_type_node,
                               stride, gfc_index_zero_node);
      cond3 = fold_build2_loc (input_location, TRUTH_AND_EXPR,
                               boolean_type_node, cond3, cond1);
      cond4 = fold_build2_loc (input_location, LT_EXPR, boolean_type_node,
                               stride, gfc_index_zero_node);

      if (assumed_size)
        cond = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
                                tmp, build_int_cst (gfc_array_index_type,
                                                    expr->rank - 1));
      else
        cond = boolean_false_node;

      cond1 = fold_build2_loc (input_location, TRUTH_OR_EXPR,
                               boolean_type_node, cond3, cond4);
      cond = fold_build2_loc (input_location, TRUTH_OR_EXPR,
                              boolean_type_node, cond, cond1);

      return fold_build3_loc (input_location, COND_EXPR,
                              gfc_array_index_type, cond,
                              lbound, gfc_index_one_node);
    }

  if (expr->expr_type == EXPR_FUNCTION)
    {
      /* A conversion function, so use the argument.  */
      gcc_assert (expr->value.function.isym
                  && expr->value.function.isym->conversion);
      expr = expr->value.function.actual->expr;
    }

  if (expr->expr_type == EXPR_VARIABLE)
    {
      tmp = TREE_TYPE (expr->symtree->n.sym->backend_decl);
      for (ref = expr->ref; ref; ref = ref->next)
        {
          if (ref->type == REF_COMPONENT
              && ref->u.c.component->as
              && ref->next
              && ref->next->u.ar.type == AR_FULL)
            tmp = TREE_TYPE (ref->u.c.component->backend_decl);
        }
      return GFC_TYPE_ARRAY_LBOUND(tmp, dim);
    }

  return gfc_index_one_node;
}

/* Returns true if an expression represents an lhs that can be reallocated
   on assignment.  */
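
/* For example (illustration only): given REAL, ALLOCATABLE :: a(:), the
   whole-array assignment "a = b" has a reallocatable lhs, while the section
   assignment "a(:) = b" does not.  */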

bool
gfc_is_reallocatable_lhs (gfc_expr *expr)
{
  gfc_ref * ref;

  if (!expr->ref)
    return false;

  /* An allocatable variable.  */
  if (expr->symtree->n.sym->attr.allocatable
      && expr->ref
      && expr->ref->type == REF_ARRAY
      && expr->ref->u.ar.type == AR_FULL)
    return true;

  /* All that can be left are allocatable components.  */
  if ((expr->symtree->n.sym->ts.type != BT_DERIVED
       && expr->symtree->n.sym->ts.type != BT_CLASS)
      || !expr->symtree->n.sym->ts.u.derived->attr.alloc_comp)
    return false;

  /* Find a component ref followed by an array reference.  */
  for (ref = expr->ref; ref; ref = ref->next)
    if (ref->next
        && ref->type == REF_COMPONENT
        && ref->next->type == REF_ARRAY
        && !ref->next->next)
      break;

  if (!ref)
    return false;

  /* Return true if valid reallocatable lhs.  */
  if (ref->u.c.component->attr.allocatable
      && ref->next->u.ar.type == AR_FULL)
    return true;

  return false;
}

/* Allocate the lhs of an assignment to an allocatable array, otherwise
   reallocate it.  */
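
/* For example (illustration only):

     real, allocatable :: x(:)
     allocate (x(2))
     x = [1.0, 2.0, 3.0]   ! x is reallocated here with shape [3]

   The code below allocates the lhs if it is unallocated and reallocates it
   if its shape (or a deferred length parameter) differs from the rhs.  */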

tree
gfc_alloc_allocatable_for_assignment (gfc_loopinfo *loop,
                                      gfc_expr *expr1,
                                      gfc_expr *expr2)
{
  stmtblock_t realloc_block;
  stmtblock_t alloc_block;
  stmtblock_t fblock;
  gfc_ss *rss;
  gfc_ss *lss;
  gfc_array_info *linfo;
  tree realloc_expr;
  tree alloc_expr;
  tree size1;
  tree size2;
  tree array1;
  tree cond_null;
  tree cond;
  tree tmp;
  tree tmp2;
  tree lbound;
  tree ubound;
  tree desc;
  tree old_desc;
  tree desc2;
  tree offset;
  tree jump_label1;
  tree jump_label2;
  tree neq_size;
  tree lbd;
  int n;
  int dim;
  gfc_array_spec * as;

  /* x = f(...) with x allocatable.  In this case, expr1 is the rhs.
     Find the lhs expression in the loop chain and set expr1 and
     expr2 accordingly.  */
  if (expr1->expr_type == EXPR_FUNCTION && expr2 == NULL)
    {
      expr2 = expr1;
      /* Find the ss for the lhs.  */
      lss = loop->ss;
      for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain)
        if (lss->info->expr && lss->info->expr->expr_type == EXPR_VARIABLE)
          break;
      if (lss == gfc_ss_terminator)
        return NULL_TREE;
      expr1 = lss->info->expr;
    }

  /* Bail out if this is not a valid allocate on assignment.  */
  if (!gfc_is_reallocatable_lhs (expr1)
      || (expr2 && !expr2->rank))
    return NULL_TREE;

  /* Find the ss for the lhs.  */
  lss = loop->ss;
  for (; lss && lss != gfc_ss_terminator; lss = lss->loop_chain)
    if (lss->info->expr == expr1)
      break;

  if (lss == gfc_ss_terminator)
    return NULL_TREE;

  linfo = &lss->info->data.array;

  /* Find an ss for the rhs.  For operator expressions, we see the
     ss's for the operands.  Any one of these will do.  */
  rss = loop->ss;
  for (; rss && rss != gfc_ss_terminator; rss = rss->loop_chain)
    if (rss->info->expr != expr1 && rss != loop->temp_ss)
      break;

  if (expr2 && rss == gfc_ss_terminator)
    return NULL_TREE;

  gfc_start_block (&fblock);

  /* Since the lhs is allocatable, this must be a descriptor type.
     Get the data and array size.  */
  desc = linfo->descriptor;
  gcc_assert (GFC_DESCRIPTOR_TYPE_P (TREE_TYPE (desc)));
  array1 = gfc_conv_descriptor_data_get (desc);

  /* 7.4.1.3 "If variable is an allocated allocatable variable, it is
     deallocated if expr is an array of different shape or any of the
     corresponding length type parameter values of variable and expr
     differ."  This assures F95 compatibility.  */
  jump_label1 = gfc_build_label_decl (NULL_TREE);
  jump_label2 = gfc_build_label_decl (NULL_TREE);

  /* Allocate if data is NULL.  */
  cond_null = fold_build2_loc (input_location, EQ_EXPR, boolean_type_node,
                               array1, build_int_cst (TREE_TYPE (array1), 0));
  tmp = build3_v (COND_EXPR, cond_null,
                  build1_v (GOTO_EXPR, jump_label1),
                  build_empty_stmt (input_location));
  gfc_add_expr_to_block (&fblock, tmp);

  /* Get arrayspec if expr is a full array.  */
  if (expr2 && expr2->expr_type == EXPR_FUNCTION
      && expr2->value.function.isym
      && expr2->value.function.isym->conversion)
    {
      /* For conversion functions, take the arg.  */
      gfc_expr *arg = expr2->value.function.actual->expr;
      as = gfc_get_full_arrayspec_from_expr (arg);
    }
  else if (expr2)
    as = gfc_get_full_arrayspec_from_expr (expr2);
  else
    as = NULL;

  /* If the lhs shape is not the same as the rhs, jump to setting the
     bounds and doing the reallocation...  */
  for (n = 0; n < expr1->rank; n++)
    {
      /* Check the shape.  */
      lbound = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]);
      ubound = gfc_conv_descriptor_ubound_get (desc, gfc_rank_cst[n]);
      tmp = fold_build2_loc (input_location, MINUS_EXPR,
                             gfc_array_index_type,
                             loop->to[n], loop->from[n]);
      tmp = fold_build2_loc (input_location, PLUS_EXPR,
                             gfc_array_index_type,
                             tmp, lbound);
      tmp = fold_build2_loc (input_location, MINUS_EXPR,
                             gfc_array_index_type,
                             tmp, ubound);
      cond = fold_build2_loc (input_location, NE_EXPR,
                              boolean_type_node,
                              tmp, gfc_index_zero_node);
      tmp = build3_v (COND_EXPR, cond,
                      build1_v (GOTO_EXPR, jump_label1),
                      build_empty_stmt (input_location));
      gfc_add_expr_to_block (&fblock, tmp);
    }

  /* ...else jump past the (re)alloc code.  */
  tmp = build1_v (GOTO_EXPR, jump_label2);
  gfc_add_expr_to_block (&fblock, tmp);

  /* Add the label to start automatic (re)allocation.  */
  tmp = build1_v (LABEL_EXPR, jump_label1);
  gfc_add_expr_to_block (&fblock, tmp);

  /* If the lhs has not been allocated, its bounds will not have been
     initialized and so its size is set to zero.  */
  size1 = gfc_create_var (gfc_array_index_type, NULL);
  gfc_init_block (&alloc_block);
  gfc_add_modify (&alloc_block, size1, gfc_index_zero_node);
  gfc_init_block (&realloc_block);
  gfc_add_modify (&realloc_block, size1,
                  gfc_conv_descriptor_size (desc, expr1->rank));
  tmp = build3_v (COND_EXPR, cond_null,
                  gfc_finish_block (&alloc_block),
                  gfc_finish_block (&realloc_block));
  gfc_add_expr_to_block (&fblock, tmp);

  /* Get the rhs size and fix it.  */
  if (expr2)
    desc2 = rss->info->data.array.descriptor;
  else
    desc2 = NULL_TREE;

  size2 = gfc_index_one_node;
  for (n = 0; n < expr2->rank; n++)
    {
      tmp = fold_build2_loc (input_location, MINUS_EXPR,
                             gfc_array_index_type,
                             loop->to[n], loop->from[n]);
      tmp = fold_build2_loc (input_location, PLUS_EXPR,
                             gfc_array_index_type,
                             tmp, gfc_index_one_node);
      size2 = fold_build2_loc (input_location, MULT_EXPR,
                               gfc_array_index_type,
                               tmp, size2);
    }
  size2 = gfc_evaluate_now (size2, &fblock);

  cond = fold_build2_loc (input_location, NE_EXPR, boolean_type_node,
                          size1, size2);
  neq_size = gfc_evaluate_now (cond, &fblock);

  /* Deallocation of allocatable components will have to occur on
     reallocation.  Fix the old descriptor now.  */
  if ((expr1->ts.type == BT_DERIVED)
      && expr1->ts.u.derived->attr.alloc_comp)
    old_desc = gfc_evaluate_now (desc, &fblock);
  else
    old_desc = NULL_TREE;

  /* Now modify the lhs descriptor and the associated scalarizer
     variables.  F2003 7.4.1.3: "If variable is or becomes an
     unallocated allocatable variable, then it is allocated with each
     deferred type parameter equal to the corresponding type parameters
     of expr, with the shape of expr, and with each lower bound equal
     to the corresponding element of LBOUND(expr)."
     Reuse size1 to keep a dimension-by-dimension track of the
     stride of the new array.  */
  size1 = gfc_index_one_node;
  offset = gfc_index_zero_node;

  for (n = 0; n < expr2->rank; n++)
    {
      tmp = fold_build2_loc (input_location, MINUS_EXPR,
                             gfc_array_index_type,
                             loop->to[n], loop->from[n]);
      tmp = fold_build2_loc (input_location, PLUS_EXPR,
                             gfc_array_index_type,
                             tmp, gfc_index_one_node);
      lbound = gfc_index_one_node;
      ubound = tmp;

      if (as)
        {
          lbd = get_std_lbound (expr2, desc2, n,
                                as->type == AS_ASSUMED_SIZE);
          ubound = fold_build2_loc (input_location,
                                    MINUS_EXPR,
                                    gfc_array_index_type,
                                    ubound, lbound);
          ubound = fold_build2_loc (input_location,
                                    PLUS_EXPR,
                                    gfc_array_index_type,
                                    ubound, lbd);
          lbound = lbd;
        }

      gfc_conv_descriptor_lbound_set (&fblock, desc,
                                      gfc_rank_cst[n],
                                      lbound);
      gfc_conv_descriptor_ubound_set (&fblock, desc,
                                      gfc_rank_cst[n],
                                      ubound);
      gfc_conv_descriptor_stride_set (&fblock, desc,
                                      gfc_rank_cst[n],
                                      size1);
      lbound = gfc_conv_descriptor_lbound_get (desc,
                                               gfc_rank_cst[n]);
      tmp2 = fold_build2_loc (input_location, MULT_EXPR,
                              gfc_array_index_type,
                              lbound, size1);
      offset = fold_build2_loc (input_location, MINUS_EXPR,
                                gfc_array_index_type,
                                offset, tmp2);
      size1 = fold_build2_loc (input_location, MULT_EXPR,
                               gfc_array_index_type,
                               tmp, size1);
    }

  /* Set the lhs descriptor and scalarizer offsets.  For rank > 1,
     the array offset is saved and the info.offset is used for a
     running offset.  Use the saved_offset instead.  */
  tmp = gfc_conv_descriptor_offset (desc);
  gfc_add_modify (&fblock, tmp, offset);
  if (linfo->saved_offset
      && TREE_CODE (linfo->saved_offset) == VAR_DECL)
    gfc_add_modify (&fblock, linfo->saved_offset, tmp);

  /* Now set the deltas for the lhs.  */
  for (n = 0; n < expr1->rank; n++)
    {
      tmp = gfc_conv_descriptor_lbound_get (desc, gfc_rank_cst[n]);
      dim = lss->dim[n];
      tmp = fold_build2_loc (input_location, MINUS_EXPR,
                             gfc_array_index_type, tmp,
                             loop->from[dim]);
      if (linfo->delta[dim]
          && TREE_CODE (linfo->delta[dim]) == VAR_DECL)
        gfc_add_modify (&fblock, linfo->delta[dim], tmp);
    }

  /* Get the new lhs size in bytes.  */
  if (expr1->ts.type == BT_CHARACTER && expr1->ts.deferred)
    {
      if (expr2->ts.deferred)
        {
          if (TREE_CODE (expr2->ts.u.cl->backend_decl) == VAR_DECL)
            tmp = expr2->ts.u.cl->backend_decl;
          else
            tmp = rss->info->string_length;
        }
      else
        {
          tmp = expr2->ts.u.cl->backend_decl;
          tmp = fold_convert (TREE_TYPE (expr1->ts.u.cl->backend_decl), tmp);
        }

      if (expr1->ts.u.cl->backend_decl
          && TREE_CODE (expr1->ts.u.cl->backend_decl) == VAR_DECL)
        gfc_add_modify (&fblock, expr1->ts.u.cl->backend_decl, tmp);
      else
        gfc_add_modify (&fblock, lss->info->string_length, tmp);
    }
  else if (expr1->ts.type == BT_CHARACTER && expr1->ts.u.cl->backend_decl)
    {
      tmp = TYPE_SIZE_UNIT (TREE_TYPE (gfc_typenode_for_spec (&expr1->ts)));
      tmp = fold_build2_loc (input_location, MULT_EXPR,
                             gfc_array_index_type, tmp,
                             expr1->ts.u.cl->backend_decl);
    }
  else
    tmp = TYPE_SIZE_UNIT (gfc_typenode_for_spec (&expr1->ts));
  tmp = fold_convert (gfc_array_index_type, tmp);
  size2 = fold_build2_loc (input_location, MULT_EXPR,
                           gfc_array_index_type,
                           tmp, size2);
  size2 = fold_convert (size_type_node, size2);
  size2 = fold_build2_loc (input_location, MAX_EXPR, size_type_node,
                           size2, size_one_node);
  size2 = gfc_evaluate_now (size2, &fblock);

  /* Realloc expression.  Note that the scalarizer uses desc.data
     in the array reference - (*desc.data)[<element>].  */
  gfc_init_block (&realloc_block);

  if ((expr1->ts.type == BT_DERIVED)
      && expr1->ts.u.derived->attr.alloc_comp)
    {
      tmp = gfc_deallocate_alloc_comp_no_caf (expr1->ts.u.derived, old_desc,
                                              expr1->rank);
      gfc_add_expr_to_block (&realloc_block, tmp);
    }

  tmp = build_call_expr_loc (input_location,
                             builtin_decl_explicit (BUILT_IN_REALLOC), 2,
                             fold_convert (pvoid_type_node, array1),
                             size2);
  gfc_conv_descriptor_data_set (&realloc_block,
                                desc, tmp);

  if ((expr1->ts.type == BT_DERIVED)
      && expr1->ts.u.derived->attr.alloc_comp)
    {
      tmp = gfc_nullify_alloc_comp (expr1->ts.u.derived, desc,
                                    expr1->rank);
      gfc_add_expr_to_block (&realloc_block, tmp);
    }

  realloc_expr = gfc_finish_block (&realloc_block);

  /* Only reallocate if sizes are different.  */
  tmp = build3_v (COND_EXPR, neq_size, realloc_expr,
                  build_empty_stmt (input_location));
  realloc_expr = tmp;

  /* Malloc expression.  */
  gfc_init_block (&alloc_block);
  tmp = build_call_expr_loc (input_location,
                             builtin_decl_explicit (BUILT_IN_MALLOC),
                             1, size2);
  gfc_conv_descriptor_data_set (&alloc_block,
                                desc, tmp);
  tmp = gfc_conv_descriptor_dtype (desc);
  gfc_add_modify (&alloc_block, tmp, gfc_get_dtype (TREE_TYPE (desc)));
  if ((expr1->ts.type == BT_DERIVED)
      && expr1->ts.u.derived->attr.alloc_comp)
    {
      tmp = gfc_nullify_alloc_comp (expr1->ts.u.derived, desc,
                                    expr1->rank);
      gfc_add_expr_to_block (&alloc_block, tmp);
    }
  alloc_expr = gfc_finish_block (&alloc_block);

  /* Malloc if not allocated; realloc otherwise.  */
  tmp = build_int_cst (TREE_TYPE (array1), 0);
  cond = fold_build2_loc (input_location, EQ_EXPR,
                          boolean_type_node,
                          array1, tmp);
  tmp = build3_v (COND_EXPR, cond, alloc_expr, realloc_expr);
  gfc_add_expr_to_block (&fblock, tmp);

  /* Make sure that the scalarizer data pointer is updated.  */
  if (linfo->data
      && TREE_CODE (linfo->data) == VAR_DECL)
    {
      tmp = gfc_conv_descriptor_data_get (desc);
      gfc_add_modify (&fblock, linfo->data, tmp);
    }

  /* Add the exit label.  */
  tmp = build1_v (LABEL_EXPR, jump_label2);
  gfc_add_expr_to_block (&fblock, tmp);

  return gfc_finish_block (&fblock);
}

/* NULLIFY an allocatable/pointer array on function entry, free it on exit.
   Do likewise, recursively if necessary, with the allocatable components of
   derived types.  */
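
/* For example (illustration only):

     subroutine sub ()
       real, allocatable :: tmp(:)   ! data pointer set to NULL on entry
       allocate (tmp(10))
     end subroutine sub              ! tmp is deallocated on exit

   Allocatable components of derived-type variables are nullified and
   deallocated in the same places, via the *_alloc_comp helpers above.  */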

void
gfc_trans_deferred_array (gfc_symbol * sym, gfc_wrapped_block * block)
{
  tree type;
  tree tmp;
  tree descriptor;
  stmtblock_t init;
  stmtblock_t cleanup;
  locus loc;
  int rank;
  bool sym_has_alloc_comp, has_finalizer;

  sym_has_alloc_comp = (sym->ts.type == BT_DERIVED
                        || sym->ts.type == BT_CLASS)
                          && sym->ts.u.derived->attr.alloc_comp;
  has_finalizer = sym->ts.type == BT_CLASS || sym->ts.type == BT_DERIVED
                  ? gfc_is_finalizable (sym->ts.u.derived, NULL) : false;

  /* Make sure the frontend gets these right.  */
  gcc_assert (sym->attr.pointer || sym->attr.allocatable || sym_has_alloc_comp
              || has_finalizer);

  gfc_save_backend_locus (&loc);
  gfc_set_backend_locus (&sym->declared_at);
  gfc_init_block (&init);

  gcc_assert (TREE_CODE (sym->backend_decl) == VAR_DECL
              || TREE_CODE (sym->backend_decl) == PARM_DECL);

  if (sym->ts.type == BT_CHARACTER
      && !INTEGER_CST_P (sym->ts.u.cl->backend_decl))
    {
      gfc_conv_string_length (sym->ts.u.cl, NULL, &init);
      gfc_trans_vla_type_sizes (sym, &init);
    }

  /* Dummy, use-associated and result variables don't need anything
     special.  */
  if (sym->attr.dummy || sym->attr.use_assoc || sym->attr.result)
    {
      gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
      gfc_restore_backend_locus (&loc);
      return;
    }

  descriptor = sym->backend_decl;

  /* Although static, derived types with default initializers and
     allocatable components must not be nulled wholesale; instead they
     are treated component by component.  */
  if (TREE_STATIC (descriptor) && !sym_has_alloc_comp && !has_finalizer)
    {
      /* SAVEd variables are not freed on exit.  */
      gfc_trans_static_array_pointer (sym);

      gfc_add_init_cleanup (block, gfc_finish_block (&init), NULL_TREE);
      gfc_restore_backend_locus (&loc);
      return;
    }

  /* Get the descriptor type.  */
  type = TREE_TYPE (sym->backend_decl);

  if ((sym_has_alloc_comp || (has_finalizer && sym->ts.type != BT_CLASS))
      && !(sym->attr.pointer || sym->attr.allocatable))
    {
      if (!sym->attr.save
          && !(TREE_STATIC (sym->backend_decl) && sym->attr.is_main_program))
        {
          if (sym->value == NULL
              || !gfc_has_default_initializer (sym->ts.u.derived))
            {
              rank = sym->as ? sym->as->rank : 0;
              tmp = gfc_nullify_alloc_comp (sym->ts.u.derived,
                                            descriptor, rank);
              gfc_add_expr_to_block (&init, tmp);
            }
          else
            gfc_init_default_dt (sym, &init, false);
        }
    }
  else if (!GFC_DESCRIPTOR_TYPE_P (type))
    {
      /* If the backend_decl is not a descriptor, we must have a pointer
         to one.  */
      descriptor = build_fold_indirect_ref_loc (input_location,
                                                sym->backend_decl);
      type = TREE_TYPE (descriptor);
    }

  /* NULLIFY the data pointer, for non-saved allocatables.  */
  if (GFC_DESCRIPTOR_TYPE_P (type) && !sym->attr.save && sym->attr.allocatable)
    gfc_conv_descriptor_data_set (&init, descriptor, null_pointer_node);

  gfc_restore_backend_locus (&loc);
  gfc_init_block (&cleanup);

  /* Allocatable arrays need to be freed when they go out of scope.
     The allocatable components of pointers must not be touched.  */
  if (!sym->attr.allocatable && has_finalizer && sym->ts.type != BT_CLASS
      && !sym->attr.pointer && !sym->attr.artificial && !sym->attr.save
      && !sym->ns->proc_name->attr.is_main_program)
    {
      gfc_expr *e;
      sym->attr.referenced = 1;
      e = gfc_lval_expr_from_sym (sym);
      gfc_add_finalizer_call (&cleanup, e);
      gfc_free_expr (e);
    }
  else if ((!sym->attr.allocatable || !has_finalizer)
           && sym_has_alloc_comp && !(sym->attr.function || sym->attr.result)
           && !sym->attr.pointer && !sym->attr.save
           && !sym->ns->proc_name->attr.is_main_program)
    {
      int rank;
      rank = sym->as ? sym->as->rank : 0;
      tmp = gfc_deallocate_alloc_comp (sym->ts.u.derived, descriptor, rank);
      gfc_add_expr_to_block (&cleanup, tmp);
    }

  if (sym->attr.allocatable && (sym->attr.dimension || sym->attr.codimension)
      && !sym->attr.save && !sym->attr.result
      && !sym->ns->proc_name->attr.is_main_program)
    {
      gfc_expr *e;
      e = has_finalizer ? gfc_lval_expr_from_sym (sym) : NULL;
      tmp = gfc_trans_dealloc_allocated (sym->backend_decl,
                                         sym->attr.codimension, e);
      if (e)
        gfc_free_expr (e);
      gfc_add_expr_to_block (&cleanup, tmp);
    }

  gfc_add_init_cleanup (block, gfc_finish_block (&init),
                        gfc_finish_block (&cleanup));
}

/************ Expression Walking Functions ******************/

/* Walk a variable reference.

   Possible extension - multiple component subscripts.
    x(:,:) = foo%a(:)%b(:)
   Transforms to
    forall (i=..., j=...)
      x(i,j) = foo%a(j)%b(i)
    end forall
   This adds a fair amount of complexity because you need to deal with more
   than one ref.  Maybe handle in a similar manner to vector subscripts.
   Maybe not worth the effort.  */

static gfc_ss *
gfc_walk_variable_expr (gfc_ss * ss, gfc_expr * expr)
{
  gfc_ref *ref;

  for (ref = expr->ref; ref; ref = ref->next)
    if (ref->type == REF_ARRAY && ref->u.ar.type != AR_ELEMENT)
      break;

  return gfc_walk_array_ref (ss, expr, ref);
}

gfc_ss *
gfc_walk_array_ref (gfc_ss * ss, gfc_expr * expr, gfc_ref * ref)
{
  gfc_array_ref *ar;
  gfc_ss *newss;
  int n;

  for (; ref; ref = ref->next)
    {
      if (ref->type == REF_SUBSTRING)
        {
          ss = gfc_get_scalar_ss (ss, ref->u.ss.start);
          ss = gfc_get_scalar_ss (ss, ref->u.ss.end);
        }

      /* We're only interested in array sections from now on.  */
      if (ref->type != REF_ARRAY)
        continue;

      ar = &ref->u.ar;

      switch (ar->type)
        {
        case AR_ELEMENT:
          for (n = ar->dimen - 1; n >= 0; n--)
            ss = gfc_get_scalar_ss (ss, ar->start[n]);
          break;

        case AR_FULL:
          newss = gfc_get_array_ss (ss, expr, ar->as->rank, GFC_SS_SECTION);
          newss->info->data.array.ref = ref;

          /* Make sure array is the same as array(:,:), so that we don't
             need to special-case it all the time.  */
          ar->dimen = ar->as->rank;
          for (n = 0; n < ar->dimen; n++)
            {
              ar->dimen_type[n] = DIMEN_RANGE;

              gcc_assert (ar->start[n] == NULL);
              gcc_assert (ar->end[n] == NULL);
              gcc_assert (ar->stride[n] == NULL);
            }
          ss = newss;
          break;

        case AR_SECTION:
          newss = gfc_get_array_ss (ss, expr, 0, GFC_SS_SECTION);
          newss->info->data.array.ref = ref;

          /* We add SS chains for all the subscripts in the section.  */
          for (n = 0; n < ar->dimen; n++)
            {
              gfc_ss *indexss;

              switch (ar->dimen_type[n])
                {
                case DIMEN_ELEMENT:
                  /* Add SS for elemental (scalar) subscripts.  */
                  gcc_assert (ar->start[n]);
                  indexss = gfc_get_scalar_ss (gfc_ss_terminator, ar->start[n]);
                  indexss->loop_chain = gfc_ss_terminator;
                  newss->info->data.array.subscript[n] = indexss;
                  break;

                case DIMEN_RANGE:
                  /* We don't add anything for sections, just remember this
                     dimension for later.  */
                  newss->dim[newss->dimen] = n;
                  newss->dimen++;
                  break;

                case DIMEN_VECTOR:
                  /* Create a GFC_SS_VECTOR index in which we can store
                     the vector's descriptor.  */
                  indexss = gfc_get_array_ss (gfc_ss_terminator, ar->start[n],
                                              1, GFC_SS_VECTOR);
                  indexss->loop_chain = gfc_ss_terminator;
                  newss->info->data.array.subscript[n] = indexss;
                  newss->dim[newss->dimen] = n;
                  newss->dimen++;
                  break;

                default:
                  /* We should know what sort of section it is by now.  */
                  gcc_unreachable ();
                }
            }
          /* We should have at least one non-elemental dimension,
             unless we are creating a descriptor for a (scalar) coarray.  */
          gcc_assert (newss->dimen > 0
                      || newss->info->data.array.ref->u.ar.as->corank > 0);
          ss = newss;
          break;

        default:
          /* We should know what sort of section it is by now.  */
          gcc_unreachable ();
        }
    }
  return ss;
}

/* Walk an expression operator.  If only one operand of a binary expression is
   scalar, we must also add the scalar term to the SS chain.  */
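
/* For example, when walking "a(:) + s" with scalar "s", walking the second
   operand adds nothing to the chain, so a scalar SS is created for "s" and
   linked into the chain built for "a(:)".  */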

static gfc_ss *
gfc_walk_op_expr (gfc_ss * ss, gfc_expr * expr)
{
  gfc_ss *head;
  gfc_ss *head2;

  head = gfc_walk_subexpr (ss, expr->value.op.op1);
  if (expr->value.op.op2 == NULL)
    head2 = head;
  else
    head2 = gfc_walk_subexpr (head, expr->value.op.op2);

  /* All operands are scalar.  Pass back and let the caller deal with it.  */
  if (head2 == ss)
    return head2;

  /* All operands require scalarization.  */
  if (head != ss && (expr->value.op.op2 == NULL || head2 != head))
    return head2;

  /* One of the operands needs scalarization, the other is scalar.
     Create a gfc_ss for the scalar expression.  */
  if (head == ss)
    {
      /* First operand is scalar.  We build the chain in reverse order, so
         add the scalar SS after the second operand.  */
      head = head2;
      while (head && head->next != ss)
        head = head->next;
      /* Check we haven't somehow broken the chain.  */
      gcc_assert (head);
      head->next = gfc_get_scalar_ss (ss, expr->value.op.op1);
    }
  else /* head2 == head */
    {
      gcc_assert (head2 == head);
      /* Second operand is scalar.  */
      head2 = gfc_get_scalar_ss (head2, expr->value.op.op2);
    }

  return head2;
}

/* Reverse a SS chain.  */

gfc_ss *
gfc_reverse_ss (gfc_ss * ss)
{
  gfc_ss *next;
  gfc_ss *head;

  gcc_assert (ss != NULL);

  head = gfc_ss_terminator;
  while (ss != gfc_ss_terminator)
    {
      next = ss->next;
      /* Check we didn't somehow break the chain.  */
      gcc_assert (next != NULL);
      ss->next = head;
      head = ss;
      ss = next;
    }

  return (head);
}

/* Given an expression referring to a procedure, return the symbol of its
   interface.  We can't get the procedure symbol directly as we have to handle
   the case of (deferred) type-bound procedures.  */

gfc_symbol *
gfc_get_proc_ifc_for_expr (gfc_expr *procedure_ref)
{
  gfc_symbol *sym;
  gfc_ref *ref;

  if (procedure_ref == NULL)
    return NULL;

  /* Normal procedure case.  */
  sym = procedure_ref->symtree->n.sym;

  /* Typebound procedure case.  */
  for (ref = procedure_ref->ref; ref; ref = ref->next)
    {
      if (ref->type == REF_COMPONENT
          && ref->u.c.component->attr.proc_pointer)
        sym = ref->u.c.component->ts.interface;
      else
        sym = NULL;
    }

  return sym;
}

/* Walk the arguments of an elemental function.
   PROC_IFC is used to check whether an argument is permitted to be absent.
   If it is NULL, we don't do the check and the argument is assumed to be
   present.  */

gfc_ss *
gfc_walk_elemental_function_args (gfc_ss * ss, gfc_actual_arglist *arg,
                                  gfc_symbol *proc_ifc, gfc_ss_type type)
{
  gfc_formal_arglist *dummy_arg;
  int scalar;
  gfc_ss *head;
  gfc_ss *tail;
  gfc_ss *newss;

  head = gfc_ss_terminator;
  tail = NULL;

  if (proc_ifc)
    dummy_arg = gfc_sym_get_dummy_args (proc_ifc);
  else
    dummy_arg = NULL;

  scalar = 1;
  for (; arg; arg = arg->next)
    {
      if (!arg->expr || arg->expr->expr_type == EXPR_NULL)
        continue;

      newss = gfc_walk_subexpr (head, arg->expr);
      if (newss == head)
        {
          /* Scalar argument.  */
          gcc_assert (type == GFC_SS_SCALAR || type == GFC_SS_REFERENCE);
          newss = gfc_get_scalar_ss (head, arg->expr);
          newss->info->type = type;
        }
      else
        scalar = 0;

      if (dummy_arg != NULL
          && dummy_arg->sym->attr.optional
          && arg->expr->expr_type == EXPR_VARIABLE
          && (gfc_expr_attr (arg->expr).optional
              || gfc_expr_attr (arg->expr).allocatable
              || gfc_expr_attr (arg->expr).pointer))
        newss->info->can_be_null_ref = true;

      head = newss;
      if (!tail)
        {
          tail = head;
          while (tail->next != gfc_ss_terminator)
            tail = tail->next;
        }

      if (dummy_arg != NULL)
        dummy_arg = dummy_arg->next;
    }

  if (scalar)
    {
      /* If all the arguments are scalar we don't need the argument SS.  */
      gfc_free_ss_chain (head);
      /* Pass it back.  */
      return ss;
    }

  /* Add it onto the existing chain.  */
  tail->next = ss;
  return head;
}

/* Walk a function call.  Scalar functions are passed back, and taken out of
   scalarization loops.  For elemental functions we walk their arguments.
   The result of functions returning arrays is stored in a temporary outside
   the loop, so that the function is only called once.  Hence we do not need
   to walk their arguments.  */
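
/* For example (illustration only): a reference to a user-defined function
   returning an array gets a single GFC_SS_FUNCTION entry, whereas for a
   user-defined elemental function "ef" in "y = ef (x)" the actual argument
   "x" is walked instead.  */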

static gfc_ss *
gfc_walk_function_expr (gfc_ss * ss, gfc_expr * expr)
{
  gfc_intrinsic_sym *isym;
  gfc_symbol *sym;
  gfc_component *comp = NULL;

  isym = expr->value.function.isym;

  /* Handle intrinsic functions separately.  */
  if (isym)
    return gfc_walk_intrinsic_function (ss, expr, isym);

  sym = expr->value.function.esym;
  if (!sym)
    sym = expr->symtree->n.sym;

  if (gfc_is_alloc_class_array_function (expr))
    return gfc_get_array_ss (ss, expr,
                             CLASS_DATA (expr->value.function.esym->result)->as->rank,
                             GFC_SS_FUNCTION);

  /* A function that returns arrays.  */
  comp = gfc_get_proc_ptr_comp (expr);
  if ((!comp && gfc_return_by_reference (sym) && sym->result->attr.dimension)
      || (comp && comp->attr.dimension))
    return gfc_get_array_ss (ss, expr, expr->rank, GFC_SS_FUNCTION);

  /* Walk the parameters of an elemental function.  For now we always pass
     by reference.  */
  if (sym->attr.elemental || (comp && comp->attr.elemental))
    {
      gfc_ss *old_ss = ss;

      ss = gfc_walk_elemental_function_args (old_ss,
                                             expr->value.function.actual,
                                             gfc_get_proc_ifc_for_expr (expr),
                                             GFC_SS_REFERENCE);
      if (ss != old_ss
          && (comp
              || sym->attr.proc_pointer
              || sym->attr.if_source != IFSRC_DECL
              || sym->attr.array_outer_dependency))
        ss->info->array_outer_dependency = 1;
    }

  /* Scalar functions are OK as these are evaluated outside the scalarization
     loop.  Pass back and let the caller deal with it.  */
  return ss;
}

/* An array temporary is constructed for array constructors.  */

static gfc_ss *
gfc_walk_array_constructor (gfc_ss * ss, gfc_expr * expr)
{
  return gfc_get_array_ss (ss, expr, expr->rank, GFC_SS_CONSTRUCTOR);
}

/* Walk an expression.  Add walked expressions to the head of the SS chain.
   A wholly scalar expression will not be added.  */

gfc_ss *
gfc_walk_subexpr (gfc_ss * ss, gfc_expr * expr)
{
  gfc_ss *head;

  switch (expr->expr_type)
    {
    case EXPR_VARIABLE:
      head = gfc_walk_variable_expr (ss, expr);
      return head;

    case EXPR_OP:
      head = gfc_walk_op_expr (ss, expr);
      return head;

    case EXPR_FUNCTION:
      head = gfc_walk_function_expr (ss, expr);
      return head;

    case EXPR_CONSTANT:
    case EXPR_NULL:
    case EXPR_STRUCTURE:
      /* Pass back and let the caller deal with it.  */
      break;

    case EXPR_ARRAY:
      head = gfc_walk_array_constructor (ss, expr);
      return head;

    case EXPR_SUBSTRING:
      /* Pass back and let the caller deal with it.  */
      break;

    default:
      gfc_internal_error ("bad expression type during walk (%d)",
                          expr->expr_type);
    }
  return ss;
}

/* Entry point for expression walking.
   A return value equal to the passed chain means this is
   a scalar expression.  It is up to the caller to take whatever action is
   necessary to translate these.  */

gfc_ss *
gfc_walk_expr (gfc_expr * expr)
{
  gfc_ss *res;

  res = gfc_walk_subexpr (gfc_ss_terminator, expr);
  return gfc_reverse_ss (res);
}