/* Pointer Bounds Checker instrumentation pass.
  2. Copyright (C) 2014-2015 Free Software Foundation, Inc.
  3. Contributed by Ilya Enkovich (ilya.enkovich@intel.com)
  4. This file is part of GCC.
  5. GCC is free software; you can redistribute it and/or modify it under
  6. the terms of the GNU General Public License as published by the Free
  7. Software Foundation; either version 3, or (at your option) any later
  8. version.
  9. GCC is distributed in the hope that it will be useful, but WITHOUT ANY
  10. WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11. FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
  12. for more details.
  13. You should have received a copy of the GNU General Public License
  14. along with GCC; see the file COPYING3. If not see
  15. <http://www.gnu.org/licenses/>. */
  16. #include "config.h"
  17. #include "system.h"
  18. #include "coretypes.h"
  19. #include "hash-set.h"
  20. #include "machmode.h"
  21. #include "vec.h"
  22. #include "double-int.h"
  23. #include "input.h"
  24. #include "alias.h"
  25. #include "symtab.h"
  26. #include "options.h"
  27. #include "wide-int.h"
  28. #include "inchash.h"
  29. #include "tree.h"
  30. #include "fold-const.h"
  31. #include "stor-layout.h"
  32. #include "varasm.h"
  33. #include "target.h"
  34. #include "tree-iterator.h"
  35. #include "tree-cfg.h"
  36. #include "langhooks.h"
  37. #include "tree-pass.h"
  38. #include "diagnostic.h"
  39. #include "ggc.h"
  40. #include "is-a.h"
  41. #include "cfgloop.h"
  42. #include "stringpool.h"
  43. #include "tree-ssa-alias.h"
  44. #include "tree-ssanames.h"
  45. #include "tree-ssa-operands.h"
  46. #include "tree-ssa-address.h"
  47. #include "tree-ssa.h"
  48. #include "predict.h"
  49. #include "dominance.h"
  50. #include "cfg.h"
  51. #include "basic-block.h"
  52. #include "tree-ssa-loop-niter.h"
  53. #include "gimple-expr.h"
  54. #include "gimple.h"
  55. #include "tree-phinodes.h"
  56. #include "gimple-ssa.h"
  57. #include "ssa-iterators.h"
  58. #include "gimple-pretty-print.h"
  59. #include "gimple-iterator.h"
  60. #include "gimplify.h"
  61. #include "gimplify-me.h"
  62. #include "print-tree.h"
  63. #include "hashtab.h"
  64. #include "tm.h"
  65. #include "hard-reg-set.h"
  66. #include "function.h"
  67. #include "rtl.h"
  68. #include "flags.h"
  69. #include "statistics.h"
  70. #include "real.h"
  71. #include "fixed-value.h"
  72. #include "insn-config.h"
  73. #include "expmed.h"
  74. #include "dojump.h"
  75. #include "explow.h"
  76. #include "calls.h"
  77. #include "emit-rtl.h"
  78. #include "stmt.h"
  79. #include "expr.h"
  80. #include "tree-ssa-propagate.h"
  81. #include "gimple-fold.h"
  82. #include "tree-chkp.h"
  83. #include "gimple-walk.h"
  84. #include "rtl.h" /* For MEM_P, assign_temp. */
  85. #include "tree-dfa.h"
  86. #include "ipa-ref.h"
  87. #include "lto-streamer.h"
  88. #include "cgraph.h"
  89. #include "ipa-chkp.h"
  90. #include "params.h"
  91. /* Pointer Bounds Checker instruments code with memory checks to find
  92. out-of-bounds memory accesses. Checks are performed by computing
  93. bounds for each pointer and then comparing address of accessed
  94. memory before pointer dereferencing.
  95. 1. Function clones.
  96. See ipa-chkp.c.
  97. 2. Instrumentation.
  98. There are few things to instrument:
  99. a) Memory accesses - add checker calls to check address of accessed memory
  100. against bounds of dereferenced pointer. Obviously safe memory
  101. accesses like static variable access does not have to be instrumented
  102. with checks.
  103. Example:
  104. val_2 = *p_1;
  105. with 4 bytes access is transformed into:
  106. __builtin___chkp_bndcl (__bound_tmp.1_3, p_1);
  107. D.1_4 = p_1 + 3;
  108. __builtin___chkp_bndcu (__bound_tmp.1_3, D.1_4);
  109. val_2 = *p_1;
  110. where __bound_tmp.1_3 are bounds computed for pointer p_1,
  111. __builtin___chkp_bndcl is a lower bound check and
  112. __builtin___chkp_bndcu is an upper bound check.
  113. b) Pointer stores.
  114. When pointer is stored in memory we need to store its bounds. To
  115. achieve compatibility of instrumented code with regular codes
  116. we have to keep data layout and store bounds in special bound tables
  117. via special checker call. Implementation of bounds table may vary for
  118. different platforms. It has to associate pointer value and its
  119. location (it is required because we may have two equal pointers
  120. with different bounds stored in different places) with bounds.
  121. Another checker builtin allows to get bounds for specified pointer
  122. loaded from specified location.
  123. Example:
  124. buf1[i_1] = &buf2;
  125. is transformed into:
  126. buf1[i_1] = &buf2;
  127. D.1_2 = &buf1[i_1];
  128. __builtin___chkp_bndstx (D.1_2, &buf2, __bound_tmp.1_2);
  129. where __bound_tmp.1_2 are bounds of &buf2.
  130. c) Static initialization.
  131. The special case of pointer store is static pointer initialization.
  132. Bounds initialization is performed in a few steps:
  133. - register all static initializations in front-end using
  134. chkp_register_var_initializer
  135. - when file compilation finishes we create functions with special
  136. attribute 'chkp ctor' and put explicit initialization code
  137. (assignments) for all statically initialized pointers.
  138. - when checker constructor is compiled checker pass adds required
  139. bounds initialization for all statically initialized pointers
  140. - since we do not actually need excess pointers initialization
  141. in checker constructor we remove such assignments from them
  142. d) Calls.
  143. For each call in the code we add additional arguments to pass
  144. bounds for pointer arguments. We determine type of call arguments
  145. using arguments list from function declaration; if function
  146. declaration is not available we use function type; otherwise
  147. (e.g. for unnamed arguments) we use type of passed value. Function
  148. declaration/type is replaced with the instrumented one.
  149. Example:
  150. val_1 = foo (&buf1, &buf2, &buf1, 0);
  151. is translated into:
  152. val_1 = foo.chkp (&buf1, __bound_tmp.1_2, &buf2, __bound_tmp.1_3,
  153. &buf1, __bound_tmp.1_2, 0);
  154. e) Returns.
  155. If function returns a pointer value we have to return bounds also.
  156. A new operand was added for return statement to hold returned bounds.
  157. Example:
  158. return &_buf1;
  159. is transformed into
  160. return &_buf1, __bound_tmp.1_1;
  161. 3. Bounds computation.
  162. Compiler is fully responsible for computing bounds to be used for each
  163. memory access. The first step for bounds computation is to find the
  164. origin of pointer dereferenced for memory access. Basing on pointer
  165. origin we define a way to compute its bounds. There are just few
  166. possible cases:
  167. a) Pointer is returned by call.
  168. In this case we use corresponding checker builtin method to obtain returned
  169. bounds.
  170. Example:
  171. buf_1 = malloc (size_2);
  172. foo (buf_1);
  173. is translated into:
  174. buf_1 = malloc (size_2);
  175. __bound_tmp.1_3 = __builtin___chkp_bndret (buf_1);
  176. foo (buf_1, __bound_tmp.1_3);
  177. b) Pointer is an address of an object.
  178. In this case compiler tries to compute objects size and create corresponding
  179. bounds. If object has incomplete type then special checker builtin is used to
  180. obtain its size at runtime.
  181. Example:
  182. foo ()
  183. {
  184. <unnamed type> __bound_tmp.3;
  185. static int buf[100];
  186. <bb 3>:
  187. __bound_tmp.3_2 = __builtin___chkp_bndmk (&buf, 400);
  188. <bb 2>:
  189. return &buf, __bound_tmp.3_2;
  190. }
  191. Example:
  192. Address of an object 'extern int buf[]' with incomplete type is
  193. returned.
  194. foo ()
  195. {
  196. <unnamed type> __bound_tmp.4;
  197. long unsigned int __size_tmp.3;
  198. <bb 3>:
  199. __size_tmp.3_4 = __builtin_ia32_sizeof (buf);
  200. __bound_tmp.4_3 = __builtin_ia32_bndmk (&buf, __size_tmp.3_4);
  201. <bb 2>:
  202. return &buf, __bound_tmp.4_3;
  203. }
  204. c) Pointer is the result of object narrowing.
  205. It happens when we use pointer to an object to compute pointer to a part
  206. of an object. E.g. we take pointer to a field of a structure. In this
  207. case we perform bounds intersection using bounds of original object and
  208. bounds of object's part (which are computed basing on its type).
  209. There may be some debatable questions about when narrowing should occur
  210. and when it should not. To avoid false bound violations in correct
  211. programs we do not perform narrowing when address of an array element is
  212. obtained (it has address of the whole array) and when address of the first
  213. structure field is obtained (because it is guaranteed to be equal to
  214. address of the whole structure and it is legal to cast it back to structure).
  215. Default narrowing behavior may be changed using compiler flags.
  216. Example:
  217. In this example address of the second structure field is returned.
  218. foo (struct A * p, __bounds_type __bounds_of_p)
  219. {
  220. <unnamed type> __bound_tmp.3;
  221. int * _2;
  222. int * _5;
  223. <bb 2>:
  224. _5 = &p_1(D)->second_field;
  225. __bound_tmp.3_6 = __builtin___chkp_bndmk (_5, 4);
  226. __bound_tmp.3_8 = __builtin___chkp_intersect (__bound_tmp.3_6,
  227. __bounds_of_p_3(D));
  228. _2 = &p_1(D)->second_field;
  229. return _2, __bound_tmp.3_8;
  230. }
  231. Example:
  232. In this example address of the first field of array element is returned.
  233. foo (struct A * p, __bounds_type __bounds_of_p, int i)
  234. {
  235. long unsigned int _3;
  236. long unsigned int _4;
  237. struct A * _6;
  238. int * _7;
  239. <bb 2>:
  240. _3 = (long unsigned int) i_1(D);
  241. _4 = _3 * 8;
  242. _6 = p_5(D) + _4;
  243. _7 = &_6->first_field;
  244. return _7, __bounds_of_p_2(D);
  245. }
  246. d) Pointer is the result of pointer arithmetic or type cast.
  247. In this case bounds of the base pointer are used. In case of binary
  248. operation producing a pointer we are analyzing data flow further
  249. looking for operand's bounds. One operand is considered as a base
  250. if it has some valid bounds. If we fall into a case when none of
  251. operands (or both of them) has valid bounds, a default bounds value
  252. is used.
  253. Trying to find out bounds for binary operations we may fall into
  254. cyclic dependencies for pointers. To avoid infinite recursion all
  255. walked phi nodes instantly obtain corresponding bounds but created
  256. bounds are marked as incomplete. It helps us to stop DF walk during
  257. bounds search.
  258. When we reach pointer source, some args of incomplete bounds phi obtain
  259. valid bounds and those values are propagated further through phi nodes.
  260. If no valid bounds were found for phi node then we mark its result as
  261. invalid bounds. Process stops when all incomplete bounds become either
  262. valid or invalid and we are able to choose a pointer base.
  263. e) Pointer is loaded from the memory.
  264. In this case we just need to load bounds from the bounds table.
  265. Example:
  266. foo ()
  267. {
  268. <unnamed type> __bound_tmp.3;
  269. static int * buf;
  270. int * _2;
  271. <bb 2>:
  272. _2 = buf;
  273. __bound_tmp.3_4 = __builtin___chkp_bndldx (&buf, _2);
  274. return _2, __bound_tmp.3_4;
  275. }
  276. */
  277. typedef void (*assign_handler)(tree, tree, void *);
  278. static tree chkp_get_zero_bounds ();
  279. static tree chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter);
  280. static tree chkp_find_bounds_loaded (tree ptr, tree ptr_src,
  281. gimple_stmt_iterator *iter);
  282. static void chkp_parse_array_and_component_ref (tree node, tree *ptr,
  283. tree *elt, bool *safe,
  284. bool *bitfield,
  285. tree *bounds,
  286. gimple_stmt_iterator *iter,
  287. bool innermost_bounds);
  288. #define chkp_bndldx_fndecl \
  289. (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDLDX))
  290. #define chkp_bndstx_fndecl \
  291. (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDSTX))
  292. #define chkp_checkl_fndecl \
  293. (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCL))
  294. #define chkp_checku_fndecl \
  295. (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDCU))
  296. #define chkp_bndmk_fndecl \
  297. (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDMK))
  298. #define chkp_ret_bnd_fndecl \
  299. (targetm.builtin_chkp_function (BUILT_IN_CHKP_BNDRET))
  300. #define chkp_intersect_fndecl \
  301. (targetm.builtin_chkp_function (BUILT_IN_CHKP_INTERSECT))
  302. #define chkp_narrow_bounds_fndecl \
  303. (targetm.builtin_chkp_function (BUILT_IN_CHKP_NARROW))
  304. #define chkp_sizeof_fndecl \
  305. (targetm.builtin_chkp_function (BUILT_IN_CHKP_SIZEOF))
  306. #define chkp_extract_lower_fndecl \
  307. (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_LOWER))
  308. #define chkp_extract_upper_fndecl \
  309. (targetm.builtin_chkp_function (BUILT_IN_CHKP_EXTRACT_UPPER))
  310. static GTY (()) tree chkp_uintptr_type;
  311. static GTY (()) tree chkp_zero_bounds_var;
  312. static GTY (()) tree chkp_none_bounds_var;
  313. static GTY (()) basic_block entry_block;
  314. static GTY (()) tree zero_bounds;
  315. static GTY (()) tree none_bounds;
  316. static GTY (()) tree incomplete_bounds;
  317. static GTY (()) tree tmp_var;
  318. static GTY (()) tree size_tmp_var;
  319. static GTY (()) bitmap chkp_abnormal_copies;
  320. struct hash_set<tree> *chkp_invalid_bounds;
  321. struct hash_set<tree> *chkp_completed_bounds_set;
  322. struct hash_map<tree, tree> *chkp_reg_bounds;
  323. struct hash_map<tree, tree> *chkp_bound_vars;
  324. struct hash_map<tree, tree> *chkp_reg_addr_bounds;
  325. struct hash_map<tree, tree> *chkp_incomplete_bounds_map;
  326. struct hash_map<tree, tree> *chkp_bounds_map;
  327. struct hash_map<tree, tree> *chkp_static_var_bounds;
  328. static bool in_chkp_pass;
  329. #define CHKP_BOUND_TMP_NAME "__bound_tmp"
  330. #define CHKP_SIZE_TMP_NAME "__size_tmp"
  331. #define CHKP_BOUNDS_OF_SYMBOL_PREFIX "__chkp_bounds_of_"
  332. #define CHKP_STRING_BOUNDS_PREFIX "__chkp_string_bounds_"
  333. #define CHKP_VAR_BOUNDS_PREFIX "__chkp_var_bounds_"
  334. #define CHKP_ZERO_BOUNDS_VAR_NAME "__chkp_zero_bounds"
  335. #define CHKP_NONE_BOUNDS_VAR_NAME "__chkp_none_bounds"
  336. /* Static checker constructors may become very large and their
  337. compilation with optimization may take too much time.
  338. Therefore we put a limit to number of statements in one
  339. constructor. Tests with 100 000 statically initialized
  340. pointers showed following compilation times on Sandy Bridge
  341. server (used -O2):
  342. limit 100 => ~18 sec.
  343. limit 300 => ~22 sec.
  344. limit 1000 => ~30 sec.
  345. limit 3000 => ~49 sec.
  346. limit 5000 => ~55 sec.
  347. limit 10000 => ~76 sec.
  348. limit 100000 => ~532 sec. */
  349. #define MAX_STMTS_IN_STATIC_CHKP_CTOR (PARAM_VALUE (PARAM_CHKP_MAX_CTOR_SIZE))
/* Accumulator for statements emitted into one static-bounds-checker
   constructor.  Used together with MAX_STMTS_IN_STATIC_CHKP_CTOR above
   to cap the size of a single generated constructor.  */
struct chkp_ctor_stmt_list
{
  /* Statement list collected so far for the current constructor.  */
  tree stmts;
  /* Presumably the number of statements this constructor may still
     accept before the size limit is reached — confirm against users
     of MAX_STMTS_IN_STATIC_CHKP_CTOR.  */
  int avail;
};
  355. /* Return 1 if function FNDECL is instrumented by Pointer
  356. Bounds Checker. */
  357. bool
  358. chkp_function_instrumented_p (tree fndecl)
  359. {
  360. return fndecl
  361. && lookup_attribute ("chkp instrumented", DECL_ATTRIBUTES (fndecl));
  362. }
  363. /* Mark function FNDECL as instrumented. */
  364. void
  365. chkp_function_mark_instrumented (tree fndecl)
  366. {
  367. if (chkp_function_instrumented_p (fndecl))
  368. return;
  369. DECL_ATTRIBUTES (fndecl)
  370. = tree_cons (get_identifier ("chkp instrumented"), NULL,
  371. DECL_ATTRIBUTES (fndecl));
  372. }
  373. /* Return true when STMT is builtin call to instrumentation function
  374. corresponding to CODE. */
  375. bool
  376. chkp_gimple_call_builtin_p (gimple call,
  377. enum built_in_function code)
  378. {
  379. tree fndecl;
  380. if (is_gimple_call (call)
  381. && (fndecl = targetm.builtin_chkp_function (code))
  382. && gimple_call_fndecl (call) == fndecl)
  383. return true;
  384. return false;
  385. }
  386. /* Emit code to store zero bounds for PTR located at MEM. */
  387. void
  388. chkp_expand_bounds_reset_for_mem (tree mem, tree ptr)
  389. {
  390. tree zero_bnd, bnd, addr, bndstx;
  391. if (flag_chkp_use_static_const_bounds)
  392. zero_bnd = chkp_get_zero_bounds_var ();
  393. else
  394. zero_bnd = chkp_build_make_bounds_call (integer_zero_node,
  395. integer_zero_node);
  396. bnd = make_tree (pointer_bounds_type_node,
  397. assign_temp (pointer_bounds_type_node, 0, 1));
  398. addr = build1 (ADDR_EXPR,
  399. build_pointer_type (TREE_TYPE (mem)), mem);
  400. bndstx = chkp_build_bndstx_call (addr, ptr, bnd);
  401. expand_assignment (bnd, zero_bnd, false);
  402. expand_normal (bndstx);
  403. }
  404. /* Build retbnd call for returned value RETVAL.
  405. If BNDVAL is not NULL then result is stored
  406. in it. Otherwise a temporary is created to
  407. hold returned value.
  408. GSI points to a position for a retbnd call
  409. and is set to created stmt.
  410. Cgraph edge is created for a new call if
  411. UPDATE_EDGE is 1.
  412. Obtained bounds are returned. */
  413. tree
  414. chkp_insert_retbnd_call (tree bndval, tree retval,
  415. gimple_stmt_iterator *gsi)
  416. {
  417. gimple call;
  418. if (!bndval)
  419. bndval = create_tmp_reg (pointer_bounds_type_node, "retbnd");
  420. call = gimple_build_call (chkp_ret_bnd_fndecl, 1, retval);
  421. gimple_call_set_lhs (call, bndval);
  422. gsi_insert_after (gsi, call, GSI_CONTINUE_LINKING);
  423. return bndval;
  424. }
  425. /* Build a GIMPLE_CALL identical to CALL but skipping bounds
  426. arguments. */
  427. gcall *
  428. chkp_copy_call_skip_bounds (gcall *call)
  429. {
  430. bitmap bounds;
  431. unsigned i;
  432. bitmap_obstack_initialize (NULL);
  433. bounds = BITMAP_ALLOC (NULL);
  434. for (i = 0; i < gimple_call_num_args (call); i++)
  435. if (POINTER_BOUNDS_P (gimple_call_arg (call, i)))
  436. bitmap_set_bit (bounds, i);
  437. if (!bitmap_empty_p (bounds))
  438. call = gimple_call_copy_skip_args (call, bounds);
  439. gimple_call_set_with_bounds (call, false);
  440. BITMAP_FREE (bounds);
  441. bitmap_obstack_release (NULL);
  442. return call;
  443. }
/* Redirect edge E to the correct node according to call_stmt.
   Return 1 if bounds removal from call_stmt should be done
   instead of redirection.  */
bool
chkp_redirect_edge (cgraph_edge *e)
{
  bool instrumented = false;
  tree decl = e->callee->decl;

  /* The callee counts as instrumented if it is an instrumentation
     clone or carries the "chkp instrumented" attribute.  */
  if (e->callee->instrumentation_clone
      || chkp_function_instrumented_p (decl))
    instrumented = true;

  if (instrumented
      && !gimple_call_with_bounds_p (e->call_stmt))
    /* Instrumented callee but a no-bounds call site: route the call
       back to the original (non-instrumented) declaration.  */
    e->redirect_callee (cgraph_node::get_create (e->callee->orig_decl));
  else if (!instrumented
	   && gimple_call_with_bounds_p (e->call_stmt)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCL)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDCU)
	   && !chkp_gimple_call_builtin_p (e->call_stmt, BUILT_IN_CHKP_BNDSTX))
    {
      /* A bounds-carrying call to a non-instrumented callee (and not
	 one of the chkp builtins handled elsewhere).  Prefer the
	 instrumented version when one exists.  */
      if (e->callee->instrumented_version)
	e->redirect_callee (e->callee->instrumented_version);
      else
	{
	  tree args = TYPE_ARG_TYPES (TREE_TYPE (decl));
	  /* Avoid bounds removal if all args will be removed.  */
	  if (!args || TREE_VALUE (args) != void_type_node)
	    return true;
	  else
	    gimple_call_set_with_bounds (e->call_stmt, false);
	}
    }

  /* No bounds removal required by the caller.  */
  return false;
}
  478. /* Mark statement S to not be instrumented. */
  479. static void
  480. chkp_mark_stmt (gimple s)
  481. {
  482. gimple_set_plf (s, GF_PLF_1, true);
  483. }
  484. /* Mark statement S to be instrumented. */
  485. static void
  486. chkp_unmark_stmt (gimple s)
  487. {
  488. gimple_set_plf (s, GF_PLF_1, false);
  489. }
  490. /* Return 1 if statement S should not be instrumented. */
  491. static bool
  492. chkp_marked_stmt_p (gimple s)
  493. {
  494. return gimple_plf (s, GF_PLF_1);
  495. }
  496. /* Get var to be used for bound temps. */
  497. static tree
  498. chkp_get_tmp_var (void)
  499. {
  500. if (!tmp_var)
  501. tmp_var = create_tmp_reg (pointer_bounds_type_node, CHKP_BOUND_TMP_NAME);
  502. return tmp_var;
  503. }
  504. /* Get SSA_NAME to be used as temp. */
  505. static tree
  506. chkp_get_tmp_reg (gimple stmt)
  507. {
  508. if (in_chkp_pass)
  509. return make_ssa_name (chkp_get_tmp_var (), stmt);
  510. return make_temp_ssa_name (pointer_bounds_type_node, stmt,
  511. CHKP_BOUND_TMP_NAME);
  512. }
  513. /* Get var to be used for size temps. */
  514. static tree
  515. chkp_get_size_tmp_var (void)
  516. {
  517. if (!size_tmp_var)
  518. size_tmp_var = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
  519. return size_tmp_var;
  520. }
  521. /* Register bounds BND for address of OBJ. */
  522. static void
  523. chkp_register_addr_bounds (tree obj, tree bnd)
  524. {
  525. if (bnd == incomplete_bounds)
  526. return;
  527. chkp_reg_addr_bounds->put (obj, bnd);
  528. if (dump_file && (dump_flags & TDF_DETAILS))
  529. {
  530. fprintf (dump_file, "Regsitered bound ");
  531. print_generic_expr (dump_file, bnd, 0);
  532. fprintf (dump_file, " for address of ");
  533. print_generic_expr (dump_file, obj, 0);
  534. fprintf (dump_file, "\n");
  535. }
  536. }
  537. /* Return bounds registered for address of OBJ. */
  538. static tree
  539. chkp_get_registered_addr_bounds (tree obj)
  540. {
  541. tree *slot = chkp_reg_addr_bounds->get (obj);
  542. return slot ? *slot : NULL_TREE;
  543. }
  544. /* Mark BOUNDS as completed. */
  545. static void
  546. chkp_mark_completed_bounds (tree bounds)
  547. {
  548. chkp_completed_bounds_set->add (bounds);
  549. if (dump_file && (dump_flags & TDF_DETAILS))
  550. {
  551. fprintf (dump_file, "Marked bounds ");
  552. print_generic_expr (dump_file, bounds, 0);
  553. fprintf (dump_file, " as completed\n");
  554. }
  555. }
  556. /* Return 1 if BOUNDS were marked as completed and 0 otherwise. */
  557. static bool
  558. chkp_completed_bounds (tree bounds)
  559. {
  560. return chkp_completed_bounds_set->contains (bounds);
  561. }
/* Clear completed bound marks.  */
static void
chkp_erase_completed_bounds (void)
{
  /* Recreate the set rather than clearing it element by element.  */
  delete chkp_completed_bounds_set;
  chkp_completed_bounds_set = new hash_set<tree>;
}
  569. /* Mark BOUNDS associated with PTR as incomplete. */
  570. static void
  571. chkp_register_incomplete_bounds (tree bounds, tree ptr)
  572. {
  573. chkp_incomplete_bounds_map->put (bounds, ptr);
  574. if (dump_file && (dump_flags & TDF_DETAILS))
  575. {
  576. fprintf (dump_file, "Regsitered incomplete bounds ");
  577. print_generic_expr (dump_file, bounds, 0);
  578. fprintf (dump_file, " for ");
  579. print_generic_expr (dump_file, ptr, 0);
  580. fprintf (dump_file, "\n");
  581. }
  582. }
  583. /* Return 1 if BOUNDS are incomplete and 0 otherwise. */
  584. static bool
  585. chkp_incomplete_bounds (tree bounds)
  586. {
  587. if (bounds == incomplete_bounds)
  588. return true;
  589. if (chkp_completed_bounds (bounds))
  590. return false;
  591. return chkp_incomplete_bounds_map->get (bounds) != NULL;
  592. }
/* Clear incomplete bound marks.  */
static void
chkp_erase_incomplete_bounds (void)
{
  /* Recreate the map rather than clearing it element by element.  */
  delete chkp_incomplete_bounds_map;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
}
  600. /* Build and return bndmk call which creates bounds for structure
  601. pointed by PTR. Structure should have complete type. */
  602. tree
  603. chkp_make_bounds_for_struct_addr (tree ptr)
  604. {
  605. tree type = TREE_TYPE (ptr);
  606. tree size;
  607. gcc_assert (POINTER_TYPE_P (type));
  608. size = TYPE_SIZE (TREE_TYPE (type));
  609. gcc_assert (size);
  610. return build_call_nary (pointer_bounds_type_node,
  611. build_fold_addr_expr (chkp_bndmk_fndecl),
  612. 2, ptr, size);
  613. }
/* Traversal function for chkp_may_finish_incomplete_bounds.
   Set RES to 0 if at least one argument of phi statement
   defining bounds (passed in KEY arg) is unknown.
   Traversal stops when first unknown phi argument is found.  */
bool
chkp_may_complete_phi_bounds (tree const &bounds, tree *slot ATTRIBUTE_UNUSED,
			      bool *res)
{
  gimple phi;
  unsigned i;

  /* Incomplete bounds are always SSA names defined by phi nodes.  */
  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);
      if (!phi_arg)
	{
	  *res = false;
	  /* Do not need to traverse further.  Returning false stops
	     the hash map traversal.  */
	  return false;
	}
    }

  /* All args of this phi are known; continue the traversal.  */
  return true;
}
/* Return 1 if all phi nodes created for bounds have their
   arguments computed.  */
static bool
chkp_may_finish_incomplete_bounds (void)
{
  bool res = true;

  /* chkp_may_complete_phi_bounds clears RES and aborts the traversal
     as soon as an unknown phi argument is found.  */
  chkp_incomplete_bounds_map
    ->traverse<bool *, chkp_may_complete_phi_bounds> (&res);

  return res;
}
/* Helper function for chkp_finish_incomplete_bounds.
   Recompute args for bounds phi node.  */
bool
chkp_recompute_phi_bounds (tree const &bounds, tree *slot,
			   void *res ATTRIBUTE_UNUSED)
{
  /* SLOT holds the pointer SSA name BOUNDS were computed for.  */
  tree ptr = *slot;
  gphi *bounds_phi;
  gphi *ptr_phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);
  gcc_assert (TREE_CODE (ptr) == SSA_NAME);

  bounds_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (bounds));
  ptr_phi = as_a <gphi *> (SSA_NAME_DEF_STMT (ptr));

  /* For each incoming edge of the pointer's phi, add the bounds found
     for the corresponding pointer argument to the bounds phi.  */
  for (i = 0; i < gimple_phi_num_args (bounds_phi); i++)
    {
      tree ptr_arg = gimple_phi_arg_def (ptr_phi, i);
      tree bound_arg = chkp_find_bounds (ptr_arg, NULL);

      add_phi_arg (bounds_phi, bound_arg,
		   gimple_phi_arg_edge (ptr_phi, i),
		   UNKNOWN_LOCATION);
    }

  /* Always continue the traversal.  */
  return true;
}
  673. /* Mark BOUNDS as invalid. */
  674. static void
  675. chkp_mark_invalid_bounds (tree bounds)
  676. {
  677. chkp_invalid_bounds->add (bounds);
  678. if (dump_file && (dump_flags & TDF_DETAILS))
  679. {
  680. fprintf (dump_file, "Marked bounds ");
  681. print_generic_expr (dump_file, bounds, 0);
  682. fprintf (dump_file, " as invalid\n");
  683. }
  684. }
  685. /* Return 1 if BOUNDS were marked as invalid and 0 otherwise. */
  686. static bool
  687. chkp_valid_bounds (tree bounds)
  688. {
  689. if (bounds == zero_bounds || bounds == none_bounds)
  690. return false;
  691. return !chkp_invalid_bounds->contains (bounds);
  692. }
/* Helper function for chkp_finish_incomplete_bounds.
   Check all arguments of phi nodes trying to find
   valid completed bounds.  If there is at least one
   such arg then bounds produced by phi node are marked
   as valid completed bounds and all phi args are
   recomputed.  */
bool
chkp_find_valid_phi_bounds (tree const &bounds, tree *slot, bool *res)
{
  gimple phi;
  unsigned i;

  gcc_assert (TREE_CODE (bounds) == SSA_NAME);

  /* Nothing to do for bounds already marked completed.  */
  if (chkp_completed_bounds (bounds))
    return true;

  phi = SSA_NAME_DEF_STMT (bounds);

  gcc_assert (phi && gimple_code (phi) == GIMPLE_PHI);

  for (i = 0; i < gimple_phi_num_args (phi); i++)
    {
      tree phi_arg = gimple_phi_arg_def (phi, i);

      gcc_assert (phi_arg);

      if (chkp_valid_bounds (phi_arg) && !chkp_incomplete_bounds (phi_arg))
	{
	  /* Found a valid completed source: mark these bounds as
	     completed, recompute the phi args, and report progress
	     through RES so the caller iterates again.  */
	  *res = true;
	  chkp_mark_completed_bounds (bounds);
	  chkp_recompute_phi_bounds (bounds, slot, NULL);
	  return true;
	}
    }

  /* Continue the traversal in all cases.  */
  return true;
}
  723. /* Helper function for chkp_finish_incomplete_bounds.
  724. Marks all incompleted bounds as invalid. */
  725. bool
  726. chkp_mark_invalid_bounds_walker (tree const &bounds,
  727. tree *slot ATTRIBUTE_UNUSED,
  728. void *res ATTRIBUTE_UNUSED)
  729. {
  730. if (!chkp_completed_bounds (bounds))
  731. {
  732. chkp_mark_invalid_bounds (bounds);
  733. chkp_mark_completed_bounds (bounds);
  734. }
  735. return true;
  736. }
  737. /* When all bound phi nodes have all their args computed
  738. we have enough info to find valid bounds. We iterate
  739. through all incompleted bounds searching for valid
  740. bounds. Found valid bounds are marked as completed
  741. and all remaining incompleted bounds are recomputed.
  742. Process continues until no new valid bounds may be
  743. found. All remained incompleted bounds are marked as
  744. invalid (i.e. have no valid source of bounds). */
  745. static void
  746. chkp_finish_incomplete_bounds (void)
  747. {
  748. bool found_valid;
  749. while (found_valid)
  750. {
  751. found_valid = false;
  752. chkp_incomplete_bounds_map->
  753. traverse<bool *, chkp_find_valid_phi_bounds> (&found_valid);
  754. if (found_valid)
  755. chkp_incomplete_bounds_map->
  756. traverse<void *, chkp_recompute_phi_bounds> (NULL);
  757. }
  758. chkp_incomplete_bounds_map->
  759. traverse<void *, chkp_mark_invalid_bounds_walker> (NULL);
  760. chkp_incomplete_bounds_map->
  761. traverse<void *, chkp_recompute_phi_bounds> (NULL);
  762. chkp_erase_completed_bounds ();
  763. chkp_erase_incomplete_bounds ();
  764. }
  765. /* Return 1 if type TYPE is a pointer type or a
  766. structure having a pointer type as one of its fields.
  767. Otherwise return 0. */
  768. bool
  769. chkp_type_has_pointer (const_tree type)
  770. {
  771. bool res = false;
  772. if (BOUNDED_TYPE_P (type))
  773. res = true;
  774. else if (RECORD_OR_UNION_TYPE_P (type))
  775. {
  776. tree field;
  777. for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
  778. if (TREE_CODE (field) == FIELD_DECL)
  779. res = res || chkp_type_has_pointer (TREE_TYPE (field));
  780. }
  781. else if (TREE_CODE (type) == ARRAY_TYPE)
  782. res = chkp_type_has_pointer (TREE_TYPE (type));
  783. return res;
  784. }
  785. unsigned
  786. chkp_type_bounds_count (const_tree type)
  787. {
  788. unsigned res = 0;
  789. if (!type)
  790. res = 0;
  791. else if (BOUNDED_TYPE_P (type))
  792. res = 1;
  793. else if (RECORD_OR_UNION_TYPE_P (type))
  794. {
  795. bitmap have_bound;
  796. bitmap_obstack_initialize (NULL);
  797. have_bound = BITMAP_ALLOC (NULL);
  798. chkp_find_bound_slots (type, have_bound);
  799. res = bitmap_count_bits (have_bound);
  800. BITMAP_FREE (have_bound);
  801. bitmap_obstack_release (NULL);
  802. }
  803. return res;
  804. }
  805. /* Get bounds associated with NODE via
  806. chkp_set_bounds call. */
  807. tree
  808. chkp_get_bounds (tree node)
  809. {
  810. tree *slot;
  811. if (!chkp_bounds_map)
  812. return NULL_TREE;
  813. slot = chkp_bounds_map->get (node);
  814. return slot ? *slot : NULL_TREE;
  815. }
  816. /* Associate bounds VAL with NODE. */
  817. void
  818. chkp_set_bounds (tree node, tree val)
  819. {
  820. if (!chkp_bounds_map)
  821. chkp_bounds_map = new hash_map<tree, tree>;
  822. chkp_bounds_map->put (node, val);
  823. }
  824. /* Check if statically initialized variable VAR require
  825. static bounds initialization. If VAR is added into
  826. bounds initlization list then 1 is returned. Otherwise
  827. return 0. */
  828. extern bool
  829. chkp_register_var_initializer (tree var)
  830. {
  831. if (!flag_check_pointer_bounds
  832. || DECL_INITIAL (var) == error_mark_node)
  833. return false;
  834. gcc_assert (TREE_CODE (var) == VAR_DECL);
  835. gcc_assert (DECL_INITIAL (var));
  836. if (TREE_STATIC (var)
  837. && chkp_type_has_pointer (TREE_TYPE (var)))
  838. {
  839. varpool_node::get_create (var)->need_bounds_init = 1;
  840. return true;
  841. }
  842. return false;
  843. }
  844. /* Helper function for chkp_finish_file.
  845. Add new modification statement (RHS is assigned to LHS)
  846. into list of static initializer statementes (passed in ARG).
  847. If statements list becomes too big, emit checker constructor
  848. and start the new one. */
  849. static void
  850. chkp_add_modification_to_stmt_list (tree lhs,
  851. tree rhs,
  852. void *arg)
  853. {
  854. struct chkp_ctor_stmt_list *stmts = (struct chkp_ctor_stmt_list *)arg;
  855. tree modify;
  856. if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
  857. rhs = build1 (CONVERT_EXPR, TREE_TYPE (lhs), rhs);
  858. modify = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
  859. append_to_statement_list (modify, &stmts->stmts);
  860. stmts->avail--;
  861. }
  862. /* Build and return ADDR_EXPR for specified object OBJ. */
  863. static tree
  864. chkp_build_addr_expr (tree obj)
  865. {
  866. return TREE_CODE (obj) == TARGET_MEM_REF
  867. ? tree_mem_ref_addr (ptr_type_node, obj)
  868. : build_fold_addr_expr (obj);
  869. }
/* Helper function for chkp_finish_file.
   Initialize bound variable BND_VAR with bounds of variable
   VAR to statements list STMTS.  If statements list becomes
   too big, emit checker constructor and start the new one.  */
static void
chkp_output_static_bounds (tree bnd_var, tree var,
			   struct chkp_ctor_stmt_list *stmts)
{
  tree lb, ub, size;

  if (TREE_CODE (var) == STRING_CST)
    {
      /* String constant: lower bound is its address, size is
	 TREE_STRING_LENGTH minus one (presumably the last valid
	 offset — confirm against TREE_STRING_LENGTH semantics).  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = build_int_cst (size_type_node, TREE_STRING_LENGTH (var) - 1);
    }
  else if (DECL_SIZE (var)
	   && !chkp_variable_size_type (TREE_TYPE (var)))
    {
      /* Compute bounds using statically known size.  */
      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      size = size_binop (MINUS_EXPR, DECL_SIZE_UNIT (var), size_one_node);
    }
  else
    {
      /* Compute bounds using dynamic size: emit a call to the
	 chkp sizeof function for VAR.  */
      tree call;

      lb = build1 (CONVERT_EXPR, size_type_node, chkp_build_addr_expr (var));
      call = build1 (ADDR_EXPR,
		     build_pointer_type (TREE_TYPE (chkp_sizeof_fndecl)),
		     chkp_sizeof_fndecl);
      size = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_sizeof_fndecl)),
			      call, 1, var);

      if (flag_chkp_zero_dynamic_size_as_infinite)
	{
	  /* If the dynamic size turns out to be zero, substitute the
	     maximum size expressible from LB instead.  */
	  tree max_size, cond;

	  max_size = build2 (MINUS_EXPR, size_type_node, size_zero_node, lb);
	  cond = build2 (NE_EXPR, boolean_type_node, size, size_zero_node);
	  size = build3 (COND_EXPR, size_type_node, cond, size, max_size);
	}

      size = size_binop (MINUS_EXPR, size, size_one_node);
    }

  /* Upper bound is LB + SIZE (SIZE already reduced by one above).  */
  ub = size_binop (PLUS_EXPR, lb, size);
  stmts->avail -= targetm.chkp_initialize_bounds (bnd_var, lb, ub,
						  &stmts->stmts);
  /* Budget exhausted: emit the accumulated statements as a static
     constructor and start a fresh list.  */
  if (stmts->avail <= 0)
    {
      cgraph_build_static_cdtor ('B', stmts->stmts,
				 MAX_RESERVED_INIT_PRIORITY + 2);
      stmts->avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
      stmts->stmts = NULL;
    }
}
  921. /* Return entry block to be used for checker initilization code.
  922. Create new block if required. */
  923. static basic_block
  924. chkp_get_entry_block (void)
  925. {
  926. if (!entry_block)
  927. entry_block = split_block (ENTRY_BLOCK_PTR_FOR_FN (cfun), NULL)->dest;
  928. return entry_block;
  929. }
  930. /* Return a bounds var to be used for pointer var PTR_VAR. */
  931. static tree
  932. chkp_get_bounds_var (tree ptr_var)
  933. {
  934. tree bnd_var;
  935. tree *slot;
  936. slot = chkp_bound_vars->get (ptr_var);
  937. if (slot)
  938. bnd_var = *slot;
  939. else
  940. {
  941. bnd_var = create_tmp_reg (pointer_bounds_type_node,
  942. CHKP_BOUND_TMP_NAME);
  943. chkp_bound_vars->put (ptr_var, bnd_var);
  944. }
  945. return bnd_var;
  946. }
  947. /* Register bounds BND for object PTR in global bounds table.
  948. A copy of bounds may be created for abnormal ssa names.
  949. Returns bounds to use for PTR. */
  950. static tree
  951. chkp_maybe_copy_and_register_bounds (tree ptr, tree bnd)
  952. {
  953. bool abnormal_ptr;
  954. if (!chkp_reg_bounds)
  955. return bnd;
  956. /* Do nothing if bounds are incomplete_bounds
  957. because it means bounds will be recomputed. */
  958. if (bnd == incomplete_bounds)
  959. return bnd;
  960. abnormal_ptr = (TREE_CODE (ptr) == SSA_NAME
  961. && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
  962. && gimple_code (SSA_NAME_DEF_STMT (ptr)) != GIMPLE_PHI);
  963. /* A single bounds value may be reused multiple times for
  964. different pointer values. It may cause coalescing issues
  965. for abnormal SSA names. To avoid it we create a bounds
  966. copy in case it is computed for abnormal SSA name.
  967. We also cannot reuse such created copies for other pointers */
  968. if (abnormal_ptr
  969. || bitmap_bit_p (chkp_abnormal_copies, SSA_NAME_VERSION (bnd)))
  970. {
  971. tree bnd_var = NULL_TREE;
  972. if (abnormal_ptr)
  973. {
  974. if (SSA_NAME_VAR (ptr))
  975. bnd_var = chkp_get_bounds_var (SSA_NAME_VAR (ptr));
  976. }
  977. else
  978. bnd_var = chkp_get_tmp_var ();
  979. /* For abnormal copies we may just find original
  980. bounds and use them. */
  981. if (!abnormal_ptr && !SSA_NAME_IS_DEFAULT_DEF (bnd))
  982. {
  983. gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
  984. gcc_checking_assert (gimple_code (bnd_def) == GIMPLE_ASSIGN);
  985. bnd = gimple_assign_rhs1 (bnd_def);
  986. }
  987. /* For undefined values we usually use none bounds
  988. value but in case of abnormal edge it may cause
  989. coalescing failures. Use default definition of
  990. bounds variable instead to avoid it. */
  991. else if (SSA_NAME_IS_DEFAULT_DEF (ptr)
  992. && TREE_CODE (SSA_NAME_VAR (ptr)) != PARM_DECL)
  993. {
  994. bnd = get_or_create_ssa_default_def (cfun, bnd_var);
  995. if (dump_file && (dump_flags & TDF_DETAILS))
  996. {
  997. fprintf (dump_file, "Using default def bounds ");
  998. print_generic_expr (dump_file, bnd, 0);
  999. fprintf (dump_file, " for abnormal default def SSA name ");
  1000. print_generic_expr (dump_file, ptr, 0);
  1001. fprintf (dump_file, "\n");
  1002. }
  1003. }
  1004. else
  1005. {
  1006. tree copy;
  1007. gimple def = SSA_NAME_DEF_STMT (ptr);
  1008. gimple assign;
  1009. gimple_stmt_iterator gsi;
  1010. if (bnd_var)
  1011. copy = make_ssa_name (bnd_var, gimple_build_nop ());
  1012. else
  1013. copy = make_temp_ssa_name (pointer_bounds_type_node,
  1014. gimple_build_nop (),
  1015. CHKP_BOUND_TMP_NAME);
  1016. assign = gimple_build_assign (copy, bnd);
  1017. if (dump_file && (dump_flags & TDF_DETAILS))
  1018. {
  1019. fprintf (dump_file, "Creating a copy of bounds ");
  1020. print_generic_expr (dump_file, bnd, 0);
  1021. fprintf (dump_file, " for abnormal SSA name ");
  1022. print_generic_expr (dump_file, ptr, 0);
  1023. fprintf (dump_file, "\n");
  1024. }
  1025. if (gimple_code (def) == GIMPLE_NOP)
  1026. {
  1027. gsi = gsi_last_bb (chkp_get_entry_block ());
  1028. if (!gsi_end_p (gsi) && is_ctrl_stmt (gsi_stmt (gsi)))
  1029. gsi_insert_before (&gsi, assign, GSI_CONTINUE_LINKING);
  1030. else
  1031. gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
  1032. }
  1033. else
  1034. {
  1035. gimple bnd_def = SSA_NAME_DEF_STMT (bnd);
  1036. /* Sometimes (e.g. when we load a pointer from a
  1037. memory) bounds are produced later than a pointer.
  1038. We need to insert bounds copy appropriately. */
  1039. if (gimple_code (bnd_def) != GIMPLE_NOP
  1040. && stmt_dominates_stmt_p (def, bnd_def))
  1041. gsi = gsi_for_stmt (bnd_def);
  1042. else
  1043. gsi = gsi_for_stmt (def);
  1044. gsi_insert_after (&gsi, assign, GSI_CONTINUE_LINKING);
  1045. }
  1046. bnd = copy;
  1047. }
  1048. if (abnormal_ptr)
  1049. bitmap_set_bit (chkp_abnormal_copies, SSA_NAME_VERSION (bnd));
  1050. }
  1051. chkp_reg_bounds->put (ptr, bnd);
  1052. if (dump_file && (dump_flags & TDF_DETAILS))
  1053. {
  1054. fprintf (dump_file, "Regsitered bound ");
  1055. print_generic_expr (dump_file, bnd, 0);
  1056. fprintf (dump_file, " for pointer ");
  1057. print_generic_expr (dump_file, ptr, 0);
  1058. fprintf (dump_file, "\n");
  1059. }
  1060. return bnd;
  1061. }
  1062. /* Get bounds registered for object PTR in global bounds table. */
  1063. static tree
  1064. chkp_get_registered_bounds (tree ptr)
  1065. {
  1066. tree *slot;
  1067. if (!chkp_reg_bounds)
  1068. return NULL_TREE;
  1069. slot = chkp_reg_bounds->get (ptr);
  1070. return slot ? *slot : NULL_TREE;
  1071. }
/* Add bound retvals to return statement pointed by GSI.  */
static void
chkp_add_bounds_to_ret_stmt (gimple_stmt_iterator *gsi)
{
  greturn *ret = as_a <greturn *> (gsi_stmt (*gsi));
  tree retval = gimple_return_retval (ret);
  tree ret_decl = DECL_RESULT (cfun->decl);
  tree bounds;

  /* Nothing to do for a bare "return;".  */
  if (!retval)
    return;

  /* Only pointer-carrying result decls get return bounds.  */
  if (BOUNDED_P (ret_decl))
    {
      bounds = chkp_find_bounds (retval, gsi);
      bounds = chkp_maybe_copy_and_register_bounds (ret_decl, bounds);
      gimple_return_set_retbnd (ret, bounds);
    }

  update_stmt (ret);
}
  1090. /* Force OP to be suitable for using as an argument for call.
  1091. New statements (if any) go to SEQ. */
  1092. static tree
  1093. chkp_force_gimple_call_op (tree op, gimple_seq *seq)
  1094. {
  1095. gimple_seq stmts;
  1096. gimple_stmt_iterator si;
  1097. op = force_gimple_operand (unshare_expr (op), &stmts, true, NULL_TREE);
  1098. for (si = gsi_start (stmts); !gsi_end_p (si); gsi_next (&si))
  1099. chkp_mark_stmt (gsi_stmt (si));
  1100. gimple_seq_add_seq (seq, stmts);
  1101. return op;
  1102. }
/* Generate lower bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
static void
chkp_check_lower (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple check;
  tree node;

  /* Skip checks against zero bounds in not instrumented functions.  */
  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  /* DIRFLAG is integer_zero_node for reads: honor
     -fno-chkp-check-read.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  /* DIRFLAG is integer_one_node for writes: honor
     -fno-chkp-check-write.  */
  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* Force ADDR into a valid call operand; helper statements are
     accumulated in SEQ and marked non-instrumentable.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  check = gimple_build_call (chkp_checkl_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple before = gsi_stmt (iter);
      fprintf (dump_file, "Generated lower bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, " ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
/* Generate upper bound check for memory access by ADDR.
   Check is inserted before the position pointed by ITER.
   DIRFLAG indicates whether memory access is load or store.  */
static void
chkp_check_upper (tree addr, tree bounds,
		  gimple_stmt_iterator iter,
		  location_t location,
		  tree dirflag)
{
  gimple_seq seq;
  gimple check;
  tree node;

  /* Skip checks against zero bounds in not instrumented functions.  */
  if (!chkp_function_instrumented_p (current_function_decl)
      && bounds == chkp_get_zero_bounds ())
    return;

  /* DIRFLAG is integer_zero_node for reads: honor
     -fno-chkp-check-read.  */
  if (dirflag == integer_zero_node
      && !flag_chkp_check_read)
    return;

  /* DIRFLAG is integer_one_node for writes: honor
     -fno-chkp-check-write.  */
  if (dirflag == integer_one_node
      && !flag_chkp_check_write)
    return;

  seq = NULL;

  /* Force ADDR into a valid call operand; helper statements are
     accumulated in SEQ and marked non-instrumentable.  */
  node = chkp_force_gimple_call_op (addr, &seq);

  check = gimple_build_call (chkp_checku_fndecl, 2, node, bounds);
  chkp_mark_stmt (check);
  gimple_call_set_with_bounds (check, true);
  gimple_set_location (check, location);
  gimple_seq_add_stmt (&seq, check);

  gsi_insert_seq_before (&iter, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      gimple before = gsi_stmt (iter);
      fprintf (dump_file, "Generated upper bound check for statement ");
      print_gimple_stmt (dump_file, before, 0, TDF_VOPS|TDF_MEMSYMS);
      fprintf (dump_file, " ");
      print_gimple_stmt (dump_file, check, 0, TDF_VOPS|TDF_MEMSYMS);
    }
}
  1179. /* Generate lower and upper bound checks for memory access
  1180. to memory slot [FIRST, LAST] againsr BOUNDS. Checks
  1181. are inserted before the position pointed by ITER.
  1182. DIRFLAG indicates whether memory access is load or store. */
  1183. void
  1184. chkp_check_mem_access (tree first, tree last, tree bounds,
  1185. gimple_stmt_iterator iter,
  1186. location_t location,
  1187. tree dirflag)
  1188. {
  1189. chkp_check_lower (first, bounds, iter, location, dirflag);
  1190. chkp_check_upper (last, bounds, iter, location, dirflag);
  1191. }
/* Replace call to _bnd_chk_* pointed by GSI with
   bndcu and bndcl calls.  DIRFLAG determines whether
   check is for read or write.  */
void
chkp_replace_address_check_builtin (gimple_stmt_iterator *gsi,
				    tree dirflag)
{
  /* Remember the builtin call's own position: the insertions below
     work relative to *GSI.  */
  gimple_stmt_iterator call_iter = *gsi;
  gimple call = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (call);
  tree addr = gimple_call_arg (call, 0);
  tree bounds = chkp_find_bounds (addr, gsi);

  /* Lower-bound check for the *_lbounds and *_bounds variants.  */
  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
      || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
    chkp_check_lower (addr, bounds, *gsi, gimple_location (call), dirflag);

  /* Upper-bound check of ADDR itself for the *_ubounds variant.  */
  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS)
    chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);

  /* The *_bounds variant takes a size: its upper-bound check is
     against the last accessed byte, ADDR + SIZE - 1.  */
  if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS)
    {
      tree size = gimple_call_arg (call, 1);
      addr = fold_build_pointer_plus (addr, size);
      addr = fold_build_pointer_plus_hwi (addr, -1);
      chkp_check_upper (addr, bounds, *gsi, gimple_location (call), dirflag);
    }

  /* The builtin call itself is no longer needed.  */
  gsi_remove (&call_iter, true);
}
  1218. /* Replace call to _bnd_get_ptr_* pointed by GSI with
  1219. corresponding bounds extract call. */
  1220. void
  1221. chkp_replace_extract_builtin (gimple_stmt_iterator *gsi)
  1222. {
  1223. gimple call = gsi_stmt (*gsi);
  1224. tree fndecl = gimple_call_fndecl (call);
  1225. tree addr = gimple_call_arg (call, 0);
  1226. tree bounds = chkp_find_bounds (addr, gsi);
  1227. gimple extract;
  1228. if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND)
  1229. fndecl = chkp_extract_lower_fndecl;
  1230. else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND)
  1231. fndecl = chkp_extract_upper_fndecl;
  1232. else
  1233. gcc_unreachable ();
  1234. extract = gimple_build_call (fndecl, 1, bounds);
  1235. gimple_call_set_lhs (extract, gimple_call_lhs (call));
  1236. chkp_mark_stmt (extract);
  1237. gsi_replace (gsi, extract, false);
  1238. }
  1239. /* Return COMPONENT_REF accessing FIELD in OBJ. */
  1240. static tree
  1241. chkp_build_component_ref (tree obj, tree field)
  1242. {
  1243. tree res;
  1244. /* If object is TMR then we do not use component_ref but
  1245. add offset instead. We need it to be able to get addr
  1246. of the reasult later. */
  1247. if (TREE_CODE (obj) == TARGET_MEM_REF)
  1248. {
  1249. tree offs = TMR_OFFSET (obj);
  1250. offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
  1251. offs, DECL_FIELD_OFFSET (field));
  1252. gcc_assert (offs);
  1253. res = copy_node (obj);
  1254. TREE_TYPE (res) = TREE_TYPE (field);
  1255. TMR_OFFSET (res) = offs;
  1256. }
  1257. else
  1258. res = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL_TREE);
  1259. return res;
  1260. }
  1261. /* Return ARRAY_REF for array ARR and index IDX with
  1262. specified element type ETYPE and element size ESIZE. */
  1263. static tree
  1264. chkp_build_array_ref (tree arr, tree etype, tree esize,
  1265. unsigned HOST_WIDE_INT idx)
  1266. {
  1267. tree index = build_int_cst (size_type_node, idx);
  1268. tree res;
  1269. /* If object is TMR then we do not use array_ref but
  1270. add offset instead. We need it to be able to get addr
  1271. of the reasult later. */
  1272. if (TREE_CODE (arr) == TARGET_MEM_REF)
  1273. {
  1274. tree offs = TMR_OFFSET (arr);
  1275. esize = fold_binary_to_constant (MULT_EXPR, TREE_TYPE (esize),
  1276. esize, index);
  1277. gcc_assert(esize);
  1278. offs = fold_binary_to_constant (PLUS_EXPR, TREE_TYPE (offs),
  1279. offs, esize);
  1280. gcc_assert (offs);
  1281. res = copy_node (arr);
  1282. TREE_TYPE (res) = etype;
  1283. TMR_OFFSET (res) = offs;
  1284. }
  1285. else
  1286. res = build4 (ARRAY_REF, etype, arr, index, NULL_TREE, NULL_TREE);
  1287. return res;
  1288. }
  1289. /* Helper function for chkp_add_bounds_to_call_stmt.
  1290. Fill ALL_BOUNDS output array with created bounds.
  1291. OFFS is used for recursive calls and holds basic
  1292. offset of TYPE in outer structure in bits.
  1293. ITER points a position where bounds are searched.
  1294. ALL_BOUNDS[i] is filled with elem bounds if there
  1295. is a field in TYPE which has pointer type and offset
  1296. equal to i * POINTER_SIZE in bits. */
  1297. static void
  1298. chkp_find_bounds_for_elem (tree elem, tree *all_bounds,
  1299. HOST_WIDE_INT offs,
  1300. gimple_stmt_iterator *iter)
  1301. {
  1302. tree type = TREE_TYPE (elem);
  1303. if (BOUNDED_TYPE_P (type))
  1304. {
  1305. if (!all_bounds[offs / POINTER_SIZE])
  1306. {
  1307. tree temp = make_temp_ssa_name (type, gimple_build_nop (), "");
  1308. gimple assign = gimple_build_assign (temp, elem);
  1309. gimple_stmt_iterator gsi;
  1310. gsi_insert_before (iter, assign, GSI_SAME_STMT);
  1311. gsi = gsi_for_stmt (assign);
  1312. all_bounds[offs / POINTER_SIZE] = chkp_find_bounds (temp, &gsi);
  1313. }
  1314. }
  1315. else if (RECORD_OR_UNION_TYPE_P (type))
  1316. {
  1317. tree field;
  1318. for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
  1319. if (TREE_CODE (field) == FIELD_DECL)
  1320. {
  1321. tree base = unshare_expr (elem);
  1322. tree field_ref = chkp_build_component_ref (base, field);
  1323. HOST_WIDE_INT field_offs
  1324. = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
  1325. if (DECL_FIELD_OFFSET (field))
  1326. field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
  1327. chkp_find_bounds_for_elem (field_ref, all_bounds,
  1328. offs + field_offs, iter);
  1329. }
  1330. }
  1331. else if (TREE_CODE (type) == ARRAY_TYPE)
  1332. {
  1333. tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
  1334. tree etype = TREE_TYPE (type);
  1335. HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
  1336. unsigned HOST_WIDE_INT cur;
  1337. if (!maxval || integer_minus_onep (maxval))
  1338. return;
  1339. for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
  1340. {
  1341. tree base = unshare_expr (elem);
  1342. tree arr_elem = chkp_build_array_ref (base, etype,
  1343. TYPE_SIZE (etype),
  1344. cur);
  1345. chkp_find_bounds_for_elem (arr_elem, all_bounds, offs + cur * esize,
  1346. iter);
  1347. }
  1348. }
  1349. }
  1350. /* Fill HAVE_BOUND output bitmap with information about
1351. bounds required for object of type TYPE.
  1352. OFFS is used for recursive calls and holds basic
  1353. offset of TYPE in outer structure in bits.
  1354. HAVE_BOUND[i] is set to 1 if there is a field
  1355. in TYPE which has pointer type and offset
  1356. equal to i * POINTER_SIZE - OFFS in bits. */
void
chkp_find_bound_slots_1 (const_tree type, bitmap have_bound,
                         HOST_WIDE_INT offs)
{
  /* A pointer occupies exactly one bound slot.  */
  if (BOUNDED_TYPE_P (type))
    bitmap_set_bit (have_bound, offs / POINTER_SIZE);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      /* Recurse into each field with its bit offset added to OFFS.  */
      tree field;

      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
        if (TREE_CODE (field) == FIELD_DECL)
          {
            HOST_WIDE_INT field_offs
              = TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
            /* DECL_FIELD_OFFSET is in bytes; convert to bits.  */
            if (DECL_FIELD_OFFSET (field))
              field_offs += TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field)) * 8;
            chkp_find_bound_slots_1 (TREE_TYPE (field), have_bound,
                                     offs + field_offs);
          }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      HOST_WIDE_INT esize = TREE_INT_CST_LOW (TYPE_SIZE (etype));
      unsigned HOST_WIDE_INT cur;

      /* Ignore arrays with a missing, non-constant (VLA) or
         minus-one (zero-sized) upper bound.  */
      if (!maxval
          || TREE_CODE (maxval) != INTEGER_CST
          || integer_minus_onep (maxval))
        return;

      for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
        chkp_find_bound_slots_1 (etype, have_bound, offs + cur * esize);
    }
}
  1391. /* Fill bitmap RES with information about bounds for
  1392. type TYPE. See chkp_find_bound_slots_1 for more
  1393. details. */
void
chkp_find_bound_slots (const_tree type, bitmap res)
{
  /* Start from an empty bitmap; slots are filled by the recursive
     walk with a zero base offset.  */
  bitmap_clear (res);
  chkp_find_bound_slots_1 (type, res, 0);
}
  1400. /* Return 1 if call to FNDECL should be instrumented
  1401. and 0 otherwise. */
  1402. static bool
  1403. chkp_instrument_normal_builtin (tree fndecl)
  1404. {
  1405. switch (DECL_FUNCTION_CODE (fndecl))
  1406. {
  1407. case BUILT_IN_STRLEN:
  1408. case BUILT_IN_STRCPY:
  1409. case BUILT_IN_STRNCPY:
  1410. case BUILT_IN_STPCPY:
  1411. case BUILT_IN_STPNCPY:
  1412. case BUILT_IN_STRCAT:
  1413. case BUILT_IN_STRNCAT:
  1414. case BUILT_IN_MEMCPY:
  1415. case BUILT_IN_MEMPCPY:
  1416. case BUILT_IN_MEMSET:
  1417. case BUILT_IN_MEMMOVE:
  1418. case BUILT_IN_BZERO:
  1419. case BUILT_IN_STRCMP:
  1420. case BUILT_IN_STRNCMP:
  1421. case BUILT_IN_BCMP:
  1422. case BUILT_IN_MEMCMP:
  1423. case BUILT_IN_MEMCPY_CHK:
  1424. case BUILT_IN_MEMPCPY_CHK:
  1425. case BUILT_IN_MEMMOVE_CHK:
  1426. case BUILT_IN_MEMSET_CHK:
  1427. case BUILT_IN_STRCPY_CHK:
  1428. case BUILT_IN_STRNCPY_CHK:
  1429. case BUILT_IN_STPCPY_CHK:
  1430. case BUILT_IN_STPNCPY_CHK:
  1431. case BUILT_IN_STRCAT_CHK:
  1432. case BUILT_IN_STRNCAT_CHK:
  1433. case BUILT_IN_MALLOC:
  1434. case BUILT_IN_CALLOC:
  1435. case BUILT_IN_REALLOC:
  1436. return 1;
  1437. default:
  1438. return 0;
  1439. }
  1440. }
  1441. /* Add bound arguments to call statement pointed by GSI.
  1442. Also performs a replacement of user checker builtins calls
  1443. with internal ones. */
static void
chkp_add_bounds_to_call_stmt (gimple_stmt_iterator *gsi)
{
  gcall *call = as_a <gcall *> (gsi_stmt (*gsi));
  unsigned arg_no = 0;
  tree fndecl = gimple_call_fndecl (call);
  tree fntype;
  tree first_formal_arg;
  tree arg;
  bool use_fntype = false;
  tree op;
  ssa_op_iter iter;
  gcall *new_call;

  /* Do nothing for internal functions.  */
  if (gimple_call_internal_p (call))
    return;

  fntype = TREE_TYPE (TREE_TYPE (gimple_call_fn (call)));

  /* Do nothing if back-end builtin is called.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return;

  /* Do nothing for some middle-end builtins.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
    return;

  /* Do nothing for calls to not instrumentable functions.  */
  if (fndecl && !chkp_instrumentable_p (fndecl))
    return;

  /* Ignore CHKP_INIT_PTR_BOUNDS, CHKP_NULL_PTR_BOUNDS
     and CHKP_COPY_PTR_BOUNDS.  These builtins are handled when the
     bounds of their result are requested, not at the call site.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS
          || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS
          || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS
          || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS))
    return;

  /* Check user builtins are replaced with checks.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_LBOUNDS
          || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_UBOUNDS
          || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_CHECK_PTR_BOUNDS))
    {
      chkp_replace_address_check_builtin (gsi, integer_minus_one_node);
      return;
    }

  /* Check user builtins are replaced with bound extract.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_LBOUND
          || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_GET_PTR_UBOUND))
    {
      chkp_replace_extract_builtin (gsi);
      return;
    }

  /* BUILT_IN_CHKP_NARROW_PTR_BOUNDS call is replaced with
     target narrow bounds call.  Only the bounds argument (arg 1)
     is recomputed; the statement itself is reused.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
    {
      tree arg = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (arg, gsi);

      gimple_call_set_fndecl (call, chkp_narrow_bounds_fndecl);
      gimple_call_set_arg (call, 1, bounds);
      update_stmt (call);

      return;
    }

  /* BUILT_IN_CHKP_STORE_PTR_BOUNDS call is replaced with
     bndstx call.  The original call is removed afterwards.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_STORE_PTR_BOUNDS)
    {
      tree addr = gimple_call_arg (call, 0);
      tree ptr = gimple_call_arg (call, 1);
      tree bounds = chkp_find_bounds (ptr, gsi);
      gimple_stmt_iterator iter = gsi_for_stmt (call);

      chkp_build_bndstx (addr, ptr, bounds, gsi);
      gsi_remove (&iter, true);

      return;
    }

  if (!flag_chkp_instrument_calls)
    return;

  /* We instrument only some subset of builtins.  We also instrument
     builtin calls to be inlined.  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && !chkp_instrument_normal_builtin (fndecl))
    {
      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
        return;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      if (!clone
          || !gimple_has_body_p (clone->decl))
        return;
    }

  /* If function decl is available then use it for
     formal arguments list.  Otherwise use function type.  */
  if (fndecl && DECL_ARGUMENTS (fndecl))
    first_formal_arg = DECL_ARGUMENTS (fndecl);
  else
    {
      first_formal_arg = TYPE_ARG_TYPES (fntype);
      use_fntype = true;
    }

  /* Fill vector of new call args.  Each pointer argument is followed
     by its bounds; aggregates with pointer fields expand into one
     bound per contained pointer.  */
  vec<tree> new_args = vNULL;
  new_args.create (gimple_call_num_args (call));
  arg = first_formal_arg;
  for (arg_no = 0; arg_no < gimple_call_num_args (call); arg_no++)
    {
      tree call_arg = gimple_call_arg (call, arg_no);
      tree type;

      /* Get arg type using formal argument description
         or actual argument type.  */
      if (arg)
        if (use_fntype)
          if (TREE_VALUE (arg) != void_type_node)
            {
              type = TREE_VALUE (arg);
              arg = TREE_CHAIN (arg);
            }
          else
            type = TREE_TYPE (call_arg);
        else
          {
            type = TREE_TYPE (arg);
            arg = TREE_CHAIN (arg);
          }
      else
        type = TREE_TYPE (call_arg);

      new_args.safe_push (call_arg);

      if (BOUNDED_TYPE_P (type)
          || pass_by_reference (NULL, TYPE_MODE (type), type, true))
        new_args.safe_push (chkp_find_bounds (call_arg, gsi));
      else if (chkp_type_has_pointer (type))
        {
          /* Collect bounds for every pointer slot of the aggregate;
             slots without a pointer stay NULL and are skipped.  */
          HOST_WIDE_INT max_bounds
            = TREE_INT_CST_LOW (TYPE_SIZE (type)) / POINTER_SIZE;
          tree *all_bounds = (tree *)xmalloc (sizeof (tree) * max_bounds);
          HOST_WIDE_INT bnd_no;

          memset (all_bounds, 0, sizeof (tree) * max_bounds);

          chkp_find_bounds_for_elem (call_arg, all_bounds, 0, gsi);

          for (bnd_no = 0; bnd_no < max_bounds; bnd_no++)
            if (all_bounds[bnd_no])
              new_args.safe_push (all_bounds[bnd_no]);

          free (all_bounds);
        }
    }

  /* If no bounds were added, the original call can be reused.  */
  if (new_args.length () == gimple_call_num_args (call))
    new_call = call;
  else
    {
      new_call = gimple_build_call_vec (gimple_op (call, 1), new_args);
      gimple_call_set_lhs (new_call, gimple_call_lhs (call));
      gimple_call_copy_flags (new_call, call);
      gimple_call_set_chain (new_call, gimple_call_chain (call));
    }
  new_args.release ();

  /* For direct calls fndecl is replaced with instrumented version.  */
  if (fndecl)
    {
      tree new_decl = chkp_maybe_create_clone (fndecl)->decl;
      gimple_call_set_fndecl (new_call, new_decl);
      gimple_call_set_fntype (new_call, TREE_TYPE (new_decl));
    }
  /* For indirect call we should fix function pointer type if
     pass some bounds.  */
  else if (new_call != call)
    {
      tree type = gimple_call_fntype (call);
      type = chkp_copy_function_type_adding_bounds (type);
      gimple_call_set_fntype (new_call, type);
    }

  /* replace old call statement with the new one.  SSA defs must be
     re-pointed at the new statement before the replacement.  */
  if (call != new_call)
    {
      FOR_EACH_SSA_TREE_OPERAND (op, call, iter, SSA_OP_ALL_DEFS)
        {
          SSA_NAME_DEF_STMT (op) = new_call;
        }
      gsi_replace (gsi, new_call, true);
    }
  else
    update_stmt (new_call);

  gimple_call_set_with_bounds (new_call, true);
}
  1627. /* Return constant static bounds var with specified bounds LB and UB.
  1628. If such var does not exists then new var is created with specified NAME. */
static tree
chkp_make_static_const_bounds (HOST_WIDE_INT lb,
                               HOST_WIDE_INT ub,
                               const char *name)
{
  tree id = get_identifier (name);
  tree var;
  varpool_node *node;
  symtab_node *snode;

  var = build_decl (UNKNOWN_LOCATION, VAR_DECL, id,
                    pointer_bounds_type_node);
  /* Static and public so LTO can merge identical bounds vars.  */
  TREE_STATIC (var) = 1;
  TREE_PUBLIC (var) = 1;

  /* With LTO we may have constant bounds already in varpool.
     Try to find it.  */
  if ((snode = symtab_node::get_for_asmname (DECL_ASSEMBLER_NAME (var))))
    {
      /* We don't allow this symbol usage for non bounds.  */
      if (snode->type != SYMTAB_VARIABLE
          || !POINTER_BOUNDS_P (snode->decl))
        sorry ("-fcheck-pointer-bounds requires '%s' "
               "name for internal usage",
               IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (var)));

      return snode->decl;
    }

  TREE_USED (var) = 1;
  TREE_READONLY (var) = 1;
  TREE_ADDRESSABLE (var) = 0;
  DECL_ARTIFICIAL (var) = 1;
  DECL_READ_P (var) = 1;
  /* The initializer encodes [LB, UB] in a target-specific way.  */
  DECL_INITIAL (var) = targetm.chkp_make_bounds_constant (lb, ub);
  make_decl_one_only (var, DECL_ASSEMBLER_NAME (var));
  /* We may use this symbol during ctors generation in chkp_finish_file
     when all symbols are emitted.  Force output to avoid undefined
     symbols in ctors.  */
  node = varpool_node::get_create (var);
  node->force_output = 1;

  varpool_node::finalize_decl (var);

  return var;
}
  1669. /* Generate code to make bounds with specified lower bound LB and SIZE.
  1670. if AFTER is 1 then code is inserted after position pointed by ITER
  1671. otherwise code is inserted before position pointed by ITER.
  1672. If ITER is NULL then code is added to entry block. */
static tree
chkp_make_bounds (tree lb, tree size, gimple_stmt_iterator *iter, bool after)
{
  gimple_seq seq;
  gimple_stmt_iterator gsi;
  gimple stmt;
  tree bounds;

  /* Insert at ITER when given, otherwise at the function's
     entry block.  */
  if (iter)
    gsi = *iter;
  else
    gsi = gsi_start_bb (chkp_get_entry_block ());

  seq = NULL;

  /* LB and SIZE may be arbitrary expressions; force them into
     valid call operands, accumulating helper statements in SEQ.  */
  lb = chkp_force_gimple_call_op (lb, &seq);
  size = chkp_force_gimple_call_op (size, &seq);

  stmt = gimple_build_call (chkp_bndmk_fndecl, 2, lb, size);
  chkp_mark_stmt (stmt);

  bounds = chkp_get_tmp_reg (stmt);
  gimple_call_set_lhs (stmt, bounds);

  gimple_seq_add_stmt (&seq, stmt);

  /* AFTER is honored only when an explicit position was given.  */
  if (iter && after)
    gsi_insert_seq_after (&gsi, seq, GSI_SAME_STMT);
  else
    gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Made bounds: ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
      if (iter)
        {
          fprintf (dump_file, "  inserted before statement: ");
          print_gimple_stmt (dump_file, gsi_stmt (*iter), 0, TDF_VOPS|TDF_MEMSYMS);
        }
      else
        fprintf (dump_file, "  at function entry\n");
    }

  /* update_stmt (stmt); */

  return bounds;
}
  1711. /* Return var holding zero bounds. */
  1712. tree
  1713. chkp_get_zero_bounds_var (void)
  1714. {
  1715. if (!chkp_zero_bounds_var)
  1716. chkp_zero_bounds_var
  1717. = chkp_make_static_const_bounds (0, -1,
  1718. CHKP_ZERO_BOUNDS_VAR_NAME);
  1719. return chkp_zero_bounds_var;
  1720. }
  1721. /* Return var holding none bounds. */
  1722. tree
  1723. chkp_get_none_bounds_var (void)
  1724. {
  1725. if (!chkp_none_bounds_var)
  1726. chkp_none_bounds_var
  1727. = chkp_make_static_const_bounds (-1, 0,
  1728. CHKP_NONE_BOUNDS_VAR_NAME);
  1729. return chkp_none_bounds_var;
  1730. }
  1731. /* Return SSA_NAME used to represent zero bounds. */
  1732. static tree
  1733. chkp_get_zero_bounds (void)
  1734. {
  1735. if (zero_bounds)
  1736. return zero_bounds;
  1737. if (dump_file && (dump_flags & TDF_DETAILS))
  1738. fprintf (dump_file, "Creating zero bounds...");
  1739. if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
  1740. || flag_chkp_use_static_const_bounds > 0)
  1741. {
  1742. gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
  1743. gimple stmt;
  1744. zero_bounds = chkp_get_tmp_reg (gimple_build_nop ());
  1745. stmt = gimple_build_assign (zero_bounds, chkp_get_zero_bounds_var ());
  1746. gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
  1747. }
  1748. else
  1749. zero_bounds = chkp_make_bounds (integer_zero_node,
  1750. integer_zero_node,
  1751. NULL,
  1752. false);
  1753. return zero_bounds;
  1754. }
  1755. /* Return SSA_NAME used to represent none bounds. */
  1756. static tree
  1757. chkp_get_none_bounds (void)
  1758. {
  1759. if (none_bounds)
  1760. return none_bounds;
  1761. if (dump_file && (dump_flags & TDF_DETAILS))
  1762. fprintf (dump_file, "Creating none bounds...");
  1763. if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
  1764. || flag_chkp_use_static_const_bounds > 0)
  1765. {
  1766. gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
  1767. gimple stmt;
  1768. none_bounds = chkp_get_tmp_reg (gimple_build_nop ());
  1769. stmt = gimple_build_assign (none_bounds, chkp_get_none_bounds_var ());
  1770. gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
  1771. }
  1772. else
  1773. none_bounds = chkp_make_bounds (integer_minus_one_node,
  1774. build_int_cst (size_type_node, 2),
  1775. NULL,
  1776. false);
  1777. return none_bounds;
  1778. }
  1779. /* Return bounds to be used as a result of operation which
1780. should not create a pointer (e.g. MULT_EXPR). */
static tree
chkp_get_invalid_op_bounds (void)
{
  /* Currently zero bounds are used for such results; kept as a
     separate function so the policy can be changed in one place.  */
  return chkp_get_zero_bounds ();
}
  1786. /* Return bounds to be used for loads of non-pointer values. */
static tree
chkp_get_nonpointer_load_bounds (void)
{
  /* Currently zero bounds are used for non-pointer loads; kept as a
     separate function so the policy can be changed in one place.  */
  return chkp_get_zero_bounds ();
}
  1792. /* Return 1 if may use bndret call to get bounds for pointer
  1793. returned by CALL. */
static bool
chkp_call_returns_bounds_p (gcall *call)
{
  /* Internal functions are never instrumented.  */
  if (gimple_call_internal_p (call))
    return false;

  /* Narrowing builtins return the (narrowed) bounds themselves.  */
  if (gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW_PTR_BOUNDS)
      || chkp_gimple_call_builtin_p (call, BUILT_IN_CHKP_NARROW))
    return true;

  /* Calls already carrying bounds return them too.  */
  if (gimple_call_with_bounds_p (call))
    return true;

  tree fndecl = gimple_call_fndecl (call);

  /* Back-end builtins are not instrumented.  */
  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
    return false;

  if (fndecl && !chkp_instrumentable_p (fndecl))
    return false;

  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    {
      /* Mirror the decision made in chkp_add_bounds_to_call_stmt:
         only the instrumented builtin subset, or always_inline
         builtins with an instrumented clone body, return bounds.  */
      if (chkp_instrument_normal_builtin (fndecl))
        return true;

      if (!lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)))
        return false;

      struct cgraph_node *clone = chkp_maybe_create_clone (fndecl);
      return (clone && gimple_has_body_p (clone->decl));
    }

  return true;
}
  1820. /* Build bounds returned by CALL. */
static tree
chkp_build_returned_bound (gcall *call)
{
  gimple_stmt_iterator gsi;
  tree bounds;
  gimple stmt;
  tree fndecl = gimple_call_fndecl (call);
  unsigned int retflags;

  /* To avoid fixing alloca expands in targets we handle
     it separately.  Bounds are [result, result + size).  */
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA
          || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    {
      tree size = gimple_call_arg (call, 0);
      tree lb = gimple_call_lhs (call);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* We know bounds returned by set_bounds builtin call.  */
  else if (fndecl
           && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
           && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_SET_PTR_BOUNDS)
    {
      tree lb = gimple_call_arg (call, 0);
      tree size = gimple_call_arg (call, 1);
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_make_bounds (lb, size, &iter, true);
    }
  /* Detect bounds initialization calls.  */
  else if (fndecl
           && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
           && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_INIT_PTR_BOUNDS)
    bounds = chkp_get_zero_bounds ();
  /* Detect bounds nullification calls.  */
  else if (fndecl
           && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
           && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_NULL_PTR_BOUNDS)
    bounds = chkp_get_none_bounds ();
  /* Detect bounds copy calls.  */
  else if (fndecl
           && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
           && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CHKP_COPY_PTR_BOUNDS)
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      bounds = chkp_find_bounds (gimple_call_arg (call, 1), &iter);
    }
  /* Do not use retbnd when returned bounds are equal to some
     of passed bounds.  */
  else if (((retflags = gimple_call_return_flags (call)) & ERF_RETURNS_ARG)
           && (retflags & ERF_RETURN_ARG_MASK) < gimple_call_num_args (call))
    {
      gimple_stmt_iterator iter = gsi_for_stmt (call);
      unsigned int retarg = retflags & ERF_RETURN_ARG_MASK, argno;
      if (gimple_call_with_bounds_p (call))
        {
          /* RETARG counts original (non-bound) arguments; skip the
             interleaved bound arguments to find the real index.  */
          for (argno = 0; argno < gimple_call_num_args (call); argno++)
            if (!POINTER_BOUNDS_P (gimple_call_arg (call, argno)))
              {
                if (retarg)
                  retarg--;
                else
                  break;
              }
        }
      else
        argno = retarg;

      bounds = chkp_find_bounds (gimple_call_arg (call, argno), &iter);
    }
  else if (chkp_call_returns_bounds_p (call))
    {
      gcc_assert (TREE_CODE (gimple_call_lhs (call)) == SSA_NAME);

      /* In general case build checker builtin call to
         obtain returned bounds.  */
      stmt = gimple_build_call (chkp_ret_bnd_fndecl, 1,
                                gimple_call_lhs (call));
      chkp_mark_stmt (stmt);

      gsi = gsi_for_stmt (call);
      gsi_insert_after (&gsi, stmt, GSI_SAME_STMT);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      update_stmt (stmt);
    }
  else
    bounds = chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Built returned bounds (");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, ") for call: ");
      print_gimple_stmt (dump_file, call, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  bounds = chkp_maybe_copy_and_register_bounds (gimple_call_lhs (call), bounds);

  return bounds;
}
  1917. /* Return bounds used as returned by call
  1918. which produced SSA name VAL. */
  1919. gcall *
  1920. chkp_retbnd_call_by_val (tree val)
  1921. {
  1922. if (TREE_CODE (val) != SSA_NAME)
  1923. return NULL;
  1924. gcc_assert (gimple_code (SSA_NAME_DEF_STMT (val)) == GIMPLE_CALL);
  1925. imm_use_iterator use_iter;
  1926. use_operand_p use_p;
  1927. FOR_EACH_IMM_USE_FAST (use_p, use_iter, val)
  1928. if (gimple_code (USE_STMT (use_p)) == GIMPLE_CALL
  1929. && gimple_call_fndecl (USE_STMT (use_p)) == chkp_ret_bnd_fndecl)
  1930. return as_a <gcall *> (USE_STMT (use_p));
  1931. return NULL;
  1932. }
  1933. /* Check the next parameter for the given PARM is bounds
  1934. and return it's default SSA_NAME (create if required). */
  1935. static tree
  1936. chkp_get_next_bounds_parm (tree parm)
  1937. {
  1938. tree bounds = TREE_CHAIN (parm);
  1939. gcc_assert (POINTER_BOUNDS_P (bounds));
  1940. bounds = ssa_default_def (cfun, bounds);
  1941. if (!bounds)
  1942. {
  1943. bounds = make_ssa_name (TREE_CHAIN (parm), gimple_build_nop ());
  1944. set_ssa_default_def (cfun, TREE_CHAIN (parm), bounds);
  1945. }
  1946. return bounds;
  1947. }
  1948. /* Return bounds to be used for input argument PARM. */
static tree
chkp_get_bound_for_parm (tree parm)
{
  tree decl = SSA_NAME_VAR (parm);
  tree bounds;

  gcc_assert (TREE_CODE (decl) == PARM_DECL);

  /* Check both the SSA name and the underlying decl for already
     registered bounds before creating new ones.  */
  bounds = chkp_get_registered_bounds (parm);

  if (!bounds)
    bounds = chkp_get_registered_bounds (decl);

  if (!bounds)
    {
      tree orig_decl = cgraph_node::get (cfun->decl)->orig_decl;

      /* For static chain param we return zero bounds
         because currently we do not check dereferences
         of this pointer.  */
      if (cfun->static_chain_decl == decl)
        bounds = chkp_get_zero_bounds ();
      /* If non instrumented runtime is used then it may be useful
         to use zero bounds for input arguments of main
         function.  */
      else if (flag_chkp_zero_input_bounds_for_main
               && strcmp (IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (orig_decl)),
                          "main") == 0)
        bounds = chkp_get_zero_bounds ();
      else if (BOUNDED_P (parm))
        {
          /* Bounds for a pointer parameter come in through the
             bounds parameter chained after it.  */
          bounds = chkp_get_next_bounds_parm (decl);
          bounds = chkp_maybe_copy_and_register_bounds (decl, bounds);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Built arg bounds (");
              print_generic_expr (dump_file, bounds, 0);
              fprintf (dump_file, ") for arg: ");
              print_node (dump_file, "", decl, 0);
            }
        }
      else
        bounds = chkp_get_zero_bounds ();
    }

  if (!chkp_get_registered_bounds (parm))
    bounds = chkp_maybe_copy_and_register_bounds (parm, bounds);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Using bounds ");
      print_generic_expr (dump_file, bounds, 0);
      fprintf (dump_file, " for parm ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " of type ");
      print_generic_expr (dump_file, TREE_TYPE (parm), 0);
      fprintf (dump_file, ".\n");
    }

  return bounds;
}
2002. /* Build and return CALL_EXPR for bndldx builtin with specified
2003. arguments. */
  2004. tree
  2005. chkp_build_bndldx_call (tree addr, tree ptr)
  2006. {
  2007. tree fn = build1 (ADDR_EXPR,
  2008. build_pointer_type (TREE_TYPE (chkp_bndldx_fndecl)),
  2009. chkp_bndldx_fndecl);
  2010. tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndldx_fndecl)),
  2011. fn, 2, addr, ptr);
  2012. CALL_WITH_BOUNDS_P (call) = true;
  2013. return call;
  2014. }
  2015. /* Insert code to load bounds for PTR located by ADDR.
  2016. Code is inserted after position pointed by GSI.
  2017. Loaded bounds are returned. */
  2018. static tree
  2019. chkp_build_bndldx (tree addr, tree ptr, gimple_stmt_iterator *gsi)
  2020. {
  2021. gimple_seq seq;
  2022. gimple stmt;
  2023. tree bounds;
  2024. seq = NULL;
  2025. addr = chkp_force_gimple_call_op (addr, &seq);
  2026. ptr = chkp_force_gimple_call_op (ptr, &seq);
  2027. stmt = gimple_build_call (chkp_bndldx_fndecl, 2, addr, ptr);
  2028. chkp_mark_stmt (stmt);
  2029. bounds = chkp_get_tmp_reg (stmt);
  2030. gimple_call_set_lhs (stmt, bounds);
  2031. gimple_seq_add_stmt (&seq, stmt);
  2032. gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);
  2033. if (dump_file && (dump_flags & TDF_DETAILS))
  2034. {
  2035. fprintf (dump_file, "Generated bndldx for pointer ");
  2036. print_generic_expr (dump_file, ptr, 0);
  2037. fprintf (dump_file, ": ");
  2038. print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
  2039. }
  2040. return bounds;
  2041. }
  2042. /* Build and return CALL_EXPR for bndstx builtin with specified
  2043. arguments. */
  2044. tree
  2045. chkp_build_bndstx_call (tree addr, tree ptr, tree bounds)
  2046. {
  2047. tree fn = build1 (ADDR_EXPR,
  2048. build_pointer_type (TREE_TYPE (chkp_bndstx_fndecl)),
  2049. chkp_bndstx_fndecl);
  2050. tree call = build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndstx_fndecl)),
  2051. fn, 3, ptr, bounds, addr);
  2052. CALL_WITH_BOUNDS_P (call) = true;
  2053. return call;
  2054. }
  2055. /* Insert code to store BOUNDS for PTR stored by ADDR.
  2056. New statements are inserted after position pointed
  2057. by GSI. */
void
chkp_build_bndstx (tree addr, tree ptr, tree bounds,
                   gimple_stmt_iterator *gsi)
{
  gimple_seq seq;
  gimple stmt;

  seq = NULL;

  /* ADDR and PTR may be arbitrary expressions; force them into
     valid call operands, accumulating helper statements in SEQ.  */
  addr = chkp_force_gimple_call_op (addr, &seq);
  ptr = chkp_force_gimple_call_op (ptr, &seq);

  /* Note the bndstx argument order: (ptr, bounds, addr).  */
  stmt = gimple_build_call (chkp_bndstx_fndecl, 3, ptr, bounds, addr);
  chkp_mark_stmt (stmt);
  gimple_call_set_with_bounds (stmt, true);

  gimple_seq_add_stmt (&seq, stmt);

  gsi_insert_seq_after (gsi, seq, GSI_CONTINUE_LINKING);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generated bndstx for pointer store ");
      print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_VOPS|TDF_MEMSYMS);
      print_gimple_stmt (dump_file, stmt, 2, TDF_VOPS|TDF_MEMSYMS);
    }
}
/* Compute bounds for pointer NODE which was assigned in
   assignment statement ASSIGN.  Return computed bounds.  */
static tree
chkp_compute_bounds_for_assignment (tree node, gimple assign)
{
  enum tree_code rhs_code = gimple_assign_rhs_code (assign);
  tree rhs1 = gimple_assign_rhs1 (assign);
  tree bounds = NULL_TREE;
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Computing bounds for assignment: ");
      print_gimple_stmt (dump_file, assign, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (rhs_code)
    {
    case MEM_REF:
    case TARGET_MEM_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
      /* We need to load bounds from the bounds table.  */
      bounds = chkp_find_bounds_loaded (node, rhs1, &iter);
      break;

    case VAR_DECL:
    case SSA_NAME:
    case ADDR_EXPR:
    case POINTER_PLUS_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case INTEGER_CST:
      /* Bounds are just propagated from RHS.  */
      bounds = chkp_find_bounds (rhs1, &iter);
      break;

    case VIEW_CONVERT_EXPR:
      /* Bounds are just propagated from RHS.  The conversion itself
	 does not affect the pointed-to object, so look through it.  */
      bounds = chkp_find_bounds (TREE_OPERAND (rhs1, 0), &iter);
      break;

    case PARM_DECL:
      if (BOUNDED_P (rhs1))
	{
	  /* We need to load bounds from the bounds table.  */
	  bounds = chkp_build_bndldx (chkp_build_addr_expr (rhs1),
				      node, &iter);
	  /* bndldx takes the parameter's address, so the PARM_DECL
	     must live in memory.  */
	  TREE_ADDRESSABLE (rhs1) = 1;
	}
      else
	bounds = chkp_get_nonpointer_load_bounds ();
      break;

    case MINUS_EXPR:
    case PLUS_EXPR:
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	/* First we try to check types of operands.  If it
	   does not help then look at bound values.

	   If some bounds are incomplete and other are
	   not proven to be valid (i.e. also incomplete
	   or invalid because value is not pointer) then
	   resulting value is incomplete and will be
	   recomputed later in chkp_finish_incomplete_bounds.

	   Note that for MINUS_EXPR the second operand's bounds are
	   never used: subtracting a pointer cannot produce a value
	   within that pointer's object.  */
	if (BOUNDED_P (rhs1)
	    && !BOUNDED_P (rhs2))
	  bounds = bnd1;
	else if (BOUNDED_P (rhs2)
		 && !BOUNDED_P (rhs1)
		 && rhs_code != MINUS_EXPR)
	  bounds = bnd2;
	else if (chkp_incomplete_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR
	      && !chkp_incomplete_bounds (bnd2))
	    bounds = bnd2;
	  else
	    bounds = incomplete_bounds;
	else if (chkp_incomplete_bounds (bnd2))
	  if (chkp_valid_bounds (bnd1)
	      && !chkp_incomplete_bounds (bnd1))
	    bounds = bnd1;
	  else
	    bounds = incomplete_bounds;
	else if (!chkp_valid_bounds (bnd1))
	  if (chkp_valid_bounds (bnd2) && rhs_code != MINUS_EXPR)
	    bounds = bnd2;
	  else if (bnd2 == chkp_get_zero_bounds ())
	    bounds = bnd2;
	  else
	    bounds = bnd1;
	else if (!chkp_valid_bounds (bnd2))
	  bounds = bnd1;
	else
	  /* Seems both operands may have valid bounds
	     (e.g. pointer minus pointer).  In such case
	     use default invalid op bounds.  */
	  bounds = chkp_get_invalid_op_bounds ();
      }
      break;

    case BIT_NOT_EXPR:
    case NEGATE_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case MULT_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case EXACT_DIV_EXPR:
    case FIX_TRUNC_EXPR:
    case FLOAT_EXPR:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* No valid bounds may be produced by these exprs.  */
      bounds = chkp_get_invalid_op_bounds ();
      break;

    case COND_EXPR:
      {
	tree val1 = gimple_assign_rhs2 (assign);
	tree val2 = gimple_assign_rhs3 (assign);
	tree bnd1 = chkp_find_bounds (val1, &iter);
	tree bnd2 = chkp_find_bounds (val2, &iter);
	gimple stmt;

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Select bounds at runtime with the same condition the
	       original COND_EXPR uses.  */
	    rhs1 = unshare_expr (rhs1);

	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, rhs1, bnd1, bnd2);
	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    case MAX_EXPR:
    case MIN_EXPR:
      {
	tree rhs2 = gimple_assign_rhs2 (assign);
	tree bnd1 = chkp_find_bounds (rhs1, &iter);
	tree bnd2 = chkp_find_bounds (rhs2, &iter);

	if (chkp_incomplete_bounds (bnd1) || chkp_incomplete_bounds (bnd2))
	  bounds = incomplete_bounds;
	else if (bnd1 == bnd2)
	  bounds = bnd1;
	else
	  {
	    /* Rewrite MAX/MIN as a runtime comparison selecting the
	       matching operand's bounds.  */
	    gimple stmt;
	    tree cond = build2 (rhs_code == MAX_EXPR ? GT_EXPR : LT_EXPR,
				boolean_type_node, rhs1, rhs2);
	    bounds = chkp_get_tmp_reg (assign);
	    stmt = gimple_build_assign (bounds, COND_EXPR, cond, bnd1, bnd2);

	    gsi_insert_after (&iter, stmt, GSI_SAME_STMT);

	    if (!chkp_valid_bounds (bnd1) && !chkp_valid_bounds (bnd2))
	      chkp_mark_invalid_bounds (bounds);
	  }
      }
      break;

    default:
      /* Fall back to zero bounds and warn instead of ICEing on an
	 unanticipated RHS code.  */
      bounds = chkp_get_zero_bounds ();
      warning (0, "pointer bounds were lost due to unexpected expression %s",
	       get_tree_code_name (rhs_code));
    }

  gcc_assert (bounds);

  if (node)
    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

  return bounds;
}
/* Compute bounds for ssa name NODE defined by DEF_STMT pointed by ITER.
   There are just few statement codes allowed: NOP (for default ssa names),
   ASSIGN, CALL, PHI, ASM.
   Return computed bounds.  */
static tree
chkp_get_bounds_by_definition (tree node, gimple def_stmt,
			       gphi_iterator *iter)
{
  tree var, bounds;
  enum gimple_code code = gimple_code (def_stmt);
  gphi *stmt;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Searching for bounds for node: ");
      print_generic_expr (dump_file, node, 0);

      fprintf (dump_file, " using its definition: ");
      print_gimple_stmt (dump_file, def_stmt, 0, TDF_VOPS|TDF_MEMSYMS);
    }

  switch (code)
    {
    case GIMPLE_NOP:
      /* Default definition: bounds depend on what kind of decl the
	 SSA name is based on.  */
      var = SSA_NAME_VAR (node);
      switch (TREE_CODE (var))
	{
	case PARM_DECL:
	  bounds = chkp_get_bound_for_parm (node);
	  break;

	case VAR_DECL:
	  /* For uninitialized pointers use none bounds.  */
	  bounds = chkp_get_none_bounds ();
	  bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  break;

	case RESULT_DECL:
	  {
	    tree base_type;

	    /* NOTE(review): this path expects NODE to have reference
	       type — presumably a result returned by invisible
	       reference; confirm against callers.  */
	    gcc_assert (TREE_CODE (TREE_TYPE (node)) == REFERENCE_TYPE);

	    base_type = TREE_TYPE (TREE_TYPE (node));

	    gcc_assert (TYPE_SIZE (base_type)
			&& TREE_CODE (TYPE_SIZE (base_type)) == INTEGER_CST
			&& tree_to_uhwi (TYPE_SIZE (base_type)) != 0);

	    bounds = chkp_make_bounds (node, TYPE_SIZE_UNIT (base_type),
				       NULL, false);
	    bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
	  }
	  break;

	default:
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Unexpected var with no definition\n");
	      print_generic_expr (dump_file, var, 0);
	    }
	  internal_error ("chkp_get_bounds_by_definition: Unexpected var of type %s",
			  get_tree_code_name (TREE_CODE (var)));
	}
      break;

    case GIMPLE_ASSIGN:
      bounds = chkp_compute_bounds_for_assignment (node, def_stmt);
      break;

    case GIMPLE_CALL:
      bounds = chkp_build_returned_bound (as_a <gcall *> (def_stmt));
      break;

    case GIMPLE_PHI:
      /* Bounds for a PHI get their own PHI node.  For names occurring
	 in abnormal PHIs a decl-based SSA name is required.  */
      if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
	if (SSA_NAME_VAR (node))
	  var = chkp_get_bounds_var (SSA_NAME_VAR (node));
	else
	  var = make_temp_ssa_name (pointer_bounds_type_node,
				    gimple_build_nop (),
				    CHKP_BOUND_TMP_NAME);
      else
	var = chkp_get_tmp_var ();
      stmt = create_phi_node (var, gimple_bb (def_stmt));
      bounds = gimple_phi_result (stmt);
      *iter = gsi_for_phi (stmt);

      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);

      /* Created bounds do not have all phi args computed and
	 therefore we do not know if there is a valid source
	 of bounds for that node.  Therefore we mark bounds
	 as incomplete and then recompute them when all phi
	 args are computed.  */
      chkp_register_incomplete_bounds (bounds, node);
      break;

    case GIMPLE_ASM:
      /* asm outputs get zero bounds — no information is available.  */
      bounds = chkp_get_zero_bounds ();
      bounds = chkp_maybe_copy_and_register_bounds (node, bounds);
      break;

    default:
      internal_error ("chkp_get_bounds_by_definition: Unexpected GIMPLE code %s",
		      gimple_code_name[code]);
    }

  return bounds;
}
  2355. /* Return CALL_EXPR for bndmk with specified LOWER_BOUND and SIZE. */
  2356. tree
  2357. chkp_build_make_bounds_call (tree lower_bound, tree size)
  2358. {
  2359. tree call = build1 (ADDR_EXPR,
  2360. build_pointer_type (TREE_TYPE (chkp_bndmk_fndecl)),
  2361. chkp_bndmk_fndecl);
  2362. return build_call_nary (TREE_TYPE (TREE_TYPE (chkp_bndmk_fndecl)),
  2363. call, 2, lower_bound, size);
  2364. }
  2365. /* Create static bounds var of specfified OBJ which is
  2366. is either VAR_DECL or string constant. */
  2367. static tree
  2368. chkp_make_static_bounds (tree obj)
  2369. {
  2370. static int string_id = 1;
  2371. static int var_id = 1;
  2372. tree *slot;
  2373. const char *var_name;
  2374. char *bnd_var_name;
  2375. tree bnd_var;
  2376. /* First check if we already have required var. */
  2377. if (chkp_static_var_bounds)
  2378. {
  2379. /* For vars we use assembler name as a key in
  2380. chkp_static_var_bounds map. It allows to
  2381. avoid duplicating bound vars for decls
  2382. sharing assembler name. */
  2383. if (TREE_CODE (obj) == VAR_DECL)
  2384. {
  2385. tree name = DECL_ASSEMBLER_NAME (obj);
  2386. slot = chkp_static_var_bounds->get (name);
  2387. if (slot)
  2388. return *slot;
  2389. }
  2390. else
  2391. {
  2392. slot = chkp_static_var_bounds->get (obj);
  2393. if (slot)
  2394. return *slot;
  2395. }
  2396. }
  2397. /* Build decl for bounds var. */
  2398. if (TREE_CODE (obj) == VAR_DECL)
  2399. {
  2400. if (DECL_IGNORED_P (obj))
  2401. {
  2402. bnd_var_name = (char *) xmalloc (strlen (CHKP_VAR_BOUNDS_PREFIX) + 10);
  2403. sprintf (bnd_var_name, "%s%d", CHKP_VAR_BOUNDS_PREFIX, var_id++);
  2404. }
  2405. else
  2406. {
  2407. var_name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (obj));
  2408. /* For hidden symbols we want to skip first '*' char. */
  2409. if (*var_name == '*')
  2410. var_name++;
  2411. bnd_var_name = (char *) xmalloc (strlen (var_name)
  2412. + strlen (CHKP_BOUNDS_OF_SYMBOL_PREFIX) + 1);
  2413. strcpy (bnd_var_name, CHKP_BOUNDS_OF_SYMBOL_PREFIX);
  2414. strcat (bnd_var_name, var_name);
  2415. }
  2416. bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
  2417. get_identifier (bnd_var_name),
  2418. pointer_bounds_type_node);
  2419. /* Address of the obj will be used as lower bound. */
  2420. TREE_ADDRESSABLE (obj) = 1;
  2421. }
  2422. else
  2423. {
  2424. bnd_var_name = (char *) xmalloc (strlen (CHKP_STRING_BOUNDS_PREFIX) + 10);
  2425. sprintf (bnd_var_name, "%s%d", CHKP_STRING_BOUNDS_PREFIX, string_id++);
  2426. bnd_var = build_decl (UNKNOWN_LOCATION, VAR_DECL,
  2427. get_identifier (bnd_var_name),
  2428. pointer_bounds_type_node);
  2429. }
  2430. TREE_PUBLIC (bnd_var) = 0;
  2431. TREE_USED (bnd_var) = 1;
  2432. TREE_READONLY (bnd_var) = 0;
  2433. TREE_STATIC (bnd_var) = 1;
  2434. TREE_ADDRESSABLE (bnd_var) = 0;
  2435. DECL_ARTIFICIAL (bnd_var) = 1;
  2436. DECL_COMMON (bnd_var) = 1;
  2437. DECL_COMDAT (bnd_var) = 1;
  2438. DECL_READ_P (bnd_var) = 1;
  2439. DECL_INITIAL (bnd_var) = chkp_build_addr_expr (obj);
  2440. /* Force output similar to constant bounds.
  2441. See chkp_make_static_const_bounds. */
  2442. varpool_node::get_create (bnd_var)->force_output = 1;
  2443. /* Mark symbol as requiring bounds initialization. */
  2444. varpool_node::get_create (bnd_var)->need_bounds_init = 1;
  2445. varpool_node::finalize_decl (bnd_var);
  2446. /* Add created var to the map to use it for other references
  2447. to obj. */
  2448. if (!chkp_static_var_bounds)
  2449. chkp_static_var_bounds = new hash_map<tree, tree>;
  2450. if (TREE_CODE (obj) == VAR_DECL)
  2451. {
  2452. tree name = DECL_ASSEMBLER_NAME (obj);
  2453. chkp_static_var_bounds->put (name, bnd_var);
  2454. }
  2455. else
  2456. chkp_static_var_bounds->put (obj, bnd_var);
  2457. return bnd_var;
  2458. }
/* When var has incomplete type we cannot get size to
   compute its bounds.  In such cases we use checker
   builtin call which determines object size at runtime.  */
static tree
chkp_generate_extern_var_bounds (tree var)
{
  tree bounds, size_reloc, lb, size, max_size, cond;
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  gimple stmt;

  /* If instrumentation is not enabled for vars having
     incomplete type then just return zero bounds to avoid
     checks for this var.  */
  if (!flag_chkp_incomplete_type)
    return chkp_get_zero_bounds ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Generating bounds for extern symbol '");
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "'\n");
    }

  /* Emit a call to the sizeof builtin; its result (the size
     relocation) lands in a temporary register.  */
  stmt = gimple_build_call (chkp_sizeof_fndecl, 1, var);

  size_reloc = create_tmp_reg (chkp_uintptr_type, CHKP_SIZE_TMP_NAME);
  gimple_call_set_lhs (stmt, size_reloc);

  gimple_seq_add_stmt (&seq, stmt);

  lb = chkp_build_addr_expr (var);
  size = make_ssa_name (chkp_get_size_tmp_var (), gimple_build_nop ());

  if (flag_chkp_zero_dynamic_size_as_infinite)
    {
      /* We should check that size relocation was resolved.
	 If it was not then use maximum possible size for the var.
	 The maximum size is (0 - lb), i.e. everything from LB to
	 the top of the address space.  */
      max_size = build2 (MINUS_EXPR, chkp_uintptr_type, integer_zero_node,
			 fold_convert (chkp_uintptr_type, lb));
      max_size = chkp_force_gimple_call_op (max_size, &seq);

      cond = build2 (NE_EXPR, boolean_type_node,
		     size_reloc, integer_zero_node);
      stmt = gimple_build_assign (size, COND_EXPR, cond, size_reloc, max_size);
      gimple_seq_add_stmt (&seq, stmt);
    }
  else
    {
      stmt = gimple_build_assign (size, size_reloc);
      gimple_seq_add_stmt (&seq, stmt);
    }

  /* All generated code goes to the start of the entry block so the
     size is available throughout the function.  */
  gsi = gsi_start_bb (chkp_get_entry_block ());
  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

  bounds = chkp_make_bounds (lb, size, &gsi, true);

  return bounds;
}
  2508. /* Return 1 if TYPE has fields with zero size or fields
  2509. marked with chkp_variable_size attribute. */
  2510. bool
  2511. chkp_variable_size_type (tree type)
  2512. {
  2513. bool res = false;
  2514. tree field;
  2515. if (RECORD_OR_UNION_TYPE_P (type))
  2516. for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
  2517. {
  2518. if (TREE_CODE (field) == FIELD_DECL)
  2519. res = res
  2520. || lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
  2521. || chkp_variable_size_type (TREE_TYPE (field));
  2522. }
  2523. else
  2524. res = !TYPE_SIZE (type)
  2525. || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
  2526. || tree_to_uhwi (TYPE_SIZE (type)) == 0;
  2527. return res;
  2528. }
  2529. /* Compute and return bounds for address of DECL which is
  2530. one of VAR_DECL, PARM_DECL, RESULT_DECL. */
  2531. static tree
  2532. chkp_get_bounds_for_decl_addr (tree decl)
  2533. {
  2534. tree bounds;
  2535. gcc_assert (TREE_CODE (decl) == VAR_DECL
  2536. || TREE_CODE (decl) == PARM_DECL
  2537. || TREE_CODE (decl) == RESULT_DECL);
  2538. bounds = chkp_get_registered_addr_bounds (decl);
  2539. if (bounds)
  2540. return bounds;
  2541. if (dump_file && (dump_flags & TDF_DETAILS))
  2542. {
  2543. fprintf (dump_file, "Building bounds for address of decl ");
  2544. print_generic_expr (dump_file, decl, 0);
  2545. fprintf (dump_file, "\n");
  2546. }
  2547. /* Use zero bounds if size is unknown and checks for
  2548. unknown sizes are restricted. */
  2549. if ((!DECL_SIZE (decl)
  2550. || (chkp_variable_size_type (TREE_TYPE (decl))
  2551. && (TREE_STATIC (decl)
  2552. || DECL_EXTERNAL (decl)
  2553. || TREE_PUBLIC (decl))))
  2554. && !flag_chkp_incomplete_type)
  2555. return chkp_get_zero_bounds ();
  2556. if (flag_chkp_use_static_bounds
  2557. && TREE_CODE (decl) == VAR_DECL
  2558. && (TREE_STATIC (decl)
  2559. || DECL_EXTERNAL (decl)
  2560. || TREE_PUBLIC (decl))
  2561. && !DECL_THREAD_LOCAL_P (decl))
  2562. {
  2563. tree bnd_var = chkp_make_static_bounds (decl);
  2564. gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
  2565. gimple stmt;
  2566. bounds = chkp_get_tmp_reg (gimple_build_nop ());
  2567. stmt = gimple_build_assign (bounds, bnd_var);
  2568. gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
  2569. }
  2570. else if (!DECL_SIZE (decl)
  2571. || (chkp_variable_size_type (TREE_TYPE (decl))
  2572. && (TREE_STATIC (decl)
  2573. || DECL_EXTERNAL (decl)
  2574. || TREE_PUBLIC (decl))))
  2575. {
  2576. gcc_assert (TREE_CODE (decl) == VAR_DECL);
  2577. bounds = chkp_generate_extern_var_bounds (decl);
  2578. }
  2579. else
  2580. {
  2581. tree lb = chkp_build_addr_expr (decl);
  2582. bounds = chkp_make_bounds (lb, DECL_SIZE_UNIT (decl), NULL, false);
  2583. }
  2584. return bounds;
  2585. }
/* Compute and return bounds for constant string.  */
static tree
chkp_get_bounds_for_string_cst (tree cst)
{
  tree bounds;
  tree lb;
  tree size;

  gcc_assert (TREE_CODE (cst) == STRING_CST);

  bounds = chkp_get_registered_bounds (cst);

  if (bounds)
    return bounds;

  /* NOTE(review): flag_chkp_use_static_const_bounds appears to be a
     tri-state option (negative meaning "default") — the first clause
     handles the default case when static bounds are enabled, the
     second an explicit request; confirm against the option
     definition.  */
  if ((flag_chkp_use_static_bounds && flag_chkp_use_static_const_bounds)
      || flag_chkp_use_static_const_bounds > 0)
    {
      /* Load bounds from a statically initialized bounds var placed
	 in the entry block.  */
      tree bnd_var = chkp_make_static_bounds (cst);
      gimple_stmt_iterator gsi = gsi_start_bb (chkp_get_entry_block ());
      gimple stmt;

      bounds = chkp_get_tmp_reg (gimple_build_nop ());
      stmt = gimple_build_assign (bounds, bnd_var);
      gsi_insert_before (&gsi, stmt, GSI_SAME_STMT);
    }
  else
    {
      /* Otherwise build bounds directly: lower bound is the string's
	 address, size is its length.  */
      lb = chkp_build_addr_expr (cst);
      size = build_int_cst (chkp_uintptr_type, TREE_STRING_LENGTH (cst));
      bounds = chkp_make_bounds (lb, size, NULL, false);
    }

  bounds = chkp_maybe_copy_and_register_bounds (cst, bounds);

  return bounds;
}
/* Generate code to intersect bounds BOUNDS1 and BOUNDS2 and
   return the result.  If ITER is not NULL then code is inserted
   before position pointed by ITER.  Otherwise code is added to
   entry block.  */
static tree
chkp_intersect_bounds (tree bounds1, tree bounds2, gimple_stmt_iterator *iter)
{
  /* Zero (or missing) bounds impose no restriction, so the other
     operand is returned unchanged and no call is emitted.  */
  if (!bounds1 || bounds1 == chkp_get_zero_bounds ())
    return bounds2 ? bounds2 : bounds1;
  else if (!bounds2 || bounds2 == chkp_get_zero_bounds ())
    return bounds1;
  else
    {
      gimple_seq seq;
      gimple stmt;
      tree bounds;

      seq = NULL;

      stmt = gimple_build_call (chkp_intersect_fndecl, 2, bounds1, bounds2);
      /* Mark the call as checker-generated.  */
      chkp_mark_stmt (stmt);

      bounds = chkp_get_tmp_reg (stmt);
      gimple_call_set_lhs (stmt, bounds);

      gimple_seq_add_stmt (&seq, stmt);

      /* We are probably doing narrowing for constant expression.
	 In such case iter may be undefined.  */
      if (!iter)
	{
	  gimple_stmt_iterator gsi = gsi_last_bb (chkp_get_entry_block ());
	  iter = &gsi;
	  gsi_insert_seq_after (iter, seq, GSI_SAME_STMT);
	}
      else
	gsi_insert_seq_before (iter, seq, GSI_SAME_STMT);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Bounds intersection: ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_VOPS|TDF_MEMSYMS);
	  fprintf (dump_file, "  inserted before statement: ");
	  print_gimple_stmt (dump_file, gsi_stmt (*iter), 0,
			     TDF_VOPS|TDF_MEMSYMS);
	}

      return bounds;
    }
}
  2659. /* Return 1 if we are allowed to narrow bounds for addressed FIELD
  2660. and 0 othersize. */
  2661. static bool
  2662. chkp_may_narrow_to_field (tree field)
  2663. {
  2664. return DECL_SIZE (field) && TREE_CODE (DECL_SIZE (field)) == INTEGER_CST
  2665. && tree_to_uhwi (DECL_SIZE (field)) != 0
  2666. && (!DECL_FIELD_OFFSET (field)
  2667. || TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST)
  2668. && (!DECL_FIELD_BIT_OFFSET (field)
  2669. || TREE_CODE (DECL_FIELD_BIT_OFFSET (field)) == INTEGER_CST)
  2670. && !lookup_attribute ("bnd_variable_size", DECL_ATTRIBUTES (field))
  2671. && !chkp_variable_size_type (TREE_TYPE (field));
  2672. }
/* Return 1 if bounds for FIELD should be narrowed to
   field's own size.  */
static bool
chkp_narrow_bounds_for_field (tree field)
{
  HOST_WIDE_INT offs;
  HOST_WIDE_INT bit_offs;

  if (!chkp_may_narrow_to_field (field))
    return false;

  /* Accesses to compiler generated fields should not cause
     bounds narrowing.  */
  if (DECL_ARTIFICIAL (field))
    return false;

  offs = tree_to_uhwi (DECL_FIELD_OFFSET (field));
  bit_offs = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));

  /* Narrow unless this is the first field (offset 0) and the option
     to give the first field its own bounds is disabled — the first
     field shares its address with the whole object.  */
  return (flag_chkp_narrow_bounds
	  && (flag_chkp_first_field_has_own_bounds
	      || offs
	      || bit_offs));
}
  2693. /* Perform narrowing for BOUNDS using bounds computed for field
  2694. access COMPONENT. ITER meaning is the same as for
  2695. chkp_intersect_bounds. */
  2696. static tree
  2697. chkp_narrow_bounds_to_field (tree bounds, tree component,
  2698. gimple_stmt_iterator *iter)
  2699. {
  2700. tree field = TREE_OPERAND (component, 1);
  2701. tree size = DECL_SIZE_UNIT (field);
  2702. tree field_ptr = chkp_build_addr_expr (component);
  2703. tree field_bounds;
  2704. field_bounds = chkp_make_bounds (field_ptr, size, iter, false);
  2705. return chkp_intersect_bounds (field_bounds, bounds, iter);
  2706. }
/* Parse field or array access NODE.
   PTR output parameter holds a pointer to the outermost
   object.
   BITFIELD output parameter is set to 1 if bitfield is
   accessed and to 0 otherwise.  If it is 1 then ELT holds
   outer component for accessed bit field.
   SAFE outer parameter is set to 1 if access is safe and
   checks are not required.
   BOUNDS outer parameter holds bounds to be used to check
   access (may be NULL).
   If INNERMOST_BOUNDS is 1 then try to narrow bounds to the
   innermost accessed component.  */
static void
chkp_parse_array_and_component_ref (tree node, tree *ptr,
				    tree *elt, bool *safe,
				    bool *bitfield,
				    tree *bounds,
				    gimple_stmt_iterator *iter,
				    bool innermost_bounds)
{
  tree comp_to_narrow = NULL_TREE;
  tree last_comp = NULL_TREE;
  bool array_ref_found = false;
  tree *nodes;
  tree var;
  int len;
  int i;

  /* Compute tree height for expression.  */
  var = node;
  len = 1;
  while (TREE_CODE (var) == COMPONENT_REF
	 || TREE_CODE (var) == ARRAY_REF
	 || TREE_CODE (var) == VIEW_CONVERT_EXPR)
    {
      var = TREE_OPERAND (var, 0);
      len++;
    }

  gcc_assert (len > 1);

  /* It is more convenient for us to scan left-to-right,
     so walk tree again and put all node to nodes vector
     in reversed order.  */
  nodes = XALLOCAVEC (tree, len);
  nodes[len - 1] = node;
  for (i = len - 2; i >= 0; i--)
    nodes[i] = TREE_OPERAND (nodes[i + 1], 0);

  if (bounds)
    *bounds = NULL;
  *safe = true;
  *bitfield = (TREE_CODE (node) == COMPONENT_REF
	       && DECL_BIT_FIELD_TYPE (TREE_OPERAND (node, 1)));
  /* To get bitfield address we will need outer element.  */
  if (*bitfield)
    *elt = nodes[len - 2];
  else
    *elt = NULL_TREE;

  /* If we have indirection in expression then compute
     outermost structure bounds.  Computed bounds may be
     narrowed later.  */
  if (TREE_CODE (nodes[0]) == MEM_REF || INDIRECT_REF_P (nodes[0]))
    {
      *safe = false;
      *ptr = TREE_OPERAND (nodes[0], 0);
      if (bounds)
	*bounds = chkp_find_bounds (*ptr, iter);
    }
  else
    {
      /* No indirection: the base is a decl (or string/SSA name), so
	 the access itself is safe; PTR is the base's address.  */
      gcc_assert (TREE_CODE (var) == VAR_DECL
		  || TREE_CODE (var) == PARM_DECL
		  || TREE_CODE (var) == RESULT_DECL
		  || TREE_CODE (var) == STRING_CST
		  || TREE_CODE (var) == SSA_NAME);

      *ptr = chkp_build_addr_expr (var);
    }

  /* In this loop we are trying to find a field access
     requiring narrowing.  There are two simple rules
     for search:
     1.  Leftmost array_ref is chosen if any.
     2.  Rightmost suitable component_ref is chosen if innermost
	 bounds are required and no array_ref exists.  */
  for (i = 1; i < len; i++)
    {
      var = nodes[i];

      if (TREE_CODE (var) == ARRAY_REF)
	{
	  /* Array indexing may go out of the element, so the access
	     is not safe.  */
	  *safe = false;
	  array_ref_found = true;
	  if (flag_chkp_narrow_bounds
	      && !flag_chkp_narrow_to_innermost_arrray
	      && (!last_comp
		  || chkp_may_narrow_to_field (TREE_OPERAND (last_comp, 1))))
	    {
	      comp_to_narrow = last_comp;
	      break;
	    }
	}
      else if (TREE_CODE (var) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (var, 1);

	  if (innermost_bounds
	      && !array_ref_found
	      && chkp_narrow_bounds_for_field (field))
	    comp_to_narrow = var;
	  last_comp = var;

	  /* When narrowing to the innermost array, narrow to each
	     array-typed field as we pass it and drop any pending
	     candidate.  */
	  if (flag_chkp_narrow_bounds
	      && flag_chkp_narrow_to_innermost_arrray
	      && TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
	    {
	      if (bounds)
		*bounds = chkp_narrow_bounds_to_field (*bounds, var, iter);
	      comp_to_narrow = NULL;
	    }
	}
      else if (TREE_CODE (var) == VIEW_CONVERT_EXPR)
	/* Nothing to do for it.  */
	;
      else
	gcc_unreachable ();
    }

  if (comp_to_narrow && DECL_SIZE (TREE_OPERAND (comp_to_narrow, 1)) && bounds)
    *bounds = chkp_narrow_bounds_to_field (*bounds, comp_to_narrow, iter);

  if (innermost_bounds && bounds && !*bounds)
    *bounds = chkp_find_bounds (*ptr, iter);
}
/* Compute and return bounds for address of OBJ.  */
static tree
chkp_make_addressed_object_bounds (tree obj, gimple_stmt_iterator *iter)
{
  tree bounds = chkp_get_registered_addr_bounds (obj);

  if (bounds)
    return bounds;

  switch (TREE_CODE (obj))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      bounds = chkp_get_bounds_for_decl_addr (obj);
      break;

    case STRING_CST:
      bounds = chkp_get_bounds_for_string_cst (obj);
      break;

    case ARRAY_REF:
    case COMPONENT_REF:
      {
	tree elt;
	tree ptr;
	bool safe;
	bool bitfield;

	/* Parse the reference chain; with innermost_bounds = true
	   the helper always produces bounds.  */
	chkp_parse_array_and_component_ref (obj, &ptr, &elt, &safe,
					    &bitfield, &bounds, iter, true);

	gcc_assert (bounds);
      }
      break;

    case FUNCTION_DECL:
    case LABEL_DECL:
      /* Code addresses get zero (no-restriction) bounds.  */
      bounds = chkp_get_zero_bounds ();
      break;

    case MEM_REF:
      /* Address of *p has the bounds of p itself.  */
      bounds = chkp_find_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Use bounds of the underlying complex object.  */
      bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (obj, 0), iter);
      break;

    default:
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "chkp_make_addressed_object_bounds: "
		   "unexpected object of type %s\n",
		   get_tree_code_name (TREE_CODE (obj)));
	  print_node (dump_file, "", obj, 0);
	}
      internal_error ("chkp_make_addressed_object_bounds: "
		      "Unexpected tree code %s",
		      get_tree_code_name (TREE_CODE (obj)));
    }

  chkp_register_addr_bounds (obj, bounds);

  return bounds;
}
  2886. /* Compute bounds for pointer PTR loaded from PTR_SRC. Generate statements
  2887. to compute bounds if required. Computed bounds should be available at
  2888. position pointed by ITER.
  2889. If PTR_SRC is NULL_TREE then pointer definition is identified.
  2890. If PTR_SRC is not NULL_TREE then ITER points to statements which loads
  2891. PTR. If PTR is a any memory reference then ITER points to a statement
  2892. after which bndldx will be inserterd. In both cases ITER will be updated
  2893. to point to the inserted bndldx statement. */
  2894. static tree
  2895. chkp_find_bounds_1 (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
  2896. {
  2897. tree addr = NULL_TREE;
  2898. tree bounds = NULL_TREE;
  2899. if (!ptr_src)
  2900. ptr_src = ptr;
  2901. bounds = chkp_get_registered_bounds (ptr_src);
  2902. if (bounds)
  2903. return bounds;
  2904. switch (TREE_CODE (ptr_src))
  2905. {
  2906. case MEM_REF:
  2907. case VAR_DECL:
  2908. if (BOUNDED_P (ptr_src))
  2909. if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
  2910. bounds = chkp_get_zero_bounds ();
  2911. else
  2912. {
  2913. addr = chkp_build_addr_expr (ptr_src);
  2914. bounds = chkp_build_bndldx (addr, ptr, iter);
  2915. }
  2916. else
  2917. bounds = chkp_get_nonpointer_load_bounds ();
  2918. break;
  2919. case ARRAY_REF:
  2920. case COMPONENT_REF:
  2921. addr = get_base_address (ptr_src);
  2922. if (DECL_P (addr)
  2923. || TREE_CODE (addr) == MEM_REF
  2924. || TREE_CODE (addr) == TARGET_MEM_REF)
  2925. {
  2926. if (BOUNDED_P (ptr_src))
  2927. if (TREE_CODE (ptr) == VAR_DECL && DECL_REGISTER (ptr))
  2928. bounds = chkp_get_zero_bounds ();
  2929. else
  2930. {
  2931. addr = chkp_build_addr_expr (ptr_src);
  2932. bounds = chkp_build_bndldx (addr, ptr, iter);
  2933. }
  2934. else
  2935. bounds = chkp_get_nonpointer_load_bounds ();
  2936. }
  2937. else
  2938. {
  2939. gcc_assert (TREE_CODE (addr) == SSA_NAME);
  2940. bounds = chkp_find_bounds (addr, iter);
  2941. }
  2942. break;
  2943. case PARM_DECL:
  2944. gcc_unreachable ();
  2945. bounds = chkp_get_bound_for_parm (ptr_src);
  2946. break;
  2947. case TARGET_MEM_REF:
  2948. addr = chkp_build_addr_expr (ptr_src);
  2949. bounds = chkp_build_bndldx (addr, ptr, iter);
  2950. break;
  2951. case SSA_NAME:
  2952. bounds = chkp_get_registered_bounds (ptr_src);
  2953. if (!bounds)
  2954. {
  2955. gimple def_stmt = SSA_NAME_DEF_STMT (ptr_src);
  2956. gphi_iterator phi_iter;
  2957. bounds = chkp_get_bounds_by_definition (ptr_src, def_stmt, &phi_iter);
  2958. gcc_assert (bounds);
  2959. if (gphi *def_phi = dyn_cast <gphi *> (def_stmt))
  2960. {
  2961. unsigned i;
  2962. for (i = 0; i < gimple_phi_num_args (def_phi); i++)
  2963. {
  2964. tree arg = gimple_phi_arg_def (def_phi, i);
  2965. tree arg_bnd;
  2966. gphi *phi_bnd;
  2967. arg_bnd = chkp_find_bounds (arg, NULL);
  2968. /* chkp_get_bounds_by_definition created new phi
  2969. statement and phi_iter points to it.
  2970. Previous call to chkp_find_bounds could create
  2971. new basic block and therefore change phi statement
  2972. phi_iter points to. */
  2973. phi_bnd = phi_iter.phi ();
  2974. add_phi_arg (phi_bnd, arg_bnd,
  2975. gimple_phi_arg_edge (def_phi, i),
  2976. UNKNOWN_LOCATION);
  2977. }
  2978. /* If all bound phi nodes have their arg computed
  2979. then we may finish its computation. See
  2980. chkp_finish_incomplete_bounds for more details. */
  2981. if (chkp_may_finish_incomplete_bounds ())
  2982. chkp_finish_incomplete_bounds ();
  2983. }
  2984. gcc_assert (bounds == chkp_get_registered_bounds (ptr_src)
  2985. || chkp_incomplete_bounds (bounds));
  2986. }
  2987. break;
  2988. case ADDR_EXPR:
  2989. bounds = chkp_make_addressed_object_bounds (TREE_OPERAND (ptr_src, 0), iter);
  2990. break;
  2991. case INTEGER_CST:
  2992. if (integer_zerop (ptr_src))
  2993. bounds = chkp_get_none_bounds ();
  2994. else
  2995. bounds = chkp_get_invalid_op_bounds ();
  2996. break;
  2997. default:
  2998. if (dump_file && (dump_flags & TDF_DETAILS))
  2999. {
  3000. fprintf (dump_file, "chkp_find_bounds: unexpected ptr of type %s\n",
  3001. get_tree_code_name (TREE_CODE (ptr_src)));
  3002. print_node (dump_file, "", ptr_src, 0);
  3003. }
  3004. internal_error ("chkp_find_bounds: Unexpected tree code %s",
  3005. get_tree_code_name (TREE_CODE (ptr_src)));
  3006. }
  3007. if (!bounds)
  3008. {
  3009. if (dump_file && (dump_flags & TDF_DETAILS))
  3010. {
  3011. fprintf (stderr, "chkp_find_bounds: cannot find bounds for pointer\n");
  3012. print_node (dump_file, "", ptr_src, 0);
  3013. }
  3014. internal_error ("chkp_find_bounds: Cannot find bounds for pointer");
  3015. }
  3016. return bounds;
  3017. }
  3018. /* Normal case for bounds search without forced narrowing. */
  3019. static tree
  3020. chkp_find_bounds (tree ptr, gimple_stmt_iterator *iter)
  3021. {
  3022. return chkp_find_bounds_1 (ptr, NULL_TREE, iter);
  3023. }
  3024. /* Search bounds for pointer PTR loaded from PTR_SRC
  3025. by statement *ITER points to. */
  3026. static tree
  3027. chkp_find_bounds_loaded (tree ptr, tree ptr_src, gimple_stmt_iterator *iter)
  3028. {
  3029. return chkp_find_bounds_1 (ptr, ptr_src, iter);
  3030. }
/* Helper function which checks type of RHS and finds all pointers in
   it.  For each found pointer we build it's accesses in LHS and RHS
   objects and then call HANDLER for them.  Function is used to copy
   or initilize bounds for copied object.

   LHS and RHS must have the same (or compatible) type; ARG is passed
   through to HANDLER unchanged.  The walk recurses through records,
   unions and arrays, calling HANDLER once per bounded (pointer)
   component.  */
static void
chkp_walk_pointer_assignments (tree lhs, tree rhs, void *arg,
			       assign_handler handler)
{
  tree type = TREE_TYPE (lhs);

  /* We have nothing to do with clobbers.  */
  if (TREE_CLOBBER_P (rhs))
    return;

  if (BOUNDED_TYPE_P (type))
    /* Leaf case: LHS itself is a pointer.  */
    handler (lhs, rhs, arg);
  else if (RECORD_OR_UNION_TYPE_P (type))
    {
      tree field;

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  /* RHS is an initializer: walk only fields the constructor
	     mentions that contain pointers.  */
	  unsigned HOST_WIDE_INT cnt;
	  tree val;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, field, val)
	    {
	      if (chkp_type_has_pointer (TREE_TYPE (field)))
		{
		  tree lhs_field = chkp_build_component_ref (lhs, field);
		  chkp_walk_pointer_assignments (lhs_field, val, arg, handler);
		}
	    }
	}
      else
	/* Plain struct copy: pair up matching fields on both sides.  */
	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && chkp_type_has_pointer (TREE_TYPE (field)))
	    {
	      tree rhs_field = chkp_build_component_ref (rhs, field);
	      tree lhs_field = chkp_build_component_ref (lhs, field);
	      chkp_walk_pointer_assignments (lhs_field, rhs_field, arg, handler);
	    }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      unsigned HOST_WIDE_INT cur = 0;
      tree maxval = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
      tree etype = TREE_TYPE (type);
      tree esize = TYPE_SIZE (etype);

      if (TREE_CODE (rhs) == CONSTRUCTOR)
	{
	  unsigned HOST_WIDE_INT cnt;
	  tree purp, val, lhs_elem;

	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (rhs), cnt, purp, val)
	    {
	      if (purp && TREE_CODE (purp) == RANGE_EXPR)
		{
		  /* Range initializer: apply VAL to each index in
		     [lo_index, hi_index].
		     NOTE(review): the (unsigned) casts truncate the
		     HOST_WIDE_INT index to 32 bits — looks suspicious
		     for very large arrays; confirm intent.  */
		  tree lo_index = TREE_OPERAND (purp, 0);
		  tree hi_index = TREE_OPERAND (purp, 1);

		  for (cur = (unsigned)tree_to_uhwi (lo_index);
		       cur <= (unsigned)tree_to_uhwi (hi_index);
		       cur++)
		    {
		      lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
		      chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		    }
		}
	      else
		{
		  /* An explicit index restarts the running position;
		     otherwise elements are consecutive.  */
		  if (purp)
		    {
		      gcc_assert (TREE_CODE (purp) == INTEGER_CST);
		      cur = tree_to_uhwi (purp);
		    }

		  lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur++);

		  chkp_walk_pointer_assignments (lhs_elem, val, arg, handler);
		}
	    }
	}
      /* Copy array only when size is known.  */
      else if (maxval && !integer_minus_onep (maxval))
	for (cur = 0; cur <= TREE_INT_CST_LOW (maxval); cur++)
	  {
	    tree lhs_elem = chkp_build_array_ref (lhs, etype, esize, cur);
	    tree rhs_elem = chkp_build_array_ref (rhs, etype, esize, cur);
	    chkp_walk_pointer_assignments (lhs_elem, rhs_elem, arg, handler);
	  }
    }
  else
    internal_error("chkp_walk_pointer_assignments: unexpected RHS type: %s",
		   get_tree_code_name (TREE_CODE (type)));
}
  3120. /* Add code to copy bounds for assignment of RHS to LHS.
  3121. ARG is an iterator pointing ne code position. */
  3122. static void
  3123. chkp_copy_bounds_for_elem (tree lhs, tree rhs, void *arg)
  3124. {
  3125. gimple_stmt_iterator *iter = (gimple_stmt_iterator *)arg;
  3126. tree bounds = chkp_find_bounds (rhs, iter);
  3127. tree addr = chkp_build_addr_expr(lhs);
  3128. chkp_build_bndstx (addr, rhs, bounds, iter);
  3129. }
/* Emit static bound initilizers and size vars.

   Called at end of compilation.  Builds static constructors ('P'
   priority group) initializing bounds of statically initialized
   pointers, and constructors ('B' group) initializing static bounds
   vars.  Statements are batched, at most
   MAX_STMTS_IN_STATIC_CHKP_CTOR per constructor.  */
void
chkp_finish_file (void)
{
  struct varpool_node *node;
  struct chkp_ctor_stmt_list stmts;

  /* Do not emit anything if compilation already failed.  */
  if (seen_error ())
    return;

  /* Iterate through varpool and generate bounds initialization
     constructors for all statically initialized pointers.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    /* Check that var is actually emitted and we need and may initialize
       its bounds.  */
    if (node->need_bounds_init
	&& !POINTER_BOUNDS_P (node->decl)
	&& DECL_RTL (node->decl)
	&& MEM_P (DECL_RTL (node->decl))
	&& TREE_ASM_WRITTEN (node->decl))
      {
	chkp_walk_pointer_assignments (node->decl,
				       DECL_INITIAL (node->decl),
				       &stmts,
				       chkp_add_modification_to_stmt_list);

	/* Batch is full: flush it into a static constructor and
	   start a new one.  */
	if (stmts.avail <= 0)
	  {
	    cgraph_build_static_cdtor ('P', stmts.stmts,
				       MAX_RESERVED_INIT_PRIORITY + 3);
	    stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
	    stmts.stmts = NULL;
	  }
      }

  /* Flush the trailing partial batch, if any.  */
  if (stmts.stmts)
    cgraph_build_static_cdtor ('P', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 3);

  /* Iterate through varpool and generate bounds initialization
     constructors for all static bounds vars.  */
  stmts.avail = MAX_STMTS_IN_STATIC_CHKP_CTOR;
  stmts.stmts = NULL;
  FOR_EACH_VARIABLE (node)
    if (node->need_bounds_init
	&& POINTER_BOUNDS_P (node->decl)
	&& TREE_ASM_WRITTEN (node->decl))
      {
	tree bnd = node->decl;
	tree var;

	/* Static bounds vars are initialized with the address of the
	   var whose bounds they hold.  */
	gcc_assert (DECL_INITIAL (bnd)
		    && TREE_CODE (DECL_INITIAL (bnd)) == ADDR_EXPR);

	var = TREE_OPERAND (DECL_INITIAL (bnd), 0);
	chkp_output_static_bounds (bnd, var, &stmts);
      }

  if (stmts.stmts)
    cgraph_build_static_cdtor ('B', stmts.stmts,
			       MAX_RESERVED_INIT_PRIORITY + 2);

  /* Global maps are no longer needed after final emission.  */
  delete chkp_static_var_bounds;
  delete chkp_bounds_map;
}
  3188. /* An instrumentation function which is called for each statement
  3189. having memory access we want to instrument. It inserts check
  3190. code and bounds copy code.
  3191. ITER points to statement to instrument.
  3192. NODE holds memory access in statement to check.
  3193. LOC holds the location information for statement.
  3194. DIRFLAGS determines whether access is read or write.
  3195. ACCESS_OFFS should be added to address used in NODE
  3196. before check.
  3197. ACCESS_SIZE holds size of checked access.
  3198. SAFE indicates if NODE access is safe and should not be
  3199. checked. */
  3200. static void
  3201. chkp_process_stmt (gimple_stmt_iterator *iter, tree node,
  3202. location_t loc, tree dirflag,
  3203. tree access_offs, tree access_size,
  3204. bool safe)
  3205. {
  3206. tree node_type = TREE_TYPE (node);
  3207. tree size = access_size ? access_size : TYPE_SIZE_UNIT (node_type);
  3208. tree addr_first = NULL_TREE; /* address of the first accessed byte */
  3209. tree addr_last = NULL_TREE; /* address of the last accessed byte */
  3210. tree ptr = NULL_TREE; /* a pointer used for dereference */
  3211. tree bounds = NULL_TREE;
  3212. /* We do not need instrumentation for clobbers. */
  3213. if (dirflag == integer_one_node
  3214. && gimple_code (gsi_stmt (*iter)) == GIMPLE_ASSIGN
  3215. && TREE_CLOBBER_P (gimple_assign_rhs1 (gsi_stmt (*iter))))
  3216. return;
  3217. switch (TREE_CODE (node))
  3218. {
  3219. case ARRAY_REF:
  3220. case COMPONENT_REF:
  3221. {
  3222. bool bitfield;
  3223. tree elt;
  3224. if (safe)
  3225. {
  3226. /* We are not going to generate any checks, so do not
  3227. generate bounds as well. */
  3228. addr_first = chkp_build_addr_expr (node);
  3229. break;
  3230. }
  3231. chkp_parse_array_and_component_ref (node, &ptr, &elt, &safe,
  3232. &bitfield, &bounds, iter, false);
  3233. /* Break if there is no dereference and operation is safe. */
  3234. if (bitfield)
  3235. {
  3236. tree field = TREE_OPERAND (node, 1);
  3237. if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST)
  3238. size = DECL_SIZE_UNIT (field);
  3239. if (elt)
  3240. elt = chkp_build_addr_expr (elt);
  3241. addr_first = fold_convert_loc (loc, ptr_type_node, elt ? elt : ptr);
  3242. addr_first = fold_build_pointer_plus_loc (loc,
  3243. addr_first,
  3244. byte_position (field));
  3245. }
  3246. else
  3247. addr_first = chkp_build_addr_expr (node);
  3248. }
  3249. break;
  3250. case INDIRECT_REF:
  3251. ptr = TREE_OPERAND (node, 0);
  3252. addr_first = ptr;
  3253. break;
  3254. case MEM_REF:
  3255. ptr = TREE_OPERAND (node, 0);
  3256. addr_first = chkp_build_addr_expr (node);
  3257. break;
  3258. case TARGET_MEM_REF:
  3259. ptr = TMR_BASE (node);
  3260. addr_first = chkp_build_addr_expr (node);
  3261. break;
  3262. case ARRAY_RANGE_REF:
  3263. printf("ARRAY_RANGE_REF\n");
  3264. debug_gimple_stmt(gsi_stmt(*iter));
  3265. debug_tree(node);
  3266. gcc_unreachable ();
  3267. break;
  3268. case BIT_FIELD_REF:
  3269. {
  3270. tree offs, rem, bpu;
  3271. gcc_assert (!access_offs);
  3272. gcc_assert (!access_size);
  3273. bpu = fold_convert (size_type_node, bitsize_int (BITS_PER_UNIT));
  3274. offs = fold_convert (size_type_node, TREE_OPERAND (node, 2));
  3275. rem = size_binop_loc (loc, TRUNC_MOD_EXPR, offs, bpu);
  3276. offs = size_binop_loc (loc, TRUNC_DIV_EXPR, offs, bpu);
  3277. size = fold_convert (size_type_node, TREE_OPERAND (node, 1));
  3278. size = size_binop_loc (loc, PLUS_EXPR, size, rem);
  3279. size = size_binop_loc (loc, CEIL_DIV_EXPR, size, bpu);
  3280. size = fold_convert (size_type_node, size);
  3281. chkp_process_stmt (iter, TREE_OPERAND (node, 0), loc,
  3282. dirflag, offs, size, safe);
  3283. return;
  3284. }
  3285. break;
  3286. case VAR_DECL:
  3287. case RESULT_DECL:
  3288. case PARM_DECL:
  3289. if (dirflag != integer_one_node
  3290. || DECL_REGISTER (node))
  3291. return;
  3292. safe = true;
  3293. addr_first = chkp_build_addr_expr (node);
  3294. break;
  3295. default:
  3296. return;
  3297. }
  3298. /* If addr_last was not computed then use (addr_first + size - 1)
  3299. expression to compute it. */
  3300. if (!addr_last)
  3301. {
  3302. addr_last = fold_build_pointer_plus_loc (loc, addr_first, size);
  3303. addr_last = fold_build_pointer_plus_hwi_loc (loc, addr_last, -1);
  3304. }
  3305. /* Shift both first_addr and last_addr by access_offs if specified. */
  3306. if (access_offs)
  3307. {
  3308. addr_first = fold_build_pointer_plus_loc (loc, addr_first, access_offs);
  3309. addr_last = fold_build_pointer_plus_loc (loc, addr_last, access_offs);
  3310. }
  3311. /* Generate bndcl/bndcu checks if memory access is not safe. */
  3312. if (!safe)
  3313. {
  3314. gimple_stmt_iterator stmt_iter = *iter;
  3315. if (!bounds)
  3316. bounds = chkp_find_bounds (ptr, iter);
  3317. chkp_check_mem_access (addr_first, addr_last, bounds,
  3318. stmt_iter, loc, dirflag);
  3319. }
  3320. /* We need to store bounds in case pointer is stored. */
  3321. if (dirflag == integer_one_node
  3322. && chkp_type_has_pointer (node_type)
  3323. && flag_chkp_store_bounds)
  3324. {
  3325. gimple stmt = gsi_stmt (*iter);
  3326. tree rhs1 = gimple_assign_rhs1 (stmt);
  3327. enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
  3328. if (get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS)
  3329. chkp_walk_pointer_assignments (node, rhs1, iter,
  3330. chkp_copy_bounds_for_elem);
  3331. else
  3332. {
  3333. bounds = chkp_compute_bounds_for_assignment (NULL_TREE, stmt);
  3334. chkp_build_bndstx (addr_first, rhs1, bounds, iter);
  3335. }
  3336. }
  3337. }
/* Add code to copy bounds for all pointers copied
   in ASSIGN created during inline of EDGE.

   After inserting the bounds copy code, creates cgraph edges for
   every bndldx/bndstx/ret_bnd call that was generated, so the call
   graph stays consistent.  */
void
chkp_copy_bounds_for_assign (gimple assign, struct cgraph_edge *edge)
{
  tree lhs = gimple_assign_lhs (assign);
  tree rhs = gimple_assign_rhs1 (assign);
  gimple_stmt_iterator iter = gsi_for_stmt (assign);

  if (!flag_chkp_store_bounds)
    return;

  chkp_walk_pointer_assignments (lhs, rhs, &iter, chkp_copy_bounds_for_elem);

  /* We should create edges for all created calls to bndldx and bndstx.  */
  /* ITER was advanced past ASSIGN by the walk; scan backwards over the
     newly inserted statements until we are back at ASSIGN.  */
  while (gsi_stmt (iter) != assign)
    {
      gimple stmt = gsi_stmt (iter);
      if (gimple_code (stmt) == GIMPLE_CALL)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  struct cgraph_node *callee = cgraph_node::get_create (fndecl);
	  struct cgraph_edge *new_edge;

	  /* Only checker builtins are expected among the new calls.  */
	  gcc_assert (fndecl == chkp_bndstx_fndecl
		      || fndecl == chkp_bndldx_fndecl
		      || fndecl == chkp_ret_bnd_fndecl);

	  new_edge = edge->caller->create_edge (callee,
						as_a <gcall *> (stmt),
						edge->count,
						edge->frequency);
	  new_edge->frequency = compute_call_stmt_bb_frequency
	    (edge->caller->decl, gimple_bb (stmt));
	}
      gsi_prev (&iter);
    }
}
/* Some code transformation made during instrumentation pass
   may put code into inconsistent state.  Here we find and fix
   such flaws.

   Specifically: statements inserted after a block-ending statement
   are moved onto the fallthru edge.  */
void
chkp_fix_cfg ()
{
  basic_block bb;
  gimple_stmt_iterator i;

  /* We could insert some code right after stmt which ends bb.
     We wanted to put this code on fallthru edge but did not
     add new edges from the beginning because it may cause new
     phi node creation which may be incorrect due to incomplete
     bound phi nodes.  */
  FOR_ALL_BB_FN (bb, cfun)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      {
	gimple stmt = gsi_stmt (i);
	gimple_stmt_iterator next = i;

	gsi_next (&next);

	/* Trailing statements after the block terminator indicate
	   the inconsistency we need to repair.  */
	if (stmt_ends_bb_p (stmt)
	    && !gsi_end_p (next))
	  {
	    edge fall = find_fallthru_edge (bb->succs);
	    basic_block dest = NULL;
	    int flags = 0;

	    gcc_assert (fall);

	    /* We cannot split abnormal edge.  Therefore we
	       store its params, make it regular and then
	       rebuild abnormal edge after split.  */
	    if (fall->flags & EDGE_ABNORMAL)
	      {
		flags = fall->flags & ~EDGE_FALLTHRU;
		dest = fall->dest;

		fall->flags &= ~EDGE_COMPLEX;
	      }

	    /* Move each trailing statement onto the fallthru edge.  */
	    while (!gsi_end_p (next))
	      {
		gimple next_stmt = gsi_stmt (next);
		gsi_remove (&next, false);
		gsi_insert_on_edge (fall, next_stmt);
	      }

	    gsi_commit_edge_inserts ();

	    /* Re-create abnormal edge.  */
	    if (dest)
	      make_edge (bb, dest, flags);
	  }
      }
}
/* Walker callback for chkp_replace_function_pointers.  Replaces
   function pointer in the specified operand with pointer to the
   instrumented function version.

   OP is the walked operand; WALK_SUBTREES is cleared once a
   FUNCTION_DECL has been handled so its children are skipped.
   Always returns NULL to continue the walk.  */
static tree
chkp_replace_function_pointer (tree *op, int *walk_subtrees,
			       void *data ATTRIBUTE_UNUSED)
{
  if (TREE_CODE (*op) == FUNCTION_DECL
      /* "bnd_legacy" functions are never instrumented.  */
      && !lookup_attribute ("bnd_legacy", DECL_ATTRIBUTES (*op))
      && (DECL_BUILT_IN_CLASS (*op) == NOT_BUILT_IN
	  /* For builtins we replace pointers only for selected
	     function and functions having definitions.  */
	  || (DECL_BUILT_IN_CLASS (*op) == BUILT_IN_NORMAL
	      && (chkp_instrument_normal_builtin (*op)
		  || gimple_has_body_p (*op)))))
    {
      struct cgraph_node *node = cgraph_node::get_create (*op);
      struct cgraph_node *clone = NULL;

      /* Do not clone a clone; for already-instrumented functions
	 CLONE stays NULL and *OP is left unchanged.  */
      if (!node->instrumentation_clone)
	clone = chkp_maybe_create_clone (*op);

      if (clone)
	*op = clone->decl;
      *walk_subtrees = 0;
    }

  return NULL;
}
  3445. /* This function searches for function pointers in statement
  3446. pointed by GSI and replaces them with pointers to instrumented
  3447. function versions. */
  3448. static void
  3449. chkp_replace_function_pointers (gimple_stmt_iterator *gsi)
  3450. {
  3451. gimple stmt = gsi_stmt (*gsi);
  3452. /* For calls we want to walk call args only. */
  3453. if (gimple_code (stmt) == GIMPLE_CALL)
  3454. {
  3455. unsigned i;
  3456. for (i = 0; i < gimple_call_num_args (stmt); i++)
  3457. walk_tree (gimple_call_arg_ptr (stmt, i),
  3458. chkp_replace_function_pointer, NULL, NULL);
  3459. }
  3460. else
  3461. walk_gimple_stmt (gsi, NULL, chkp_replace_function_pointer, NULL);
  3462. }
/* This function instruments all statements working with memory,
   calls and rets.

   It also removes excess statements from static initializers.

   Finally, for address-taken parameters with bounds, stores the
   incoming bounds into the bounds table at function entry.  */
static void
chkp_instrument_function (void)
{
  basic_block bb, next;
  gimple_stmt_iterator i;
  enum gimple_rhs_class grhs_class;
  /* In checker static constructors every access is considered safe.  */
  bool safe = lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));

  bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb;
  do
    {
      /* Remember the successor up front: instrumentation may split
	 blocks while we iterate.  */
      next = bb->next_bb;
      for (i = gsi_start_bb (bb); !gsi_end_p (i); )
        {
          gimple s = gsi_stmt (i);

          /* Skip statement marked to not be instrumented.  */
          if (chkp_marked_stmt_p (s))
            {
              gsi_next (&i);
              continue;
            }

	  chkp_replace_function_pointers (&i);

          switch (gimple_code (s))
            {
            case GIMPLE_ASSIGN:
	      /* Check the store (LHS), the load (RHS1) and, for binary
		 RHS, the second operand.  */
	      chkp_process_stmt (&i, gimple_assign_lhs (s),
				 gimple_location (s), integer_one_node,
				 NULL_TREE, NULL_TREE, safe);
	      chkp_process_stmt (&i, gimple_assign_rhs1 (s),
				 gimple_location (s), integer_zero_node,
				 NULL_TREE, NULL_TREE, safe);
	      grhs_class = get_gimple_rhs_class (gimple_assign_rhs_code (s));
	      if (grhs_class == GIMPLE_BINARY_RHS)
		chkp_process_stmt (&i, gimple_assign_rhs2 (s),
				   gimple_location (s), integer_zero_node,
				   NULL_TREE, NULL_TREE, safe);
              break;

            case GIMPLE_RETURN:
	      {
		greturn *r = as_a <greturn *> (s);
		if (gimple_return_retval (r) != NULL_TREE)
		  {
		    chkp_process_stmt (&i, gimple_return_retval (r),
				       gimple_location (r),
				       integer_zero_node,
				       NULL_TREE, NULL_TREE, safe);

		    /* Additionally we need to add bounds
		       to return statement.  */
		    chkp_add_bounds_to_ret_stmt (&i);
		  }
	      }
	      break;

	    case GIMPLE_CALL:
	      chkp_add_bounds_to_call_stmt (&i);
	      break;

            default:
              ;
            }

	  gsi_next (&i);

	  /* We do not need any actual pointer stores in checker
	     static initializer.  */
	  if (lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl))
	      && gimple_code (s) == GIMPLE_ASSIGN
	      && gimple_store_p (s))
	    {
	      /* I was already advanced past S, so removing S here
		 is safe.  */
	      gimple_stmt_iterator del_iter = gsi_for_stmt (s);
	      gsi_remove (&del_iter, true);
	      unlink_stmt_vdef (s);
	      release_defs(s);
	    }
        }
      bb = next;
    }
  while (bb);

  /* Some input params may have bounds and be address taken.  In this case
     we should store incoming bounds into bounds table.  */
  tree arg;
  if (flag_chkp_store_bounds)
    for (arg = DECL_ARGUMENTS (cfun->decl); arg; arg = DECL_CHAIN (arg))
      if (TREE_ADDRESSABLE (arg))
	{
	  if (BOUNDED_P (arg))
	    {
	      tree bounds = chkp_get_next_bounds_parm (arg);
	      tree def_ptr = ssa_default_def (cfun, arg);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      chkp_build_bndstx (chkp_build_addr_expr (arg),
				 def_ptr ? def_ptr : arg,
				 bounds, &iter);

	      /* Skip bounds arg.  */
	      arg = TREE_CHAIN (arg);
	    }
	  else if (chkp_type_has_pointer (TREE_TYPE (arg)))
	    {
	      tree orig_arg = arg;
	      bitmap slots = BITMAP_ALLOC (NULL);
	      gimple_stmt_iterator iter
		= gsi_start_bb (chkp_get_entry_block ());
	      bitmap_iterator bi;
	      unsigned bnd_no;

	      chkp_find_bound_slots (TREE_TYPE (arg), slots);

	      /* One bounds store per pointer slot inside the aggregate;
		 ARG advances over the extra bounds parameters.  */
	      EXECUTE_IF_SET_IN_BITMAP (slots, 0, bnd_no, bi)
		{
		  tree bounds = chkp_get_next_bounds_parm (arg);
		  HOST_WIDE_INT offs = bnd_no * POINTER_SIZE / BITS_PER_UNIT;
		  tree addr = chkp_build_addr_expr (orig_arg);
		  tree ptr = build2 (MEM_REF, ptr_type_node, addr,
				     build_int_cst (ptr_type_node, offs));
		  chkp_build_bndstx (chkp_build_addr_expr (ptr), ptr,
				     bounds, &iter);

		  arg = DECL_CHAIN (arg);
		}
	      BITMAP_FREE (slots);
	    }
	}
}
/* Find init/null/copy_ptr_bounds calls and replace them
   with assignments.  It should allow better code
   optimization.  */
static void
chkp_remove_useless_builtins ()
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    {
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
	  gimple stmt = gsi_stmt (gsi);
	  tree fndecl;
	  enum built_in_function fcode;

	  /* Find builtins returning first arg and replace
	     them with assignments.  */
	  /* Note the assignments inside the condition: FNDECL and
	     FCODE are set as side effects of the checks.  The
	     `(fcode = ...)' term also requires a nonzero code, which
	     holds for the four codes tested below.  */
	  if (gimple_code (stmt) == GIMPLE_CALL
	      && (fndecl = gimple_call_fndecl (stmt))
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && (fcode = DECL_FUNCTION_CODE (fndecl))
	      && (fcode == BUILT_IN_CHKP_INIT_PTR_BOUNDS
		  || fcode == BUILT_IN_CHKP_NULL_PTR_BOUNDS
		  || fcode == BUILT_IN_CHKP_COPY_PTR_BOUNDS
		  || fcode == BUILT_IN_CHKP_SET_PTR_BOUNDS))
	    {
	      /* Replace the call with `lhs = first_arg'.  */
	      tree res = gimple_call_arg (stmt, 0);
	      update_call_from_tree (&gsi, res);
	      stmt = gsi_stmt (gsi);
	      update_stmt (stmt);
            }
        }
    }
}
/* Initialize pass.

   Resets all per-function bookkeeping (bounds maps, cached bounds,
   entry block), unmarks all statements, emits the shared zero/none
   bounds constants and computes dominance info.  */
static void
chkp_init (void)
{
  basic_block bb;
  gimple_stmt_iterator i;

  in_chkp_pass = true;

  /* Clear instrumentation marks possibly left from a previous run.  */
  for (bb = ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb; bb; bb = bb->next_bb)
    for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
      chkp_unmark_stmt (gsi_stmt (i));

  /* chkp_invalid_bounds, chkp_completed_bounds_set,
     chkp_reg_addr_bounds and chkp_incomplete_bounds_map are deleted
     by chkp_fini, so they are allocated fresh here without a prior
     delete; the other maps persist across functions and are deleted
     before reallocation.  */
  chkp_invalid_bounds = new hash_set<tree>;
  chkp_completed_bounds_set = new hash_set<tree>;
  delete chkp_reg_bounds;
  chkp_reg_bounds = new hash_map<tree, tree>;
  delete chkp_bound_vars;
  chkp_bound_vars = new hash_map<tree, tree>;
  chkp_reg_addr_bounds = new hash_map<tree, tree>;
  chkp_incomplete_bounds_map = new hash_map<tree, tree>;
  delete chkp_bounds_map;
  chkp_bounds_map = new hash_map<tree, tree>;
  chkp_abnormal_copies = BITMAP_GGC_ALLOC ();

  entry_block = NULL;
  zero_bounds = NULL_TREE;
  none_bounds = NULL_TREE;
  incomplete_bounds = integer_zero_node;
  tmp_var = NULL_TREE;
  size_tmp_var = NULL_TREE;

  chkp_uintptr_type = lang_hooks.types.type_for_mode (ptr_mode, true);

  /* We create these constant bounds once for each object file.
     These symbols go to comdat section and result in single copy
     of each one in the final binary.  */
  chkp_get_zero_bounds_var ();
  chkp_get_none_bounds_var ();

  calculate_dominance_info (CDI_DOMINATORS);
  calculate_dominance_info (CDI_POST_DOMINATORS);

  bitmap_obstack_initialize (NULL);
}
  3653. /* Finalize instrumentation pass. */
  3654. static void
  3655. chkp_fini (void)
  3656. {
  3657. in_chkp_pass = false;
  3658. delete chkp_invalid_bounds;
  3659. delete chkp_completed_bounds_set;
  3660. delete chkp_reg_addr_bounds;
  3661. delete chkp_incomplete_bounds_map;
  3662. free_dominance_info (CDI_DOMINATORS);
  3663. free_dominance_info (CDI_POST_DOMINATORS);
  3664. bitmap_obstack_release (NULL);
  3665. entry_block = NULL;
  3666. zero_bounds = NULL_TREE;
  3667. none_bounds = NULL_TREE;
  3668. }
/* Main instrumentation pass function.

   The call order is significant: initialization, instrumentation,
   builtin cleanup, marking, CFG repair, teardown.  Always returns 0
   (no extra TODO flags).  */
static unsigned int
chkp_execute (void)
{
  chkp_init ();

  chkp_instrument_function ();

  chkp_remove_useless_builtins ();

  chkp_function_mark_instrumented (cfun->decl);

  chkp_fix_cfg ();

  chkp_fini ();

  return 0;
}
  3681. /* Instrumentation pass gate. */
  3682. static bool
  3683. chkp_gate (void)
  3684. {
  3685. return cgraph_node::get (cfun->decl)->instrumentation_clone
  3686. || lookup_attribute ("chkp ctor", DECL_ATTRIBUTES (cfun->decl));
  3687. }
namespace {

/* Pass descriptor for the Pointer Bounds Checker instrumentation
   pass.  Requires SSA and CFG; requests IL verification and SSA
   update when done.  */
const pass_data pass_data_chkp =
{
  GIMPLE_PASS, /* type */
  "chkp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_ssa | PROP_cfg, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_verify_il
  | TODO_update_ssa /* todo_flags_finish */
};

/* GIMPLE pass wrapper delegating to chkp_gate/chkp_execute.  */
class pass_chkp : public gimple_opt_pass
{
public:
  pass_chkp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_chkp, ctxt)
  {}

  /* opt_pass methods: */
  virtual opt_pass * clone ()
    {
      /* Cloneable so the pass may appear multiple times in the
	 pipeline.  */
      return new pass_chkp (m_ctxt);
    }

  virtual bool gate (function *)
    {
      return chkp_gate ();
    }

  virtual unsigned int execute (function *)
    {
      return chkp_execute ();
    }
}; // class pass_chkp

} // anon namespace
  3723. gimple_opt_pass *
  3724. make_pass_chkp (gcc::context *ctxt)
  3725. {
  3726. return new pass_chkp (ctxt);
  3727. }
  3728. #include "gt-tree-chkp.h"