/* AddressSanitizer, a fast memory error detector.
   Copyright (C) 2012-2015 Free Software Foundation, Inc.
   Contributed by Kostya Serebryany <kcc@google.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "hash-table.h"
#include "predict.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfganal.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "calls.h"
#include "varasm.h"
#include "stor-layout.h"
#include "tree-iterator.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-pass.h"
#include "asan.h"
#include "gimple-pretty-print.h"
#include "target.h"
#include "hashtab.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "output.h"
#include "tm_p.h"
#include "langhooks.h"
#include "alloc-pool.h"
#include "cfgloop.h"
#include "gimple-builder.h"
#include "ubsan.h"
#include "params.h"
#include "builtins.h"
/* AddressSanitizer finds out-of-bounds and use-after-free bugs
   with <2x slowdown on average.

   The tool consists of two parts:
   instrumentation module (this file) and a run-time library.
   The instrumentation module adds a run-time check before every memory insn.
     For an 8- or 16-byte load accessing address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;  // *(short*) for 16-byte access.
       if (ShadowValue)
         __asan_report_load8(X);
     For a load of N bytes (N=1, 2 or 4) from address X:
       ShadowAddr = (X >> 3) + Offset
       ShadowValue = *(char*)ShadowAddr;
       if (ShadowValue)
         if ((X & 7) + N - 1 > ShadowValue)
           __asan_report_loadN(X);
   Stores are instrumented similarly, but using __asan_report_storeN functions.
   A call to __asan_init_vN() is inserted to the list of module CTORs.
   N is the version number of the AddressSanitizer API.  The changes between
   the API versions are listed in libsanitizer/asan/asan_interface_internal.h.

   The run-time library redefines malloc (so that redzones are inserted around
   the allocated memory) and free (so that reuse of freed memory is delayed),
   and provides the __asan_report* and __asan_init_vN functions.

   Read more:
   http://code.google.com/p/address-sanitizer/wiki/AddressSanitizerAlgorithm

   The current implementation supports detection of out-of-bounds and
   use-after-free in the heap, on the stack and for global variables.

   [Protection of stack variables]

   To understand how detection of out-of-bounds and use-after-free works
   for stack variables, let's look at this example on x86_64 where the
   stack grows downward:

     int
     foo ()
     {
       char a[23] = {0};
       int b[2] = {0};

       a[5] = 1;
       b[1] = 2;

       return a[5] + b[1];
     }

   For this function, the stack protected by asan will be organized as
   follows, from the top of the stack to the bottom:

   Slot 1/ [red zone of 32 bytes called 'RIGHT RedZone']

   Slot 2/ [8 bytes of red zone, that adds up to the space of 'a' to make
            the next slot be 32 bytes aligned; this one is called Partial
            Redzone; this 32-byte alignment is an asan constraint]

   Slot 3/ [24 bytes for variable 'a']

   Slot 4/ [red zone of 32 bytes called 'Middle RedZone']

   Slot 5/ [24 bytes of Partial Red Zone (similar to slot 2)]

   Slot 6/ [8 bytes for variable 'b']

   Slot 7/ [32 bytes of Red Zone at the bottom of the stack, called
            'LEFT RedZone']

   The 32 bytes of LEFT red zone at the bottom of the stack can be
   decomposed as such:

     1/ The first 8 bytes contain a magical asan number that is always
     0x41B58AB3.

     2/ The following 8 bytes contain a pointer to a string (to be parsed
     at runtime by the runtime asan library), whose format is the
     following:

      "<function-name> <space> <num-of-variables-on-the-stack>
      (<32-bytes-aligned-offset-in-bytes-of-variable> <space>
      <length-of-var-in-bytes> ){n} "

     where '(...){n}' means the content inside the parenthesis occurs 'n'
     times, with 'n' being the number of variables on the stack.

     3/ The following 8 bytes contain the PC of the current function which
     will be used by the run-time library to print an error message.

     4/ The following 8 bytes are reserved for internal use by the run-time.

   The shadow memory for that stack layout is going to look like this:

     - content of shadow memory 8 bytes for slot 7: 0xF1F1F1F1.
       The F1 byte pattern is a magic number called
       ASAN_STACK_MAGIC_LEFT and is a way for the runtime to know that
       the memory for that shadow byte is part of the LEFT red zone
       intended to sit at the bottom of the variables on the stack.

     - content of shadow memory 8 bytes for slots 6 and 5:
       0xF4F4F400.  The F4 byte pattern is a magic number
       called ASAN_STACK_MAGIC_PARTIAL.  It flags the fact that the
       memory region for this shadow byte is a PARTIAL red zone
       intended to pad a variable A, so that the slot following
       {A,padding} is 32 bytes aligned.

       Note that the fact that the least significant byte of this
       shadow memory content is 00 means that 8 bytes of its
       corresponding memory (which corresponds to the memory of
       variable 'b') are addressable.

     - content of shadow memory 8 bytes for slot 4: 0xF2F2F2F2.
       The F2 byte pattern is a magic number called
       ASAN_STACK_MAGIC_MIDDLE.  It flags the fact that the memory
       region for this shadow byte is a MIDDLE red zone intended to
       sit between two 32-byte aligned slots of {variable,padding}.

     - content of shadow memory 8 bytes for slots 3 and 2:
       0xF4000000.  This represents the concatenation of
       variable 'a' and the partial red zone following it, like what we
       had for variable 'b'.  The least significant 3 bytes being 00
       means that the 24 bytes of memory holding variable 'a' are
       addressable.

     - content of shadow memory 8 bytes for slot 1: 0xF3F3F3F3.
       The F3 byte pattern is a magic number called
       ASAN_STACK_MAGIC_RIGHT.  It flags the fact that the memory
       region for this shadow byte is a RIGHT red zone intended to sit
       at the top of the variables of the stack.

   Note that the real variable layout is done in expand_used_vars in
   cfgexpand.c.  As far as Address Sanitizer is concerned, it lays out
   stack variables as well as the different red zones, emits some
   prologue code to populate the shadow memory so as to poison (mark as
   non-accessible) the regions of the red zones and mark the regions of
   stack variables as accessible, and emits some epilogue code to
   un-poison (mark as accessible) the regions of red zones right before
   the function exits.

   [Protection of global variables]

   The basic idea is to insert a red zone between two global variables
   and install a constructor function that calls the asan runtime to do
   the populating of the relevant shadow memory regions at load time.

   So the global variables are laid out so as to insert a red zone
   between them.  The red zones are sized so that each variable starts
   on a 32-byte boundary.

   Then a constructor function is installed so that, for each global
   variable, it calls the runtime asan library function
   __asan_register_globals with an instance of this type:

     struct __asan_global
     {
       // Address of the beginning of the global variable.
       const void *__beg;

       // Initial size of the global variable.
       uptr __size;

       // Size of the global variable + size of the red zone.  This
       // size is 32 bytes aligned.
       uptr __size_with_redzone;

       // Name of the global variable.
       const void *__name;

       // Name of the module where the global variable is declared.
       const void *__module_name;

       // 1 if it has dynamic initialization, 0 otherwise.
       uptr __has_dynamic_init;

       // A pointer to struct that contains source location, could be NULL.
       __asan_global_source_location *__location;
     }

   A destructor function that calls the runtime asan library function
   __asan_unregister_globals is also installed.  */
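
/* Illustrative sketch, not part of the pass: the check sequence described
   above, written out in plain C for an N-byte access with N <= 8, assuming
   the default shadow scale of 3.  All names here (example_*) are
   hypothetical, and the block is compiled out.  */
#if 0
#include <stdint.h>
#include <stddef.h>

extern uintptr_t example_shadow_offset;		  /* the Offset above */
extern void example_report_load (uintptr_t addr); /* stands in for
						     __asan_report_loadN */

static void
example_check_load (const void *p, size_t n)
{
  uintptr_t x = (uintptr_t) p;
  /* ShadowAddr = (X >> 3) + Offset.  */
  int8_t shadow = *(int8_t *) ((x >> 3) + example_shadow_offset);
  /* A zero shadow byte means the whole 8-byte granule is addressable.
     Otherwise a positive shadow byte holds how many leading bytes of
     the granule are addressable, and a negative one marks a red zone,
     so a signed compare flags any access that reaches past it.  */
  if (shadow != 0
      && (intptr_t) ((x & 7) + n - 1) > (intptr_t) shadow)
    example_report_load (x);
}
#endif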
static unsigned HOST_WIDE_INT asan_shadow_offset_value;
static bool asan_shadow_offset_computed;

/* Sets shadow offset to value in string VAL.  */

bool
set_asan_shadow_offset (const char *val)
{
  char *endp;

  errno = 0;
#ifdef HAVE_LONG_LONG
  asan_shadow_offset_value = strtoull (val, &endp, 0);
#else
  asan_shadow_offset_value = strtoul (val, &endp, 0);
#endif
  if (!(*val != '\0' && *endp == '\0' && errno == 0))
    return false;

  asan_shadow_offset_computed = true;

  return true;
}

/* Returns Asan shadow offset.  */

static unsigned HOST_WIDE_INT
asan_shadow_offset ()
{
  if (!asan_shadow_offset_computed)
    {
      asan_shadow_offset_computed = true;
      asan_shadow_offset_value = targetm.asan_shadow_offset ();
    }
  return asan_shadow_offset_value;
}
alias_set_type asan_shadow_set = -1;

/* Pointer types to 1 resp. 2 byte integers in shadow memory.  A separate
   alias set is used for all shadow memory accesses.  */
static GTY(()) tree shadow_ptr_types[2];

/* Decl for __asan_option_detect_stack_use_after_return.  */
static GTY(()) tree asan_detect_stack_use_after_return;

/* Various flags for Asan builtins.  */
enum asan_check_flags
{
  ASAN_CHECK_STORE = 1 << 0,
  ASAN_CHECK_SCALAR_ACCESS = 1 << 1,
  ASAN_CHECK_NON_ZERO_LEN = 1 << 2,
  ASAN_CHECK_LAST = 1 << 3
};
/* Hashtable support for memory references used by gimple
   statements.  */

/* This type represents a reference to a memory region.  */
struct asan_mem_ref
{
  /* The expression of the beginning of the memory region.  */
  tree start;

  /* The size of the access.  */
  HOST_WIDE_INT access_size;
};

static alloc_pool asan_mem_ref_alloc_pool;

/* This creates the alloc pool used to store the instances of
   asan_mem_ref that are stored in the hash table asan_mem_ref_ht.  */

static alloc_pool
asan_mem_ref_get_alloc_pool ()
{
  if (asan_mem_ref_alloc_pool == NULL)
    asan_mem_ref_alloc_pool = create_alloc_pool ("asan_mem_ref",
						 sizeof (asan_mem_ref),
						 10);
  return asan_mem_ref_alloc_pool;
}

/* Initializes an instance of asan_mem_ref.  */

static void
asan_mem_ref_init (asan_mem_ref *ref, tree start, HOST_WIDE_INT access_size)
{
  ref->start = start;
  ref->access_size = access_size;
}

/* Allocates memory for an instance of asan_mem_ref into the memory
   pool returned by asan_mem_ref_get_alloc_pool and initialize it.
   START is the address of (or the expression pointing to) the
   beginning of memory reference.  ACCESS_SIZE is the size of the
   access to the referenced memory.  */

static asan_mem_ref*
asan_mem_ref_new (tree start, HOST_WIDE_INT access_size)
{
  asan_mem_ref *ref
    = (asan_mem_ref *) pool_alloc (asan_mem_ref_get_alloc_pool ());

  asan_mem_ref_init (ref, start, access_size);
  return ref;
}
/* This builds and returns a pointer to the end of the memory region
   that starts at START and of length LEN.  */

tree
asan_mem_ref_get_end (tree start, tree len)
{
  if (len == NULL_TREE || integer_zerop (len))
    return start;

  if (!ptrofftype_p (len))
    len = convert_to_ptrofftype (len);

  return fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (start), start, len);
}

/* Return a tree expression that represents the end of the referenced
   memory region.  Beware that this function can actually build a new
   tree expression.  */

tree
asan_mem_ref_get_end (const asan_mem_ref *ref, tree len)
{
  return asan_mem_ref_get_end (ref->start, len);
}
struct asan_mem_ref_hasher
  : typed_noop_remove <asan_mem_ref>
{
  typedef asan_mem_ref value_type;
  typedef asan_mem_ref compare_type;

  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

/* Hash a memory reference.  */

inline hashval_t
asan_mem_ref_hasher::hash (const asan_mem_ref *mem_ref)
{
  return iterative_hash_expr (mem_ref->start, 0);
}

/* Compare two memory references.  We accept the length of either
   memory reference to be NULL_TREE.  */

inline bool
asan_mem_ref_hasher::equal (const asan_mem_ref *m1,
			    const asan_mem_ref *m2)
{
  return operand_equal_p (m1->start, m2->start, 0);
}

static hash_table<asan_mem_ref_hasher> *asan_mem_ref_ht;

/* Returns a reference to the hash table containing memory references.
   This function ensures that the hash table is created.  Note that
   this hash table is updated by the function
   update_mem_ref_hash_table.  */

static hash_table<asan_mem_ref_hasher> *
get_mem_ref_hash_table ()
{
  if (!asan_mem_ref_ht)
    asan_mem_ref_ht = new hash_table<asan_mem_ref_hasher> (10);

  return asan_mem_ref_ht;
}
/* Clear all entries from the memory references hash table.  */

static void
empty_mem_ref_hash_table ()
{
  if (asan_mem_ref_ht)
    asan_mem_ref_ht->empty ();
}

/* Free the memory references hash table.  */

static void
free_mem_ref_resources ()
{
  delete asan_mem_ref_ht;
  asan_mem_ref_ht = NULL;

  if (asan_mem_ref_alloc_pool)
    {
      free_alloc_pool (asan_mem_ref_alloc_pool);
      asan_mem_ref_alloc_pool = NULL;
    }
}

/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (tree ref, HOST_WIDE_INT access_size)
{
  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref *saved_ref = get_mem_ref_hash_table ()->find (&r);
  return saved_ref && saved_ref->access_size >= access_size;
}

/* Return true iff the memory reference REF has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref)
{
  return has_mem_ref_been_instrumented (ref->start, ref->access_size);
}

/* Return true iff access to memory region starting at REF and of
   length LEN has been instrumented.  */

static bool
has_mem_ref_been_instrumented (const asan_mem_ref *ref, tree len)
{
  HOST_WIDE_INT size_in_bytes
    = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  return size_in_bytes != -1
    && has_mem_ref_been_instrumented (ref->start, size_in_bytes);
}
/* Set REF to the memory reference present in a gimple assignment
   ASSIGNMENT.  Return true upon successful completion, false
   otherwise.  */

static bool
get_mem_ref_of_assignment (const gassign *assignment,
			   asan_mem_ref *ref,
			   bool *ref_is_store)
{
  gcc_assert (gimple_assign_single_p (assignment));

  if (gimple_store_p (assignment)
      && !gimple_clobber_p (assignment))
    {
      ref->start = gimple_assign_lhs (assignment);
      *ref_is_store = true;
    }
  else if (gimple_assign_load_p (assignment))
    {
      ref->start = gimple_assign_rhs1 (assignment);
      *ref_is_store = false;
    }
  else
    return false;

  ref->access_size = int_size_in_bytes (TREE_TYPE (ref->start));
  return true;
}
/* Return the memory references contained in a gimple statement
   representing a builtin call that has to do with memory access.  */

static bool
get_mem_refs_of_builtin_call (const gcall *call,
			      asan_mem_ref *src0,
			      tree *src0_len,
			      bool *src0_is_store,
			      asan_mem_ref *src1,
			      tree *src1_len,
			      bool *src1_is_store,
			      asan_mem_ref *dst,
			      tree *dst_len,
			      bool *dst_is_store,
			      bool *dest_is_deref,
			      bool *intercepted_p)
{
  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  tree callee = gimple_call_fndecl (call);
  tree source0 = NULL_TREE, source1 = NULL_TREE,
    dest = NULL_TREE, len = NULL_TREE;
  bool is_store = true, got_reference_p = false;
  HOST_WIDE_INT access_size = 1;

  *intercepted_p = asan_intercepted_p ((DECL_FUNCTION_CODE (callee)));

  switch (DECL_FUNCTION_CODE (callee))
    {
      /* (s, s, n) style memops.  */
    case BUILT_IN_BCMP:
    case BUILT_IN_MEMCMP:
      source0 = gimple_call_arg (call, 0);
      source1 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (src, dest, n) style memops.  */
    case BUILT_IN_BCOPY:
      source0 = gimple_call_arg (call, 0);
      dest = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, src, n) style memops.  */
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      dest = gimple_call_arg (call, 0);
      source0 = gimple_call_arg (call, 1);
      len = gimple_call_arg (call, 2);
      break;

      /* (dest, n) style memops.  */
    case BUILT_IN_BZERO:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 1);
      break;

      /* (dest, x, n) style memops.  */
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      dest = gimple_call_arg (call, 0);
      len = gimple_call_arg (call, 2);
      break;

    case BUILT_IN_STRLEN:
      source0 = gimple_call_arg (call, 0);
      len = gimple_call_lhs (call);
      break;

      /* And now the __atomic* and __sync builtins.
	 These are handled differently from the classical memory
	 access builtins above.  */
    case BUILT_IN_ATOMIC_LOAD_1:
    case BUILT_IN_ATOMIC_LOAD_2:
    case BUILT_IN_ATOMIC_LOAD_4:
    case BUILT_IN_ATOMIC_LOAD_8:
    case BUILT_IN_ATOMIC_LOAD_16:
      is_store = false;
      /* fall through.  */
    case BUILT_IN_SYNC_FETCH_AND_ADD_1:
    case BUILT_IN_SYNC_FETCH_AND_ADD_2:
    case BUILT_IN_SYNC_FETCH_AND_ADD_4:
    case BUILT_IN_SYNC_FETCH_AND_ADD_8:
    case BUILT_IN_SYNC_FETCH_AND_ADD_16:
    case BUILT_IN_SYNC_FETCH_AND_SUB_1:
    case BUILT_IN_SYNC_FETCH_AND_SUB_2:
    case BUILT_IN_SYNC_FETCH_AND_SUB_4:
    case BUILT_IN_SYNC_FETCH_AND_SUB_8:
    case BUILT_IN_SYNC_FETCH_AND_SUB_16:
    case BUILT_IN_SYNC_FETCH_AND_OR_1:
    case BUILT_IN_SYNC_FETCH_AND_OR_2:
    case BUILT_IN_SYNC_FETCH_AND_OR_4:
    case BUILT_IN_SYNC_FETCH_AND_OR_8:
    case BUILT_IN_SYNC_FETCH_AND_OR_16:
    case BUILT_IN_SYNC_FETCH_AND_AND_1:
    case BUILT_IN_SYNC_FETCH_AND_AND_2:
    case BUILT_IN_SYNC_FETCH_AND_AND_4:
    case BUILT_IN_SYNC_FETCH_AND_AND_8:
    case BUILT_IN_SYNC_FETCH_AND_AND_16:
    case BUILT_IN_SYNC_FETCH_AND_XOR_1:
    case BUILT_IN_SYNC_FETCH_AND_XOR_2:
    case BUILT_IN_SYNC_FETCH_AND_XOR_4:
    case BUILT_IN_SYNC_FETCH_AND_XOR_8:
    case BUILT_IN_SYNC_FETCH_AND_XOR_16:
    case BUILT_IN_SYNC_FETCH_AND_NAND_1:
    case BUILT_IN_SYNC_FETCH_AND_NAND_2:
    case BUILT_IN_SYNC_FETCH_AND_NAND_4:
    case BUILT_IN_SYNC_FETCH_AND_NAND_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_1:
    case BUILT_IN_SYNC_ADD_AND_FETCH_2:
    case BUILT_IN_SYNC_ADD_AND_FETCH_4:
    case BUILT_IN_SYNC_ADD_AND_FETCH_8:
    case BUILT_IN_SYNC_ADD_AND_FETCH_16:
    case BUILT_IN_SYNC_SUB_AND_FETCH_1:
    case BUILT_IN_SYNC_SUB_AND_FETCH_2:
    case BUILT_IN_SYNC_SUB_AND_FETCH_4:
    case BUILT_IN_SYNC_SUB_AND_FETCH_8:
    case BUILT_IN_SYNC_SUB_AND_FETCH_16:
    case BUILT_IN_SYNC_OR_AND_FETCH_1:
    case BUILT_IN_SYNC_OR_AND_FETCH_2:
    case BUILT_IN_SYNC_OR_AND_FETCH_4:
    case BUILT_IN_SYNC_OR_AND_FETCH_8:
    case BUILT_IN_SYNC_OR_AND_FETCH_16:
    case BUILT_IN_SYNC_AND_AND_FETCH_1:
    case BUILT_IN_SYNC_AND_AND_FETCH_2:
    case BUILT_IN_SYNC_AND_AND_FETCH_4:
    case BUILT_IN_SYNC_AND_AND_FETCH_8:
    case BUILT_IN_SYNC_AND_AND_FETCH_16:
    case BUILT_IN_SYNC_XOR_AND_FETCH_1:
    case BUILT_IN_SYNC_XOR_AND_FETCH_2:
    case BUILT_IN_SYNC_XOR_AND_FETCH_4:
    case BUILT_IN_SYNC_XOR_AND_FETCH_8:
    case BUILT_IN_SYNC_XOR_AND_FETCH_16:
    case BUILT_IN_SYNC_NAND_AND_FETCH_1:
    case BUILT_IN_SYNC_NAND_AND_FETCH_2:
    case BUILT_IN_SYNC_NAND_AND_FETCH_4:
    case BUILT_IN_SYNC_NAND_AND_FETCH_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
    case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
    case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
    case BUILT_IN_SYNC_LOCK_RELEASE_1:
    case BUILT_IN_SYNC_LOCK_RELEASE_2:
    case BUILT_IN_SYNC_LOCK_RELEASE_4:
    case BUILT_IN_SYNC_LOCK_RELEASE_8:
    case BUILT_IN_SYNC_LOCK_RELEASE_16:
    case BUILT_IN_ATOMIC_EXCHANGE_1:
    case BUILT_IN_ATOMIC_EXCHANGE_2:
    case BUILT_IN_ATOMIC_EXCHANGE_4:
    case BUILT_IN_ATOMIC_EXCHANGE_8:
    case BUILT_IN_ATOMIC_EXCHANGE_16:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
    case BUILT_IN_ATOMIC_STORE_1:
    case BUILT_IN_ATOMIC_STORE_2:
    case BUILT_IN_ATOMIC_STORE_4:
    case BUILT_IN_ATOMIC_STORE_8:
    case BUILT_IN_ATOMIC_STORE_16:
    case BUILT_IN_ATOMIC_ADD_FETCH_1:
    case BUILT_IN_ATOMIC_ADD_FETCH_2:
    case BUILT_IN_ATOMIC_ADD_FETCH_4:
    case BUILT_IN_ATOMIC_ADD_FETCH_8:
    case BUILT_IN_ATOMIC_ADD_FETCH_16:
    case BUILT_IN_ATOMIC_SUB_FETCH_1:
    case BUILT_IN_ATOMIC_SUB_FETCH_2:
    case BUILT_IN_ATOMIC_SUB_FETCH_4:
    case BUILT_IN_ATOMIC_SUB_FETCH_8:
    case BUILT_IN_ATOMIC_SUB_FETCH_16:
    case BUILT_IN_ATOMIC_AND_FETCH_1:
    case BUILT_IN_ATOMIC_AND_FETCH_2:
    case BUILT_IN_ATOMIC_AND_FETCH_4:
    case BUILT_IN_ATOMIC_AND_FETCH_8:
    case BUILT_IN_ATOMIC_AND_FETCH_16:
    case BUILT_IN_ATOMIC_NAND_FETCH_1:
    case BUILT_IN_ATOMIC_NAND_FETCH_2:
    case BUILT_IN_ATOMIC_NAND_FETCH_4:
    case BUILT_IN_ATOMIC_NAND_FETCH_8:
    case BUILT_IN_ATOMIC_NAND_FETCH_16:
    case BUILT_IN_ATOMIC_XOR_FETCH_1:
    case BUILT_IN_ATOMIC_XOR_FETCH_2:
    case BUILT_IN_ATOMIC_XOR_FETCH_4:
    case BUILT_IN_ATOMIC_XOR_FETCH_8:
    case BUILT_IN_ATOMIC_XOR_FETCH_16:
    case BUILT_IN_ATOMIC_OR_FETCH_1:
    case BUILT_IN_ATOMIC_OR_FETCH_2:
    case BUILT_IN_ATOMIC_OR_FETCH_4:
    case BUILT_IN_ATOMIC_OR_FETCH_8:
    case BUILT_IN_ATOMIC_OR_FETCH_16:
    case BUILT_IN_ATOMIC_FETCH_ADD_1:
    case BUILT_IN_ATOMIC_FETCH_ADD_2:
    case BUILT_IN_ATOMIC_FETCH_ADD_4:
    case BUILT_IN_ATOMIC_FETCH_ADD_8:
    case BUILT_IN_ATOMIC_FETCH_ADD_16:
    case BUILT_IN_ATOMIC_FETCH_SUB_1:
    case BUILT_IN_ATOMIC_FETCH_SUB_2:
    case BUILT_IN_ATOMIC_FETCH_SUB_4:
    case BUILT_IN_ATOMIC_FETCH_SUB_8:
    case BUILT_IN_ATOMIC_FETCH_SUB_16:
    case BUILT_IN_ATOMIC_FETCH_AND_1:
    case BUILT_IN_ATOMIC_FETCH_AND_2:
    case BUILT_IN_ATOMIC_FETCH_AND_4:
    case BUILT_IN_ATOMIC_FETCH_AND_8:
    case BUILT_IN_ATOMIC_FETCH_AND_16:
    case BUILT_IN_ATOMIC_FETCH_NAND_1:
    case BUILT_IN_ATOMIC_FETCH_NAND_2:
    case BUILT_IN_ATOMIC_FETCH_NAND_4:
    case BUILT_IN_ATOMIC_FETCH_NAND_8:
    case BUILT_IN_ATOMIC_FETCH_NAND_16:
    case BUILT_IN_ATOMIC_FETCH_XOR_1:
    case BUILT_IN_ATOMIC_FETCH_XOR_2:
    case BUILT_IN_ATOMIC_FETCH_XOR_4:
    case BUILT_IN_ATOMIC_FETCH_XOR_8:
    case BUILT_IN_ATOMIC_FETCH_XOR_16:
    case BUILT_IN_ATOMIC_FETCH_OR_1:
    case BUILT_IN_ATOMIC_FETCH_OR_2:
    case BUILT_IN_ATOMIC_FETCH_OR_4:
    case BUILT_IN_ATOMIC_FETCH_OR_8:
    case BUILT_IN_ATOMIC_FETCH_OR_16:
      {
	dest = gimple_call_arg (call, 0);
	/* DEST represents the address of a memory location.
	   instrument_derefs wants the memory location, so let's
	   dereference the address DEST before handing it to
	   instrument_derefs.  */
	if (TREE_CODE (dest) == ADDR_EXPR)
	  dest = TREE_OPERAND (dest, 0);
	else if (TREE_CODE (dest) == SSA_NAME || TREE_CODE (dest) == INTEGER_CST)
	  dest = build2 (MEM_REF, TREE_TYPE (TREE_TYPE (dest)),
			 dest, build_int_cst (TREE_TYPE (dest), 0));
	else
	  gcc_unreachable ();

	access_size = int_size_in_bytes (TREE_TYPE (dest));
      }
      /* Falls through to the default case, which just breaks.  */

    default:
      /* The other builtins' memory accesses are not instrumented in this
	 function because they either don't have any length parameter,
	 or their length parameter is just a limit.  */
      break;
    }

  if (len != NULL_TREE)
    {
      if (source0 != NULL_TREE)
	{
	  src0->start = source0;
	  src0->access_size = access_size;
	  *src0_len = len;
	  *src0_is_store = false;
	}

      if (source1 != NULL_TREE)
	{
	  src1->start = source1;
	  src1->access_size = access_size;
	  *src1_len = len;
	  *src1_is_store = false;
	}

      if (dest != NULL_TREE)
	{
	  dst->start = dest;
	  dst->access_size = access_size;
	  *dst_len = len;
	  *dst_is_store = true;
	}

      got_reference_p = true;
    }
  else if (dest)
    {
      dst->start = dest;
      dst->access_size = access_size;
      *dst_len = NULL_TREE;
      *dst_is_store = is_store;
      *dest_is_deref = true;
      got_reference_p = true;
    }

  return got_reference_p;
}
/* Return true iff a given gimple statement has been instrumented.
   Note that the statement is "defined" by the memory references it
   contains.  */

static bool
has_stmt_been_instrumented_p (gimple stmt)
{
  if (gimple_assign_single_p (stmt))
    {
      bool r_is_store;
      asan_mem_ref r;
      asan_mem_ref_init (&r, NULL, 1);

      if (get_mem_ref_of_assignment (as_a <gassign *> (stmt), &r,
				     &r_is_store))
	return has_mem_ref_been_instrumented (&r);
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      asan_mem_ref src0, src1, dest;
      asan_mem_ref_init (&src0, NULL, 1);
      asan_mem_ref_init (&src1, NULL, 1);
      asan_mem_ref_init (&dest, NULL, 1);

      tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
      bool src0_is_store = false, src1_is_store = false,
	dest_is_store = false, dest_is_deref = false, intercepted_p = true;
      if (get_mem_refs_of_builtin_call (as_a <gcall *> (stmt),
					&src0, &src0_len, &src0_is_store,
					&src1, &src1_len, &src1_is_store,
					&dest, &dest_len, &dest_is_store,
					&dest_is_deref, &intercepted_p))
	{
	  if (src0.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src0, src0_len))
	    return false;

	  if (src1.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&src1, src1_len))
	    return false;

	  if (dest.start != NULL_TREE
	      && !has_mem_ref_been_instrumented (&dest, dest_len))
	    return false;

	  return true;
	}
    }
  return false;
}
/* Insert a memory reference into the hash table.  */

static void
update_mem_ref_hash_table (tree ref, HOST_WIDE_INT access_size)
{
  hash_table<asan_mem_ref_hasher> *ht = get_mem_ref_hash_table ();

  asan_mem_ref r;
  asan_mem_ref_init (&r, ref, access_size);

  asan_mem_ref **slot = ht->find_slot (&r, INSERT);
  if (*slot == NULL || (*slot)->access_size < access_size)
    *slot = asan_mem_ref_new (ref, access_size);
}

/* Initialize shadow_ptr_types array.  */

static void
asan_init_shadow_ptr_types (void)
{
  asan_shadow_set = new_alias_set ();
  shadow_ptr_types[0] = build_distinct_type_copy (signed_char_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[0]) = asan_shadow_set;
  shadow_ptr_types[0] = build_pointer_type (shadow_ptr_types[0]);
  shadow_ptr_types[1] = build_distinct_type_copy (short_integer_type_node);
  TYPE_ALIAS_SET (shadow_ptr_types[1]) = asan_shadow_set;
  shadow_ptr_types[1] = build_pointer_type (shadow_ptr_types[1]);
  initialize_sanitizer_builtins ();
}
/* Create ADDR_EXPR of STRING_CST with the PP pretty printer text.  */

static tree
asan_pp_string (pretty_printer *pp)
{
  const char *buf = pp_formatted_text (pp);
  size_t len = strlen (buf);
  tree ret = build_string (len + 1, buf);
  TREE_TYPE (ret)
    = build_array_type (TREE_TYPE (shadow_ptr_types[0]),
			build_index_type (size_int (len)));
  TREE_READONLY (ret) = 1;
  TREE_STATIC (ret) = 1;
  return build1 (ADDR_EXPR, shadow_ptr_types[0], ret);
}

/* Return a CONST_INT representing 4 subsequent shadow memory bytes.  */

static rtx
asan_shadow_cst (unsigned char shadow_bytes[4])
{
  int i;
  unsigned HOST_WIDE_INT val = 0;
  gcc_assert (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN);
  for (i = 0; i < 4; i++)
    val |= (unsigned HOST_WIDE_INT) shadow_bytes[BYTES_BIG_ENDIAN ? 3 - i : i]
	   << (BITS_PER_UNIT * i);
  return gen_int_mode (val, SImode);
}
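
/* Illustrative note, not from the original source: on a little-endian
   target, shadow_bytes = { 0x00, 0x00, 0x00, 0xF1 } packs into the SImode
   constant 0xF1000000, i.e. shadow_bytes[0] lands in the least significant
   byte and is therefore stored first in memory; on a big-endian target the
   loop index is mirrored (3 - i), so shadow_bytes[0] lands in the most
   significant byte and the memory order of the four shadow bytes is
   preserved there as well.  */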
/* Clear shadow memory at SHADOW_MEM, LEN bytes.  We can't emit a library
   call here though.  */

static void
asan_clear_shadow (rtx shadow_mem, HOST_WIDE_INT len)
{
  rtx_insn *insn, *insns, *jump;
  rtx_code_label *top_label;
  rtx end, addr, tmp;

  start_sequence ();
  clear_storage (shadow_mem, GEN_INT (len), BLOCK_OP_NORMAL);
  insns = get_insns ();
  end_sequence ();
  for (insn = insns; insn; insn = NEXT_INSN (insn))
    if (CALL_P (insn))
      break;
  if (insn == NULL_RTX)
    {
      emit_insn (insns);
      return;
    }

  gcc_assert ((len & 3) == 0);
  top_label = gen_label_rtx ();
  addr = copy_to_mode_reg (Pmode, XEXP (shadow_mem, 0));
  shadow_mem = adjust_automodify_address (shadow_mem, SImode, addr, 0);
  end = force_reg (Pmode, plus_constant (Pmode, addr, len));
  emit_label (top_label);

  emit_move_insn (shadow_mem, const0_rtx);
  tmp = expand_simple_binop (Pmode, PLUS, addr, gen_int_mode (4, Pmode), addr,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != addr)
    emit_move_insn (addr, tmp);
  emit_cmp_and_jump_insns (addr, end, LT, NULL_RTX, Pmode, true, top_label);
  jump = get_last_insn ();
  gcc_assert (JUMP_P (jump));
  add_int_reg_note (jump, REG_BR_PROB, REG_BR_PROB_BASE * 80 / 100);
}
/* Emit the internal LASANPC label at the start of the current function;
   asan_emit_stack_protection below stores its address in the stack frame
   description so that the run-time library can report the function's PC.  */

void
asan_function_start (void)
{
  section *fnsec = function_section (current_function_decl);
  switch_to_section (fnsec);
  ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LASANPC",
			  current_function_funcdef_no);
}
/* Insert code to protect stack vars.  The prologue sequence should be emitted
   directly, epilogue sequence returned.  BASE is the register holding the
   stack base, against which the OFFSETS array offsets are relative.  The
   OFFSETS array contains pairs of offsets in reverse order, always the end
   offset of some gap that needs protection followed by its starting offset,
   and DECLS is an array of representative decls for each var partition.
   LENGTH is the length of the OFFSETS array, and the DECLS array is
   LENGTH / 2 - 1 elements long (OFFSETS include the gap before the first
   variable as well as the gaps after each stack variable).  PBASE is, if
   non-NULL, some pseudo register which stack vars DECL_RTLs are based on.
   Either BASE should be assigned to PBASE, when not doing use after return
   protection, or the corresponding address based on the __asan_stack_malloc*
   return value.  */

rtx_insn *
asan_emit_stack_protection (rtx base, rtx pbase, unsigned int alignb,
			    HOST_WIDE_INT *offsets, tree *decls, int length)
{
  rtx shadow_base, shadow_mem, ret, mem, orig_base;
  rtx_code_label *lab;
  rtx_insn *insns;
  char buf[30];
  unsigned char shadow_bytes[4];
  HOST_WIDE_INT base_offset = offsets[length - 1];
  HOST_WIDE_INT base_align_bias = 0, offset, prev_offset;
  HOST_WIDE_INT asan_frame_size = offsets[0] - base_offset;
  HOST_WIDE_INT last_offset, last_size;
  int l;
  unsigned char cur_shadow_byte = ASAN_STACK_MAGIC_LEFT;
  tree str_cst, decl, id;
  int use_after_return_class = -1;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();

  /* First of all, prepare the description string.  */
  pretty_printer asan_pp;

  pp_decimal_int (&asan_pp, length / 2 - 1);
  pp_space (&asan_pp);
  for (l = length - 2; l; l -= 2)
    {
      tree decl = decls[l / 2 - 1];
      pp_wide_integer (&asan_pp, offsets[l] - base_offset);
      pp_space (&asan_pp);
      pp_wide_integer (&asan_pp, offsets[l - 1] - offsets[l]);
      pp_space (&asan_pp);
      if (DECL_P (decl) && DECL_NAME (decl))
	{
	  pp_decimal_int (&asan_pp, IDENTIFIER_LENGTH (DECL_NAME (decl)));
	  pp_space (&asan_pp);
	  pp_tree_identifier (&asan_pp, DECL_NAME (decl));
	}
      else
	pp_string (&asan_pp, "9 <unknown>");
      pp_space (&asan_pp);
    }
  str_cst = asan_pp_string (&asan_pp);

  /* Emit the prologue sequence.  */
  if (asan_frame_size > 32 && asan_frame_size <= 65536 && pbase
      && ASAN_USE_AFTER_RETURN)
    {
      use_after_return_class = floor_log2 (asan_frame_size - 1) - 5;
      /* __asan_stack_malloc_N guarantees alignment
	 N < 6 ? (64 << N) : 4096 bytes.  */
      if (alignb > (use_after_return_class < 6
		    ? (64U << use_after_return_class) : 4096U))
	use_after_return_class = -1;
      else if (alignb > ASAN_RED_ZONE_SIZE && (asan_frame_size & (alignb - 1)))
	base_align_bias = ((asan_frame_size + alignb - 1)
			   & ~(alignb - HOST_WIDE_INT_1)) - asan_frame_size;
    }
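
  /* Illustrative note, not from the original source: a frame of, say,
     64 bytes gives use_after_return_class = floor_log2 (63) - 5 = 0,
     selecting __asan_stack_malloc_0, which guarantees 64 << 0 = 64-byte
     alignment; the largest class, floor_log2 (65535) - 5 = 10, covers
     frames up to the 65536-byte limit checked above.  */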
  /* Align base if target is STRICT_ALIGNMENT.  */
  if (STRICT_ALIGNMENT)
    base = expand_binop (Pmode, and_optab, base,
			 gen_int_mode (-((GET_MODE_ALIGNMENT (SImode)
					  << ASAN_SHADOW_SHIFT)
					 / BITS_PER_UNIT), Pmode), NULL_RTX,
			 1, OPTAB_DIRECT);

  if (use_after_return_class == -1 && pbase)
    emit_move_insn (pbase, base);

  base = expand_binop (Pmode, add_optab, base,
		       gen_int_mode (base_offset - base_align_bias, Pmode),
		       NULL_RTX, 1, OPTAB_DIRECT);
  orig_base = NULL_RTX;
  if (use_after_return_class != -1)
    {
      if (asan_detect_stack_use_after_return == NULL_TREE)
	{
	  id = get_identifier ("__asan_option_detect_stack_use_after_return");
	  decl = build_decl (BUILTINS_LOCATION, VAR_DECL, id,
			     integer_type_node);
	  SET_DECL_ASSEMBLER_NAME (decl, id);
	  TREE_ADDRESSABLE (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;
	  DECL_EXTERNAL (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  TREE_PUBLIC (decl) = 1;
	  TREE_USED (decl) = 1;
	  asan_detect_stack_use_after_return = decl;
	}
      orig_base = gen_reg_rtx (Pmode);
      emit_move_insn (orig_base, base);
      ret = expand_normal (asan_detect_stack_use_after_return);
      lab = gen_label_rtx ();
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (ret, const0_rtx, EQ, NULL_RTX,
			       VOIDmode, 0, lab, very_likely);
      snprintf (buf, sizeof buf, "__asan_stack_malloc_%d",
		use_after_return_class);
      ret = init_one_libfunc (buf);
      rtx addr = convert_memory_address (ptr_mode, base);
      ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode, 2,
				     GEN_INT (asan_frame_size
					      + base_align_bias),
				     TYPE_MODE (pointer_sized_int_node),
				     addr, ptr_mode);
      ret = convert_memory_address (Pmode, ret);
      emit_move_insn (base, ret);
      emit_label (lab);
      emit_move_insn (pbase, expand_binop (Pmode, add_optab, base,
					   gen_int_mode (base_align_bias
							 - base_offset, Pmode),
					   NULL_RTX, 1, OPTAB_DIRECT));
    }
  mem = gen_rtx_MEM (ptr_mode, base);
  mem = adjust_address (mem, VOIDmode, base_align_bias);
  emit_move_insn (mem, gen_int_mode (ASAN_STACK_FRAME_MAGIC, ptr_mode));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  emit_move_insn (mem, expand_normal (str_cst));
  mem = adjust_address (mem, VOIDmode, GET_MODE_SIZE (ptr_mode));
  ASM_GENERATE_INTERNAL_LABEL (buf, "LASANPC", current_function_funcdef_no);
  id = get_identifier (buf);
  decl = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		     VAR_DECL, id, char_type_node);
  SET_DECL_ASSEMBLER_NAME (decl, id);
  TREE_ADDRESSABLE (decl) = 1;
  TREE_READONLY (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 1;
  TREE_STATIC (decl) = 1;
  TREE_PUBLIC (decl) = 0;
  TREE_USED (decl) = 1;
  DECL_INITIAL (decl) = decl;
  TREE_ASM_WRITTEN (decl) = 1;
  TREE_ASM_WRITTEN (id) = 1;
  emit_move_insn (mem, expand_normal (build_fold_addr_expr (decl)));
  shadow_base = expand_binop (Pmode, lshr_optab, base,
			      GEN_INT (ASAN_SHADOW_SHIFT),
			      NULL_RTX, 1, OPTAB_DIRECT);
  shadow_base
    = plus_constant (Pmode, shadow_base,
		     asan_shadow_offset ()
		     + (base_align_bias >> ASAN_SHADOW_SHIFT));
  gcc_assert (asan_shadow_set != -1
	      && (ASAN_RED_ZONE_SIZE >> ASAN_SHADOW_SHIFT) == 4);
  shadow_mem = gen_rtx_MEM (SImode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);
  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));
  prev_offset = base_offset;
  for (l = length; l; l -= 2)
    {
      if (l == 2)
	cur_shadow_byte = ASAN_STACK_MAGIC_RIGHT;
      offset = offsets[l - 1];
      if ((offset - base_offset) & (ASAN_RED_ZONE_SIZE - 1))
	{
	  int i;
	  HOST_WIDE_INT aoff
	    = base_offset + ((offset - base_offset)
			     & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (aoff - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = aoff;
	  for (i = 0; i < 4; i++, aoff += (1 << ASAN_SHADOW_SHIFT))
	    if (aoff < offset)
	      {
		if (aoff < offset - (1 << ASAN_SHADOW_SHIFT) + 1)
		  shadow_bytes[i] = 0;
		else
		  shadow_bytes[i] = offset - aoff;
	      }
	    else
	      shadow_bytes[i] = ASAN_STACK_MAGIC_PARTIAL;
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset = aoff;
	}
      while (offset <= offsets[l - 2] - ASAN_RED_ZONE_SIZE)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = offset;
	  memset (shadow_bytes, cur_shadow_byte, 4);
	  emit_move_insn (shadow_mem, asan_shadow_cst (shadow_bytes));
	  offset += ASAN_RED_ZONE_SIZE;
	}
      cur_shadow_byte = ASAN_STACK_MAGIC_MIDDLE;
    }
  do_pending_stack_adjust ();

  /* Construct epilogue sequence.  */
  start_sequence ();

  lab = NULL;
  if (use_after_return_class != -1)
    {
      rtx_code_label *lab2 = gen_label_rtx ();
      char c = (char) ASAN_STACK_MAGIC_USE_AFTER_RET;
      int very_likely = REG_BR_PROB_BASE - (REG_BR_PROB_BASE / 2000 - 1);
      emit_cmp_and_jump_insns (orig_base, base, EQ, NULL_RTX,
			       VOIDmode, 0, lab2, very_likely);
      shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
      set_mem_alias_set (shadow_mem, asan_shadow_set);
      mem = gen_rtx_MEM (ptr_mode, base);
      mem = adjust_address (mem, VOIDmode, base_align_bias);
      emit_move_insn (mem, gen_int_mode (ASAN_STACK_RETIRED_MAGIC, ptr_mode));
      unsigned HOST_WIDE_INT sz = asan_frame_size >> ASAN_SHADOW_SHIFT;
      if (use_after_return_class < 5
	  && can_store_by_pieces (sz, builtin_memset_read_str, &c,
				  BITS_PER_UNIT, true))
	store_by_pieces (shadow_mem, sz, builtin_memset_read_str, &c,
			 BITS_PER_UNIT, true, 0);
      else if (use_after_return_class >= 5
	       || !set_storage_via_setmem (shadow_mem,
					   GEN_INT (sz),
					   gen_int_mode (c, QImode),
					   BITS_PER_UNIT, BITS_PER_UNIT,
					   -1, sz, sz, sz))
	{
	  snprintf (buf, sizeof buf, "__asan_stack_free_%d",
		    use_after_return_class);
	  ret = init_one_libfunc (buf);
	  rtx addr = convert_memory_address (ptr_mode, base);
	  rtx orig_addr = convert_memory_address (ptr_mode, orig_base);
	  emit_library_call (ret, LCT_NORMAL, ptr_mode, 3, addr, ptr_mode,
			     GEN_INT (asan_frame_size + base_align_bias),
			     TYPE_MODE (pointer_sized_int_node),
			     orig_addr, ptr_mode);
	}
      lab = gen_label_rtx ();
      emit_jump (lab);
      emit_label (lab2);
    }

  shadow_mem = gen_rtx_MEM (BLKmode, shadow_base);
  set_mem_alias_set (shadow_mem, asan_shadow_set);

  if (STRICT_ALIGNMENT)
    set_mem_align (shadow_mem, (GET_MODE_ALIGNMENT (SImode)));

  prev_offset = base_offset;
  last_offset = base_offset;
  last_size = 0;
  for (l = length; l; l -= 2)
    {
      offset = base_offset + ((offsets[l - 1] - base_offset)
			      & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1));
      if (last_offset + last_size != offset)
	{
	  shadow_mem = adjust_address (shadow_mem, VOIDmode,
				       (last_offset - prev_offset)
				       >> ASAN_SHADOW_SHIFT);
	  prev_offset = last_offset;
	  asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
	  last_offset = offset;
	  last_size = 0;
	}
      last_size += base_offset + ((offsets[l - 2] - base_offset)
				  & ~(ASAN_RED_ZONE_SIZE - HOST_WIDE_INT_1))
		   - offset;
    }
  if (last_size)
    {
      shadow_mem = adjust_address (shadow_mem, VOIDmode,
				   (last_offset - prev_offset)
				   >> ASAN_SHADOW_SHIFT);
      asan_clear_shadow (shadow_mem, last_size >> ASAN_SHADOW_SHIFT);
    }

  do_pending_stack_adjust ();
  if (lab)
    emit_label (lab);

  insns = get_insns ();
  end_sequence ();
  return insns;
}
/* Return true if DECL, a global var, might be overridden and therefore
   needs a local alias.  */

static bool
asan_needs_local_alias (tree decl)
{
  return DECL_WEAK (decl) || !targetm.binds_local_p (decl);
}

/* Return true if DECL is a VAR_DECL that should be protected
   by Address Sanitizer, by appending a red zone with protected
   shadow memory after it and aligning it to at least
   ASAN_RED_ZONE_SIZE bytes.  */

bool
asan_protect_global (tree decl)
{
  if (!ASAN_GLOBALS)
    return false;

  rtx rtl, symbol;

  if (TREE_CODE (decl) == STRING_CST)
    {
      /* Instrument all STRING_CSTs except those created
	 by asan_pp_string here.  */
      if (shadow_ptr_types[0] != NULL_TREE
	  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
	  && TREE_TYPE (TREE_TYPE (decl)) == TREE_TYPE (shadow_ptr_types[0]))
	return false;
      return true;
    }
  if (TREE_CODE (decl) != VAR_DECL
      /* TLS vars aren't statically protectable.  */
      || DECL_THREAD_LOCAL_P (decl)
      /* Externs will be protected elsewhere.  */
      || DECL_EXTERNAL (decl)
      || !DECL_RTL_SET_P (decl)
      /* Comdat vars pose an ABI problem, we can't know if
	 the var that is selected by the linker will have
	 padding or not.  */
      || DECL_ONE_ONLY (decl)
      /* Similarly for common vars.  People can use -fno-common.
	 Note: Linux kernel is built with -fno-common, so we do instrument
	 globals there even if it is C.  */
      || (DECL_COMMON (decl) && TREE_PUBLIC (decl))
      /* Don't protect if using user section, often vars placed
	 into user section from multiple TUs are then assumed
	 to be an array of such vars, putting padding in there
	 breaks this assumption.  */
      || (DECL_SECTION_NAME (decl) != NULL
	  && !symtab_node::get (decl)->implicit_section)
      || DECL_SIZE (decl) == 0
      || ASAN_RED_ZONE_SIZE * BITS_PER_UNIT > MAX_OFILE_ALIGNMENT
      || !valid_constant_size_p (DECL_SIZE_UNIT (decl))
      || DECL_ALIGN_UNIT (decl) > 2 * ASAN_RED_ZONE_SIZE
      || TREE_TYPE (decl) == ubsan_get_source_location_type ())
    return false;

  rtl = DECL_RTL (decl);
  if (!MEM_P (rtl) || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF)
    return false;
  symbol = XEXP (rtl, 0);

  if (CONSTANT_POOL_ADDRESS_P (symbol)
      || TREE_CONSTANT_POOL_ADDRESS_P (symbol))
    return false;

  if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl)))
    return false;

#ifndef ASM_OUTPUT_DEF
  if (asan_needs_local_alias (decl))
    return false;
#endif

  return true;
}

/* Construct a function tree for __asan_report_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
report_error_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
		   int *nargs)
{
  static enum built_in_function report[2][2][6]
    = { { { BUILT_IN_ASAN_REPORT_LOAD1, BUILT_IN_ASAN_REPORT_LOAD2,
	    BUILT_IN_ASAN_REPORT_LOAD4, BUILT_IN_ASAN_REPORT_LOAD8,
	    BUILT_IN_ASAN_REPORT_LOAD16, BUILT_IN_ASAN_REPORT_LOAD_N },
	  { BUILT_IN_ASAN_REPORT_STORE1, BUILT_IN_ASAN_REPORT_STORE2,
	    BUILT_IN_ASAN_REPORT_STORE4, BUILT_IN_ASAN_REPORT_STORE8,
	    BUILT_IN_ASAN_REPORT_STORE16, BUILT_IN_ASAN_REPORT_STORE_N } },
	{ { BUILT_IN_ASAN_REPORT_LOAD1_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD2_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD4_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD8_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD16_NOABORT,
	    BUILT_IN_ASAN_REPORT_LOAD_N_NOABORT },
	  { BUILT_IN_ASAN_REPORT_STORE1_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE2_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE4_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE8_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE16_NOABORT,
	    BUILT_IN_ASAN_REPORT_STORE_N_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (report[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (report[recover_p][is_store][size_log2]);
}

/* Construct a function tree for __asan_{load,store}{1,2,4,8,16,_n}.
   IS_STORE is either 1 (for a store) or 0 (for a load).  */

static tree
check_func (bool is_store, bool recover_p, HOST_WIDE_INT size_in_bytes,
	    int *nargs)
{
  static enum built_in_function check[2][2][6]
    = { { { BUILT_IN_ASAN_LOAD1, BUILT_IN_ASAN_LOAD2,
	    BUILT_IN_ASAN_LOAD4, BUILT_IN_ASAN_LOAD8,
	    BUILT_IN_ASAN_LOAD16, BUILT_IN_ASAN_LOADN },
	  { BUILT_IN_ASAN_STORE1, BUILT_IN_ASAN_STORE2,
	    BUILT_IN_ASAN_STORE4, BUILT_IN_ASAN_STORE8,
	    BUILT_IN_ASAN_STORE16, BUILT_IN_ASAN_STOREN } },
	{ { BUILT_IN_ASAN_LOAD1_NOABORT,
	    BUILT_IN_ASAN_LOAD2_NOABORT,
	    BUILT_IN_ASAN_LOAD4_NOABORT,
	    BUILT_IN_ASAN_LOAD8_NOABORT,
	    BUILT_IN_ASAN_LOAD16_NOABORT,
	    BUILT_IN_ASAN_LOADN_NOABORT },
	  { BUILT_IN_ASAN_STORE1_NOABORT,
	    BUILT_IN_ASAN_STORE2_NOABORT,
	    BUILT_IN_ASAN_STORE4_NOABORT,
	    BUILT_IN_ASAN_STORE8_NOABORT,
	    BUILT_IN_ASAN_STORE16_NOABORT,
	    BUILT_IN_ASAN_STOREN_NOABORT } } };
  if (size_in_bytes == -1)
    {
      *nargs = 2;
      return builtin_decl_implicit (check[recover_p][is_store][5]);
    }
  *nargs = 1;
  int size_log2 = exact_log2 (size_in_bytes);
  return builtin_decl_implicit (check[recover_p][is_store][size_log2]);
}
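
/* Illustrative sketch (editorial addition): both tables above are
   indexed as [recover_p][is_store][idx], where idx is
   exact_log2 (size_in_bytes) for the power-of-two sizes 1..16 and 5
   for the variable-size _N/N variants.  For instance:

     int nargs;
     tree fn = check_func (true  /+ is_store +/, false /+ recover_p +/,
			   8 /+ size_in_bytes +/, &nargs);

   yields the decl of BUILT_IN_ASAN_STORE8 with nargs == 1, while
   size_in_bytes == -1 selects the two-argument *N variant.  ('/+'
   stands in for comment delimiters inside this sketch.)  */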

/* Split the current basic block and create a condition statement
   insertion point right before or after the statement pointed to by
   ITER.  Return an iterator to the point at which the caller might
   safely insert the condition statement.

   THEN_BLOCK must be set to the address of an uninitialized instance
   of basic_block.  The function will then set *THEN_BLOCK to the
   'then block' of the condition statement to be inserted by the
   caller.

   If CREATE_THEN_FALLTHRU_EDGE is false, no edge will be created from
   *THEN_BLOCK to *FALLTHROUGH_BLOCK.

   Similarly, the function will set *FALLTHROUGH_BLOCK to the 'else
   block' of the condition statement to be inserted by the caller.

   Note that *FALLTHROUGH_BLOCK is a new block that contains the
   statements starting from *ITER, and *THEN_BLOCK is a new empty
   block.

   *ITER is adjusted to always point to the first statement of the
   basic block *FALLTHROUGH_BLOCK.  That statement is the same as what
   ITER was pointing to prior to calling this function, if BEFORE_P is
   true; otherwise, it is its following statement.  */
gimple_stmt_iterator
create_cond_insert_point (gimple_stmt_iterator *iter,
			  bool before_p,
			  bool then_more_likely_p,
			  bool create_then_fallthru_edge,
			  basic_block *then_block,
			  basic_block *fallthrough_block)
{
  gimple_stmt_iterator gsi = *iter;

  if (!gsi_end_p (gsi) && before_p)
    gsi_prev (&gsi);

  basic_block cur_bb = gsi_bb (*iter);

  edge e = split_block (cur_bb, gsi_stmt (gsi));

  /* Get a hold on the 'condition block', the 'then block' and the
     'else block'.  */
  basic_block cond_bb = e->src;
  basic_block fallthru_bb = e->dest;
  basic_block then_bb = create_empty_bb (cond_bb);
  if (current_loops)
    {
      add_bb_to_loop (then_bb, cond_bb->loop_father);
      loops_state_set (LOOPS_NEED_FIXUP);
    }

  /* Set up the newly created 'then block'.  */
  e = make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
  int fallthrough_probability
    = then_more_likely_p
      ? PROB_VERY_UNLIKELY
      : PROB_ALWAYS - PROB_VERY_UNLIKELY;
  e->probability = PROB_ALWAYS - fallthrough_probability;
  if (create_then_fallthru_edge)
    make_single_succ_edge (then_bb, fallthru_bb, EDGE_FALLTHRU);

  /* Set up the fallthrough basic block.  */
  e = find_edge (cond_bb, fallthru_bb);
  e->flags = EDGE_FALSE_VALUE;
  e->count = cond_bb->count;
  e->probability = fallthrough_probability;

  /* Update dominance info for the newly created then_bb; note that
     fallthru_bb's dominance info has already been updated by
     split_block.  */
  if (dom_info_available_p (CDI_DOMINATORS))
    set_immediate_dominator (CDI_DOMINATORS, then_bb, cond_bb);

  *then_block = then_bb;
  *fallthrough_block = fallthru_bb;
  *iter = gsi_start_bb (fallthru_bb);

  return gsi_last_bb (cond_bb);
}
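
/* Illustrative sketch (editorial addition) of the CFG shape produced
   above:

	  cond_bb
	 /       \
	/ true    \ false
       v           v
     then_bb ---> fallthru_bb
	(EDGE_FALLTHRU, only if CREATE_THEN_FALLTHRU_EDGE)

   The caller appends its condition statement to cond_bb via the
   returned iterator, places the instrumentation into then_bb, and the
   original statements starting at *ITER live on in fallthru_bb.  */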

/* Insert an if condition followed by a 'then block' right before the
   statement pointed to by ITER.  The fallthrough block -- which is
   the else block of the condition as well as the destination of the
   outgoing edge of the 'then block' -- starts with the statement
   pointed to by ITER.

   COND is the condition of the if.

   If THEN_MORE_LIKELY_P is true, the probability of the edge to the
   'then block' is higher than the probability of the edge to the
   fallthrough block.

   Upon completion of the function, *THEN_BB is set to the newly
   inserted 'then block' and similarly, *FALLTHROUGH_BB is set to the
   fallthrough block.

   *ITER is adjusted to still point to the same statement it was
   pointing to initially.  */

static void
insert_if_then_before_iter (gcond *cond,
			    gimple_stmt_iterator *iter,
			    bool then_more_likely_p,
			    basic_block *then_bb,
			    basic_block *fallthrough_bb)
{
  gimple_stmt_iterator cond_insert_point
    = create_cond_insert_point (iter,
				/*before_p=*/true,
				then_more_likely_p,
				/*create_then_fallthru_edge=*/true,
				then_bb,
				fallthrough_bb);
  gsi_insert_after (&cond_insert_point, cond, GSI_NEW_STMT);
}

/* Build
   (base_addr >> ASAN_SHADOW_SHIFT) + asan_shadow_offset ().  */

static tree
build_shadow_mem_access (gimple_stmt_iterator *gsi, location_t location,
			 tree base_addr, tree shadow_ptr_type)
{
  tree t, uintptr_type = TREE_TYPE (base_addr);
  tree shadow_type = TREE_TYPE (shadow_ptr_type);
  gimple g;

  t = build_int_cst (uintptr_type, ASAN_SHADOW_SHIFT);
  g = gimple_build_assign (make_ssa_name (uintptr_type), RSHIFT_EXPR,
			   base_addr, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build_int_cst (uintptr_type, asan_shadow_offset ());
  g = gimple_build_assign (make_ssa_name (uintptr_type), PLUS_EXPR,
			   gimple_assign_lhs (g), t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  g = gimple_build_assign (make_ssa_name (shadow_ptr_type), NOP_EXPR,
			   gimple_assign_lhs (g));
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);

  t = build2 (MEM_REF, shadow_type, gimple_assign_lhs (g),
	      build_int_cst (shadow_ptr_type, 0));
  g = gimple_build_assign (make_ssa_name (shadow_type), MEM_REF, t);
  gimple_set_location (g, location);
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}
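
/* Illustrative sketch (editorial addition, not compiled): on a
   typical 64-bit configuration where ASAN_SHADOW_SHIFT is 3, the
   GIMPLE emitted above computes the C equivalent of

     unsigned char shadow
       = *(unsigned char *) ((base_addr >> 3) + asan_shadow_offset ());

   i.e. it loads the one shadow byte covering the 8 application bytes
   at BASE_ADDR.  The concrete shadow offset is target-specific;
   asan_shadow_offset is a target hook.  */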

/* BASE can already be an SSA_NAME; in that case, do not create a
   new SSA_NAME for it.  */

static tree
maybe_create_ssa_name (location_t loc, tree base, gimple_stmt_iterator *iter,
		       bool before_p)
{
  if (TREE_CODE (base) == SSA_NAME)
    return base;
  gimple g = gimple_build_assign (make_ssa_name (TREE_TYPE (base)),
				  TREE_CODE (base), base);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}

/* LEN can already have the necessary size and precision;
   in that case, do not create a new variable.  */

tree
maybe_cast_to_ptrmode (location_t loc, tree len, gimple_stmt_iterator *iter,
		       bool before_p)
{
  if (ptrofftype_p (len))
    return len;
  gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				  NOP_EXPR, len);
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (iter, g, GSI_SAME_STMT);
  else
    gsi_insert_after (iter, g, GSI_NEW_STMT);
  return gimple_assign_lhs (g);
}

/* Instrument the memory access instruction BASE.  Insert new
   statements before or after ITER.

   Note that the memory access represented by BASE can be either an
   SSA_NAME, or a non-SSA expression.  LOC is the source code
   location.  IS_STORE is TRUE for a store, FALSE for a load.
   BEFORE_P is TRUE for inserting the instrumentation code before
   ITER, FALSE for inserting it after ITER.  IS_SCALAR_ACCESS is TRUE
   for a scalar memory access and FALSE for a memory region access.
   IS_NON_ZERO_LEN is TRUE if the memory region is guaranteed to have
   non-zero length.  ALIGN gives the alignment of the accessed memory
   object.

   If BEFORE_P is TRUE, *ITER is arranged to still point to the
   statement it was pointing to prior to calling this function,
   otherwise, it points to the statement logically following it.  */
static void
build_check_stmt (location_t loc, tree base, tree len,
		  HOST_WIDE_INT size_in_bytes, gimple_stmt_iterator *iter,
		  bool is_non_zero_len, bool before_p, bool is_store,
		  bool is_scalar_access, unsigned int align = 0)
{
  gimple_stmt_iterator gsi = *iter;
  gimple g;

  gcc_assert (!(size_in_bytes > 0 && !is_non_zero_len));

  base = unshare_expr (base);
  base = maybe_create_ssa_name (loc, base, &gsi, before_p);

  if (len)
    {
      len = unshare_expr (len);
      len = maybe_cast_to_ptrmode (loc, len, iter, before_p);
    }
  else
    {
      gcc_assert (size_in_bytes != -1);
      len = build_int_cst (pointer_sized_int_node, size_in_bytes);
    }

  if (size_in_bytes > 1)
    {
      if ((size_in_bytes & (size_in_bytes - 1)) != 0
	  || size_in_bytes > 16)
	is_scalar_access = false;
      else if (align && align < size_in_bytes * BITS_PER_UNIT)
	{
	  /* On non-strict-alignment targets, a 16-byte access that is
	     only 8-byte aligned results in a misaligned 2-byte shadow
	     memory load, but can still be handled using one read.  */
	  if (size_in_bytes != 16
	      || STRICT_ALIGNMENT
	      || align < 8 * BITS_PER_UNIT)
	    is_scalar_access = false;
	}
    }

  HOST_WIDE_INT flags = 0;
  if (is_store)
    flags |= ASAN_CHECK_STORE;
  if (is_non_zero_len)
    flags |= ASAN_CHECK_NON_ZERO_LEN;
  if (is_scalar_access)
    flags |= ASAN_CHECK_SCALAR_ACCESS;

  g = gimple_build_call_internal (IFN_ASAN_CHECK, 4,
				  build_int_cst (integer_type_node, flags),
				  base, len,
				  build_int_cst (integer_type_node,
						 align / BITS_PER_UNIT));
  gimple_set_location (g, loc);
  if (before_p)
    gsi_insert_before (&gsi, g, GSI_SAME_STMT);
  else
    {
      gsi_insert_after (&gsi, g, GSI_NEW_STMT);
      gsi_next (&gsi);
      *iter = gsi;
    }
}
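
/* Illustrative sketch (editorial addition): for a 4-byte aligned
   store through `int *p', the internal call built above is roughly

     ASAN_CHECK (ASAN_CHECK_STORE | ASAN_CHECK_NON_ZERO_LEN
		 | ASAN_CHECK_SCALAR_ACCESS, p_1, 4, 4);

   in GIMPLE-dump notation.  asan_expand_check_ifn (below) later
   expands it either into an inline shadow-memory test or into an
   __asan_store4 callback, depending on its use_calls argument.  */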

/* If T represents a memory access, add instrumentation code before ITER.
   LOCATION is the source code location.
   IS_STORE is either TRUE (for a store) or FALSE (for a load).  */

static void
instrument_derefs (gimple_stmt_iterator *iter, tree t,
		   location_t location, bool is_store)
{
  if (is_store && !ASAN_INSTRUMENT_WRITES)
    return;
  if (!is_store && !ASAN_INSTRUMENT_READS)
    return;

  tree type, base;
  HOST_WIDE_INT size_in_bytes;

  type = TREE_TYPE (t);
  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case COMPONENT_REF:
    case INDIRECT_REF:
    case MEM_REF:
    case VAR_DECL:
    case BIT_FIELD_REF:
      break;
    default:
      return;
    }

  size_in_bytes = int_size_in_bytes (type);
  if (size_in_bytes <= 0)
    return;

  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int volatilep = 0, unsignedp = 0;
  tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
				    &mode, &unsignedp, &volatilep, false);

  if (TREE_CODE (t) == COMPONENT_REF
      && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)) != NULL_TREE)
    {
      tree repr = DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1));
      instrument_derefs (iter, build3 (COMPONENT_REF, TREE_TYPE (repr),
				       TREE_OPERAND (t, 0), repr,
				       NULL_TREE), location, is_store);
      return;
    }

  if (bitpos % BITS_PER_UNIT
      || bitsize != size_in_bytes * BITS_PER_UNIT)
    return;

  if (TREE_CODE (inner) == VAR_DECL
      && offset == NULL_TREE
      && bitpos >= 0
      && DECL_SIZE (inner)
      && tree_fits_shwi_p (DECL_SIZE (inner))
      && bitpos + bitsize <= tree_to_shwi (DECL_SIZE (inner)))
    {
      if (DECL_THREAD_LOCAL_P (inner))
	return;
      if (!ASAN_GLOBALS && is_global_var (inner))
	return;
      if (!TREE_STATIC (inner))
	{
	  /* Automatic vars in the current function will always be
	     accessible.  */
	  if (decl_function_context (inner) == current_function_decl)
	    return;
	}
      /* Always instrument external vars, they might be dynamically
	 initialized.  */
      else if (!DECL_EXTERNAL (inner))
	{
	  /* Static vars known not to be dynamically initialized will
	     always be accessible.  */
	  varpool_node *vnode = varpool_node::get (inner);
	  if (vnode && !vnode->dynamically_initialized)
	    return;
	}
    }

  base = build_fold_addr_expr (t);
  if (!has_mem_ref_been_instrumented (base, size_in_bytes))
    {
      unsigned int align = get_object_alignment (t);
      build_check_stmt (location, base, NULL_TREE, size_in_bytes, iter,
			/*is_non_zero_len*/size_in_bytes > 0, /*before_p=*/true,
			is_store, /*is_scalar_access*/true, align);
      update_mem_ref_hash_table (base, size_in_bytes);
      update_mem_ref_hash_table (t, size_in_bytes);
    }
}
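
/* Illustrative sketch (editorial addition, not compiled): given

     struct S { int x; } s;
     void f (struct S *p) { p->x = 1; }

   the store p->x is a 4-byte, byte-aligned scalar access, so the
   function above emits one check for &p->x before the statement.  By
   contrast, an access to a local such as `s.x = 1;' inside f is
   skipped by the early-return path above, since automatic vars of the
   current function are always accessible.  */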

/* Insert a memory reference into the hash table if the access length
   can be determined at compile time.  */

static void
maybe_update_mem_ref_hash_table (tree base, tree len)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base))
      || !INTEGRAL_TYPE_P (TREE_TYPE (len)))
    return;

  HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;
  if (size_in_bytes != -1)
    update_mem_ref_hash_table (base, size_in_bytes);
}

/* Instrument an access to a contiguous memory region that starts at
   the address pointed to by BASE, over a length of LEN (expressed in
   units of sizeof (*BASE)).  ITER points to the instruction before
   which the instrumentation instructions must be inserted.  LOCATION
   is the source location that the instrumentation instructions must
   have.  If IS_STORE is true, then the memory access is a store;
   otherwise, it's a load.  */

static void
instrument_mem_region_access (tree base, tree len,
			      gimple_stmt_iterator *iter,
			      location_t location, bool is_store)
{
  if (!POINTER_TYPE_P (TREE_TYPE (base))
      || !INTEGRAL_TYPE_P (TREE_TYPE (len))
      || integer_zerop (len))
    return;

  HOST_WIDE_INT size_in_bytes = tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  if ((size_in_bytes == -1)
      || !has_mem_ref_been_instrumented (base, size_in_bytes))
    {
      build_check_stmt (location, base, len, size_in_bytes, iter,
			/*is_non_zero_len*/size_in_bytes > 0, /*before_p*/true,
			is_store, /*is_scalar_access*/false, /*align*/0);
    }

  maybe_update_mem_ref_hash_table (base, len);
  *iter = gsi_for_stmt (gsi_stmt (*iter));
}

/* Instrument the call to a built-in memory access function that is
   pointed to by the iterator ITER.

   Upon completion, return TRUE iff *ITER has been advanced to the
   statement following the one it was originally pointing to.  */

static bool
instrument_builtin_call (gimple_stmt_iterator *iter)
{
  if (!ASAN_MEMINTRIN)
    return false;

  bool iter_advanced_p = false;
  gcall *call = as_a <gcall *> (gsi_stmt (*iter));

  gcc_checking_assert (gimple_call_builtin_p (call, BUILT_IN_NORMAL));

  location_t loc = gimple_location (call);

  asan_mem_ref src0, src1, dest;
  asan_mem_ref_init (&src0, NULL, 1);
  asan_mem_ref_init (&src1, NULL, 1);
  asan_mem_ref_init (&dest, NULL, 1);

  tree src0_len = NULL_TREE, src1_len = NULL_TREE, dest_len = NULL_TREE;
  bool src0_is_store = false, src1_is_store = false, dest_is_store = false,
    dest_is_deref = false, intercepted_p = true;

  if (get_mem_refs_of_builtin_call (call,
				    &src0, &src0_len, &src0_is_store,
				    &src1, &src1_len, &src1_is_store,
				    &dest, &dest_len, &dest_is_store,
				    &dest_is_deref, &intercepted_p))
    {
      if (dest_is_deref)
	{
	  instrument_derefs (iter, dest.start, loc, dest_is_store);
	  gsi_next (iter);
	  iter_advanced_p = true;
	}
      else if (!intercepted_p
	       && (src0_len || src1_len || dest_len))
	{
	  if (src0.start != NULL_TREE)
	    instrument_mem_region_access (src0.start, src0_len,
					  iter, loc, /*is_store=*/false);
	  if (src1.start != NULL_TREE)
	    instrument_mem_region_access (src1.start, src1_len,
					  iter, loc, /*is_store=*/false);
	  if (dest.start != NULL_TREE)
	    instrument_mem_region_access (dest.start, dest_len,
					  iter, loc, /*is_store=*/true);

	  *iter = gsi_for_stmt (call);
	  gsi_next (iter);
	  iter_advanced_p = true;
	}
      else
	{
	  if (src0.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (src0.start, src0_len);
	  if (src1.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (src1.start, src1_len);
	  if (dest.start != NULL_TREE)
	    maybe_update_mem_ref_hash_table (dest.start, dest_len);
	}
    }
  return iter_advanced_p;
}

/* Instrument the assignment statement ITER if it is subject to
   instrumentation.  Return TRUE iff instrumentation actually
   happened.  In that case, the iterator ITER is advanced to the next
   logical expression following the one initially pointed to by ITER,
   and the relevant memory reference whose access has been
   instrumented is added to the memory references hash table.  */

static bool
maybe_instrument_assignment (gimple_stmt_iterator *iter)
{
  gimple s = gsi_stmt (*iter);

  gcc_assert (gimple_assign_single_p (s));

  tree ref_expr = NULL_TREE;
  bool is_store, is_instrumented = false;

  if (gimple_store_p (s))
    {
      ref_expr = gimple_assign_lhs (s);
      is_store = true;
      instrument_derefs (iter, ref_expr,
			 gimple_location (s),
			 is_store);
      is_instrumented = true;
    }

  if (gimple_assign_load_p (s))
    {
      ref_expr = gimple_assign_rhs1 (s);
      is_store = false;
      instrument_derefs (iter, ref_expr,
			 gimple_location (s),
			 is_store);
      is_instrumented = true;
    }

  if (is_instrumented)
    gsi_next (iter);

  return is_instrumented;
}

/* Instrument the function call pointed to by the iterator ITER, if it
   is subject to instrumentation.  At the moment, the only function
   calls that are instrumented are some built-in functions that access
   memory.  Look at instrument_builtin_call to learn more.

   Upon completion return TRUE iff *ITER was advanced to the statement
   following the one it was originally pointing to.  */

static bool
maybe_instrument_call (gimple_stmt_iterator *iter)
{
  gimple stmt = gsi_stmt (*iter);
  bool is_builtin = gimple_call_builtin_p (stmt, BUILT_IN_NORMAL);

  if (is_builtin && instrument_builtin_call (iter))
    return true;

  if (gimple_call_noreturn_p (stmt))
    {
      if (is_builtin)
	{
	  tree callee = gimple_call_fndecl (stmt);
	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_UNREACHABLE:
	    case BUILT_IN_TRAP:
	      /* Don't instrument these.  */
	      return false;
	    default:
	      break;
	    }
	}
      tree decl = builtin_decl_implicit (BUILT_IN_ASAN_HANDLE_NO_RETURN);
      gimple g = gimple_build_call (decl, 0);
      gimple_set_location (g, gimple_location (stmt));
      gsi_insert_before (iter, g, GSI_SAME_STMT);
    }
  return false;
}

/* Walk each instruction of all basic blocks and instrument those that
   represent memory references: loads, stores, or function calls.
   In a given basic block, this function avoids instrumenting memory
   references that have already been instrumented.  */

static void
transform_statements (void)
{
  basic_block bb, last_bb = NULL;
  gimple_stmt_iterator i;
  int saved_last_basic_block = last_basic_block_for_fn (cfun);

  FOR_EACH_BB_FN (bb, cfun)
    {
      basic_block prev_bb = bb;

      if (bb->index >= saved_last_basic_block)
	continue;

      /* Flush the mem ref hash table, if current bb doesn't have
	 exactly one predecessor, or if that predecessor (skipping
	 over asan created basic blocks) isn't the last processed
	 basic block.  Thus we effectively flush on extended basic
	 block boundaries.  */
      while (single_pred_p (prev_bb))
	{
	  prev_bb = single_pred (prev_bb);
	  if (prev_bb->index < saved_last_basic_block)
	    break;
	}
      if (prev_bb != last_bb)
	empty_mem_ref_hash_table ();
      last_bb = bb;

      for (i = gsi_start_bb (bb); !gsi_end_p (i);)
	{
	  gimple s = gsi_stmt (i);

	  if (has_stmt_been_instrumented_p (s))
	    gsi_next (&i);
	  else if (gimple_assign_single_p (s)
		   && !gimple_clobber_p (s)
		   && maybe_instrument_assignment (&i))
	    /* Nothing to do as maybe_instrument_assignment advanced
	       the iterator I.  */;
	  else if (is_gimple_call (s) && maybe_instrument_call (&i))
	    /* Nothing to do as maybe_instrument_call
	       advanced the iterator I.  */;
	  else
	    {
	      /* No instrumentation happened.

		 If the current instruction is a function call that
		 might free something, let's forget about the memory
		 references that got instrumented.  Otherwise we might
		 miss some instrumentation opportunities.  */
	      if (is_gimple_call (s) && !nonfreeing_call_p (s))
		empty_mem_ref_hash_table ();

	      gsi_next (&i);
	    }
	}
    }
  free_mem_ref_resources ();
}

/* Build
   __asan_before_dynamic_init (module_name)
   or
   __asan_after_dynamic_init ()
   call.  */

tree
asan_dynamic_init_call (bool after_p)
{
  tree fn = builtin_decl_implicit (after_p
				   ? BUILT_IN_ASAN_AFTER_DYNAMIC_INIT
				   : BUILT_IN_ASAN_BEFORE_DYNAMIC_INIT);
  tree module_name_cst = NULL_TREE;
  if (!after_p)
    {
      pretty_printer module_name_pp;
      pp_string (&module_name_pp, main_input_filename);

      if (shadow_ptr_types[0] == NULL_TREE)
	asan_init_shadow_ptr_types ();
      module_name_cst = asan_pp_string (&module_name_pp);
      module_name_cst = fold_convert (const_ptr_type_node,
				      module_name_cst);
    }

  return build_call_expr (fn, after_p ? 0 : 1, module_name_cst);
}
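
/* Illustrative sketch (editorial addition, not compiled): for a TU
   named foo.c (hypothetical), dynamic initialization of its
   instrumented globals ends up bracketed roughly as

     __asan_before_dynamic_init ("foo.c");
     ... the TU's dynamic initializers run here ...
     __asan_after_dynamic_init ();

   which is what lets the runtime detect initialization-order bugs.  */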

/* Build
   struct __asan_global
   {
     const void *__beg;
     uptr __size;
     uptr __size_with_redzone;
     const void *__name;
     const void *__module_name;
     uptr __has_dynamic_init;
     __asan_global_source_location *__location;
   } type.  */

static tree
asan_global_struct (void)
{
  static const char *field_names[7]
    = { "__beg", "__size", "__size_with_redzone",
	"__name", "__module_name", "__has_dynamic_init", "__location" };
  tree fields[7], ret;
  int i;

  ret = make_node (RECORD_TYPE);
  for (i = 0; i < 7; i++)
    {
      fields[i]
	= build_decl (UNKNOWN_LOCATION, FIELD_DECL,
		      get_identifier (field_names[i]),
		      (i == 0 || i == 3) ? const_ptr_type_node
		      : pointer_sized_int_node);
      DECL_CONTEXT (fields[i]) = ret;
      if (i)
	DECL_CHAIN (fields[i - 1]) = fields[i];
    }
  tree type_decl = build_decl (input_location, TYPE_DECL,
			       get_identifier ("__asan_global"), ret);
  DECL_IGNORED_P (type_decl) = 1;
  DECL_ARTIFICIAL (type_decl) = 1;
  TYPE_FIELDS (ret) = fields[0];
  TYPE_NAME (ret) = type_decl;
  TYPE_STUB_DECL (ret) = type_decl;
  layout_type (ret);
  return ret;
}

/* Append description of a single global DECL into vector V.
   TYPE is __asan_global struct type as returned by asan_global_struct.  */

static void
asan_add_global (tree decl, tree type, vec<constructor_elt, va_gc> *v)
{
  tree init, uptr = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
  unsigned HOST_WIDE_INT size;
  tree str_cst, module_name_cst, refdecl = decl;
  vec<constructor_elt, va_gc> *vinner = NULL;

  pretty_printer asan_pp, module_name_pp;

  if (DECL_NAME (decl))
    pp_tree_identifier (&asan_pp, DECL_NAME (decl));
  else
    pp_string (&asan_pp, "<unknown>");
  str_cst = asan_pp_string (&asan_pp);

  pp_string (&module_name_pp, main_input_filename);
  module_name_cst = asan_pp_string (&module_name_pp);

  if (asan_needs_local_alias (decl))
    {
      char buf[20];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", vec_safe_length (v) + 1);
      refdecl = build_decl (DECL_SOURCE_LOCATION (decl),
			    VAR_DECL, get_identifier (buf), TREE_TYPE (decl));
      TREE_ADDRESSABLE (refdecl) = TREE_ADDRESSABLE (decl);
      TREE_READONLY (refdecl) = TREE_READONLY (decl);
      TREE_THIS_VOLATILE (refdecl) = TREE_THIS_VOLATILE (decl);
      DECL_GIMPLE_REG_P (refdecl) = DECL_GIMPLE_REG_P (decl);
      DECL_ARTIFICIAL (refdecl) = DECL_ARTIFICIAL (decl);
      DECL_IGNORED_P (refdecl) = DECL_IGNORED_P (decl);
      TREE_STATIC (refdecl) = 1;
      TREE_PUBLIC (refdecl) = 0;
      TREE_USED (refdecl) = 1;
      assemble_alias (refdecl, DECL_ASSEMBLER_NAME (decl));
    }

  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node,
					build_fold_addr_expr (refdecl)));
  size = tree_to_uhwi (DECL_SIZE_UNIT (decl));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  size += asan_red_zone_size (size);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, build_int_cst (uptr, size));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node, str_cst));
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  fold_convert (const_ptr_type_node, module_name_cst));
  varpool_node *vnode = varpool_node::get (decl);
  int has_dynamic_init = vnode ? vnode->dynamically_initialized : 0;
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE,
			  build_int_cst (uptr, has_dynamic_init));
  tree locptr = NULL_TREE;
  location_t loc = DECL_SOURCE_LOCATION (decl);
  expanded_location xloc = expand_location (loc);
  if (xloc.file != NULL)
    {
      static int lasanloccnt = 0;
      char buf[25];
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASANLOC", ++lasanloccnt);
      tree var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
			     ubsan_get_source_location_type ());
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      pretty_printer filename_pp;
      pp_string (&filename_pp, xloc.file);
      tree str = asan_pp_string (&filename_pp);
      tree ctor = build_constructor_va (TREE_TYPE (var), 3,
					NULL_TREE, str, NULL_TREE,
					build_int_cst (unsigned_type_node,
						       xloc.line), NULL_TREE,
					build_int_cst (unsigned_type_node,
						       xloc.column));
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);
      locptr = fold_convert (uptr, build_fold_addr_expr (var));
    }
  else
    locptr = build_int_cst (uptr, 0);
  CONSTRUCTOR_APPEND_ELT (vinner, NULL_TREE, locptr);
  init = build_constructor (type, vinner);
  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, init);
}
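
/* Illustrative sketch (editorial addition, not compiled): for
   `int g = 1;' defined at foo.c:3, and assuming ASAN_RED_ZONE_SIZE is
   32 so that asan_red_zone_size (4) == 60, the record appended above
   is roughly

     { &g, 4, 64, "g", "foo.c", 0, &.LASANLOC1 }

   i.e. begin address, object size, size rounded up to include the
   trailing red zone, name, module name, no dynamic init, and a
   pointer to the generated source-location record.  */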

/* Initialize sanitizer.def builtins if the FE hasn't initialized them.  */

void
initialize_sanitizer_builtins (void)
{
  tree decl;

  if (builtin_decl_implicit_p (BUILT_IN_ASAN_INIT))
    return;

  tree BT_FN_VOID = build_function_type_list (void_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR
    = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_CONST_PTR
    = build_function_type_list (void_type_node, const_ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
				ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTR_PTR
    = build_function_type_list (void_type_node, ptr_type_node,
				ptr_type_node, ptr_type_node, NULL_TREE);
  tree BT_FN_VOID_PTR_PTRMODE
    = build_function_type_list (void_type_node, ptr_type_node,
				pointer_sized_int_node, NULL_TREE);
  tree BT_FN_VOID_INT
    = build_function_type_list (void_type_node, integer_type_node, NULL_TREE);
  tree BT_FN_SIZE_CONST_PTR_INT
    = build_function_type_list (size_type_node, const_ptr_type_node,
				integer_type_node, NULL_TREE);
  tree BT_FN_BOOL_VPTR_PTR_IX_INT_INT[5];
  tree BT_FN_IX_CONST_VPTR_INT[5];
  tree BT_FN_IX_VPTR_IX_INT[5];
  tree BT_FN_VOID_VPTR_IX_INT[5];
  tree vptr
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_VOLATILE));
  tree cvptr
    = build_pointer_type (build_qualified_type (void_type_node,
						TYPE_QUAL_VOLATILE
						| TYPE_QUAL_CONST));
  tree boolt
    = lang_hooks.types.type_for_size (BOOL_TYPE_SIZE, 1);
  int i;
  for (i = 0; i < 5; i++)
    {
      tree ix = build_nonstandard_integer_type (BITS_PER_UNIT * (1 << i), 1);
      BT_FN_BOOL_VPTR_PTR_IX_INT_INT[i]
	= build_function_type_list (boolt, vptr, ptr_type_node, ix,
				    integer_type_node, integer_type_node,
				    NULL_TREE);
      BT_FN_IX_CONST_VPTR_INT[i]
	= build_function_type_list (ix, cvptr, integer_type_node, NULL_TREE);
      BT_FN_IX_VPTR_IX_INT[i]
	= build_function_type_list (ix, vptr, ix, integer_type_node,
				    NULL_TREE);
      BT_FN_VOID_VPTR_IX_INT[i]
	= build_function_type_list (void_type_node, vptr, ix,
				    integer_type_node, NULL_TREE);
    }
#define BT_FN_BOOL_VPTR_PTR_I1_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[0]
#define BT_FN_I1_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[0]
#define BT_FN_I1_VPTR_I1_INT BT_FN_IX_VPTR_IX_INT[0]
#define BT_FN_VOID_VPTR_I1_INT BT_FN_VOID_VPTR_IX_INT[0]
#define BT_FN_BOOL_VPTR_PTR_I2_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[1]
#define BT_FN_I2_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[1]
#define BT_FN_I2_VPTR_I2_INT BT_FN_IX_VPTR_IX_INT[1]
#define BT_FN_VOID_VPTR_I2_INT BT_FN_VOID_VPTR_IX_INT[1]
#define BT_FN_BOOL_VPTR_PTR_I4_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[2]
#define BT_FN_I4_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[2]
#define BT_FN_I4_VPTR_I4_INT BT_FN_IX_VPTR_IX_INT[2]
#define BT_FN_VOID_VPTR_I4_INT BT_FN_VOID_VPTR_IX_INT[2]
#define BT_FN_BOOL_VPTR_PTR_I8_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[3]
#define BT_FN_I8_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[3]
#define BT_FN_I8_VPTR_I8_INT BT_FN_IX_VPTR_IX_INT[3]
#define BT_FN_VOID_VPTR_I8_INT BT_FN_VOID_VPTR_IX_INT[3]
#define BT_FN_BOOL_VPTR_PTR_I16_INT_INT BT_FN_BOOL_VPTR_PTR_IX_INT_INT[4]
#define BT_FN_I16_CONST_VPTR_INT BT_FN_IX_CONST_VPTR_INT[4]
#define BT_FN_I16_VPTR_I16_INT BT_FN_IX_VPTR_IX_INT[4]
#define BT_FN_VOID_VPTR_I16_INT BT_FN_VOID_VPTR_IX_INT[4]
#undef ATTR_NOTHROW_LEAF_LIST
#define ATTR_NOTHROW_LEAF_LIST ECF_NOTHROW | ECF_LEAF
#undef ATTR_TMPURE_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NOTHROW_LEAF_LIST ECF_TM_PURE | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_NORETURN_NOTHROW_LEAF_LIST ECF_NORETURN | ATTR_NOTHROW_LEAF_LIST
#undef ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST \
  ECF_CONST | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_TMPURE_NORETURN_NOTHROW_LEAF_LIST \
  ECF_TM_PURE | ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NOTHROW_LEAF_LIST
#define ATTR_COLD_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NOTHROW_LEAF_LIST
#undef ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST
#define ATTR_COLD_CONST_NORETURN_NOTHROW_LEAF_LIST \
  /* ECF_COLD missing */ ATTR_CONST_NORETURN_NOTHROW_LEAF_LIST
#undef ATTR_PURE_NOTHROW_LEAF_LIST
#define ATTR_PURE_NOTHROW_LEAF_LIST ECF_PURE | ATTR_NOTHROW_LEAF_LIST
#undef DEF_SANITIZER_BUILTIN
#define DEF_SANITIZER_BUILTIN(ENUM, NAME, TYPE, ATTRS) \
  decl = add_builtin_function ("__builtin_" NAME, TYPE, ENUM, \
			       BUILT_IN_NORMAL, NAME, NULL_TREE); \
  set_call_expr_flags (decl, ATTRS); \
  set_builtin_decl (ENUM, decl, true);

#include "sanitizer.def"

  /* -fsanitize=object-size uses __builtin_object_size, but that might
     not be available for e.g. Fortran at this point.  We use
     DEF_SANITIZER_BUILTIN here only as a convenience macro.  */
  if ((flag_sanitize & SANITIZE_OBJECT_SIZE)
      && !builtin_decl_implicit_p (BUILT_IN_OBJECT_SIZE))
    DEF_SANITIZER_BUILTIN (BUILT_IN_OBJECT_SIZE, "object_size",
			   BT_FN_SIZE_CONST_PTR_INT,
			   ATTR_PURE_NOTHROW_LEAF_LIST)

#undef DEF_SANITIZER_BUILTIN
}

/* Called via htab_traverse.  Count number of emitted
   STRING_CSTs in the constant hash table.  */

int
count_string_csts (constant_descriptor_tree **slot,
		   unsigned HOST_WIDE_INT *data)
{
  struct constant_descriptor_tree *desc = *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    ++*data;
  return 1;
}

/* Helper structure to pass two parameters to
   add_string_csts.  */

struct asan_add_string_csts_data
{
  tree type;
  vec<constructor_elt, va_gc> *v;
};

/* Called via hash_table::traverse.  Call asan_add_global
   on emitted STRING_CSTs from the constant hash table.  */

int
add_string_csts (constant_descriptor_tree **slot,
		 asan_add_string_csts_data *aascd)
{
  struct constant_descriptor_tree *desc = *slot;
  if (TREE_CODE (desc->value) == STRING_CST
      && TREE_ASM_WRITTEN (desc->value)
      && asan_protect_global (desc->value))
    {
      asan_add_global (SYMBOL_REF_DECL (XEXP (desc->rtl, 0)),
		       aascd->type, aascd->v);
    }
  return 1;
}

/* Needs to be GTY(()), because cgraph_build_static_cdtor may
   invoke ggc_collect.  */
static GTY(()) tree asan_ctor_statements;

/* Module-level instrumentation.
   - Insert __asan_init_vN () into the list of CTORs.
   - TODO: insert redzones around globals.  */

void
asan_finish_file (void)
{
  varpool_node *vnode;
  unsigned HOST_WIDE_INT gcount = 0;

  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  /* Avoid instrumenting code in the asan ctors/dtors.
     We don't need to insert padding after the description strings,
     nor after the .LASAN* array.  */
  flag_sanitize &= ~SANITIZE_ADDRESS;

  /* For user-space we want asan constructors to run first.
     The Linux kernel does not support priorities other than default,
     and the only other user of constructors is coverage.  So we run
     with the default priority.  */
  int priority = flag_sanitize & SANITIZE_USER_ADDRESS
		 ? MAX_RESERVED_INIT_PRIORITY - 1 : DEFAULT_INIT_PRIORITY;

  if (flag_sanitize & SANITIZE_USER_ADDRESS)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_ASAN_INIT);
      append_to_statement_list (build_call_expr (fn, 0), &asan_ctor_statements);
    }
  FOR_EACH_DEFINED_VARIABLE (vnode)
    if (TREE_ASM_WRITTEN (vnode->decl)
	&& asan_protect_global (vnode->decl))
      ++gcount;
  hash_table<tree_descriptor_hasher> *const_desc_htab = constant_pool_htab ();
  const_desc_htab->traverse<unsigned HOST_WIDE_INT *, count_string_csts>
    (&gcount);
  if (gcount)
    {
      tree type = asan_global_struct (), var, ctor;
      tree dtor_statements = NULL_TREE;
      vec<constructor_elt, va_gc> *v;
      char buf[20];

      type = build_array_type_nelts (type, gcount);
      ASM_GENERATE_INTERNAL_LABEL (buf, "LASAN", 0);
      var = build_decl (UNKNOWN_LOCATION, VAR_DECL, get_identifier (buf),
			type);
      TREE_STATIC (var) = 1;
      TREE_PUBLIC (var) = 0;
      DECL_ARTIFICIAL (var) = 1;
      DECL_IGNORED_P (var) = 1;
      vec_alloc (v, gcount);
      FOR_EACH_DEFINED_VARIABLE (vnode)
	if (TREE_ASM_WRITTEN (vnode->decl)
	    && asan_protect_global (vnode->decl))
	  asan_add_global (vnode->decl, TREE_TYPE (type), v);
      struct asan_add_string_csts_data aascd;
      aascd.type = TREE_TYPE (type);
      aascd.v = v;
      const_desc_htab->traverse<asan_add_string_csts_data *, add_string_csts>
	(&aascd);
      ctor = build_constructor (type, v);
      TREE_CONSTANT (ctor) = 1;
      TREE_STATIC (ctor) = 1;
      DECL_INITIAL (var) = ctor;
      varpool_node::finalize_decl (var);

      tree fn = builtin_decl_implicit (BUILT_IN_ASAN_REGISTER_GLOBALS);
      tree gcount_tree = build_int_cst (pointer_sized_int_node, gcount);
      append_to_statement_list (build_call_expr (fn, 2,
						 build_fold_addr_expr (var),
						 gcount_tree),
				&asan_ctor_statements);

      fn = builtin_decl_implicit (BUILT_IN_ASAN_UNREGISTER_GLOBALS);
      append_to_statement_list (build_call_expr (fn, 2,
						 build_fold_addr_expr (var),
						 gcount_tree),
				&dtor_statements);
      cgraph_build_static_cdtor ('D', dtor_statements, priority);
    }
  if (asan_ctor_statements)
    cgraph_build_static_cdtor ('I', asan_ctor_statements, priority);
  flag_sanitize |= SANITIZE_ADDRESS;
}
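
/* Illustrative sketch (editorial addition, not compiled): for a TU
   with two protected globals, the constructor/destructor pair built
   above behaves like

     static struct __asan_global .LASAN0[2] = { ... };

     ctor:  __asan_init ();                         (user-space only)
	    __asan_register_globals (&.LASAN0[0], 2);
     dtor:  __asan_unregister_globals (&.LASAN0[0], 2);

   The actual function names and calling sequence are whatever
   cgraph_build_static_cdtor produces; the layout shown is only a
   reading aid.  */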

/* Expand the ASAN_{LOAD,STORE} builtins.  */

bool
asan_expand_check_ifn (gimple_stmt_iterator *iter, bool use_calls)
{
  gimple g = gsi_stmt (*iter);
  location_t loc = gimple_location (g);

  bool recover_p
    = (flag_sanitize & flag_sanitize_recover & SANITIZE_KERNEL_ADDRESS) != 0;

  HOST_WIDE_INT flags = tree_to_shwi (gimple_call_arg (g, 0));
  gcc_assert (flags < ASAN_CHECK_LAST);
  bool is_scalar_access = (flags & ASAN_CHECK_SCALAR_ACCESS) != 0;
  bool is_store = (flags & ASAN_CHECK_STORE) != 0;
  bool is_non_zero_len = (flags & ASAN_CHECK_NON_ZERO_LEN) != 0;

  tree base = gimple_call_arg (g, 1);
  tree len = gimple_call_arg (g, 2);
  HOST_WIDE_INT align = tree_to_shwi (gimple_call_arg (g, 3));

  HOST_WIDE_INT size_in_bytes
    = is_scalar_access && tree_fits_shwi_p (len) ? tree_to_shwi (len) : -1;

  if (use_calls)
    {
      /* Instrument using callbacks.  */
      gimple g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				      NOP_EXPR, base);
      gimple_set_location (g, loc);
      gsi_insert_before (iter, g, GSI_SAME_STMT);
      tree base_addr = gimple_assign_lhs (g);

      int nargs;
      tree fun = check_func (is_store, recover_p, size_in_bytes, &nargs);
      if (nargs == 1)
	g = gimple_build_call (fun, 1, base_addr);
      else
	{
	  gcc_assert (nargs == 2);
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   NOP_EXPR, len);
	  gimple_set_location (g, loc);
	  gsi_insert_before (iter, g, GSI_SAME_STMT);
	  tree sz_arg = gimple_assign_lhs (g);
	  g = gimple_build_call (fun, nargs, base_addr, sz_arg);
	}
      gimple_set_location (g, loc);
      gsi_replace (iter, g, false);
      return false;
    }

  HOST_WIDE_INT real_size_in_bytes = size_in_bytes == -1 ? 1 : size_in_bytes;

  tree shadow_ptr_type = shadow_ptr_types[real_size_in_bytes == 16 ? 1 : 0];
  tree shadow_type = TREE_TYPE (shadow_ptr_type);

  gimple_stmt_iterator gsi = *iter;

  if (!is_non_zero_len)
    {
      /* So, the length of the memory area to asan-protect is
	 non-constant.  Let's guard the generated instrumentation code
	 like:

	 if (len != 0)
	   {
	     // asan instrumentation code goes here.
	   }
	 // fallthrough instructions, starting with *ITER.  */

      g = gimple_build_cond (NE_EXPR,
			     len,
			     build_int_cst (TREE_TYPE (len), 0),
			     NULL_TREE, NULL_TREE);
      gimple_set_location (g, loc);

      basic_block then_bb, fallthrough_bb;
      insert_if_then_before_iter (as_a <gcond *> (g), iter,
				  /*then_more_likely_p=*/true,
				  &then_bb, &fallthrough_bb);
      /* Note that fallthrough_bb starts with the statement that was
	 pointed to by ITER.  */

      /* The 'then block' of the 'if (len != 0)' condition is where
	 we'll generate the asan instrumentation code now.  */
      gsi = gsi_last_bb (then_bb);
    }

  /* Get an iterator on the point where we can add the condition
     statement for the instrumentation.  */
  basic_block then_bb, else_bb;
  gsi = create_cond_insert_point (&gsi, /*before_p*/false,
				  /*then_more_likely_p=*/false,
				  /*create_then_fallthru_edge*/recover_p,
				  &then_bb,
				  &else_bb);

  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
			   NOP_EXPR, base);
  gimple_set_location (g, loc);
  gsi_insert_before (&gsi, g, GSI_NEW_STMT);
  tree base_addr = gimple_assign_lhs (g);

  tree t = NULL_TREE;
  if (real_size_in_bytes >= 8)
    {
      tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
					     shadow_ptr_type);
      t = shadow;
    }
  else
    {
      /* Slow path for 1-, 2- and 4-byte accesses.  */
      /* Test (shadow != 0)
	 & ((base_addr & 7) + (real_size_in_bytes - 1) >= shadow).  */
      tree shadow = build_shadow_mem_access (&gsi, loc, base_addr,
					     shadow_ptr_type);
      gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
      gimple_seq seq = NULL;
      gimple_seq_add_stmt (&seq, shadow_test);
      /* Aligned (>= 8 bytes) accesses can test just
	 (real_size_in_bytes - 1 >= shadow), as base_addr & 7 is known
	 to be 0.  */
      if (align < 8)
	{
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
						   base_addr, 7));
	  gimple_seq_add_stmt (&seq,
			       build_type_cast (shadow_type,
						gimple_seq_last (seq)));
	  if (real_size_in_bytes > 1)
	    gimple_seq_add_stmt (&seq,
				 build_assign (PLUS_EXPR,
					       gimple_seq_last (seq),
					       real_size_in_bytes - 1));
	  t = gimple_assign_lhs (gimple_seq_last_stmt (seq));
	}
      else
	t = build_int_cst (shadow_type, real_size_in_bytes - 1);
      gimple_seq_add_stmt (&seq, build_assign (GE_EXPR, t, shadow));
      gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
					       gimple_seq_last (seq)));
      t = gimple_assign_lhs (gimple_seq_last (seq));
      gimple_seq_set_location (seq, loc);
      gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);

      /* For non-constant, misaligned or otherwise weird access sizes,
	 check the first and the last byte.  */
      if (size_in_bytes == -1)
	{
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   MINUS_EXPR, len,
				   build_int_cst (pointer_sized_int_node, 1));
	  gimple_set_location (g, loc);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
	  tree last = gimple_assign_lhs (g);
	  g = gimple_build_assign (make_ssa_name (pointer_sized_int_node),
				   PLUS_EXPR, base_addr, last);
	  gimple_set_location (g, loc);
	  gsi_insert_after (&gsi, g, GSI_NEW_STMT);
	  tree base_end_addr = gimple_assign_lhs (g);

	  tree shadow = build_shadow_mem_access (&gsi, loc, base_end_addr,
						 shadow_ptr_type);
	  gimple shadow_test = build_assign (NE_EXPR, shadow, 0);
	  gimple_seq seq = NULL;
	  gimple_seq_add_stmt (&seq, shadow_test);
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR,
						   base_end_addr, 7));
	  gimple_seq_add_stmt (&seq, build_type_cast (shadow_type,
						      gimple_seq_last (seq)));
	  gimple_seq_add_stmt (&seq, build_assign (GE_EXPR,
						   gimple_seq_last (seq),
						   shadow));
	  gimple_seq_add_stmt (&seq, build_assign (BIT_AND_EXPR, shadow_test,
						   gimple_seq_last (seq)));
	  gimple_seq_add_stmt (&seq, build_assign (BIT_IOR_EXPR, t,
						   gimple_seq_last (seq)));
	  t = gimple_assign_lhs (gimple_seq_last (seq));
	  gimple_seq_set_location (seq, loc);
	  gsi_insert_seq_after (&gsi, seq, GSI_CONTINUE_LINKING);
	}
    }

  g = gimple_build_cond (NE_EXPR, t, build_int_cst (TREE_TYPE (t), 0),
			 NULL_TREE, NULL_TREE);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  /* Generate call to the run-time library (e.g. __asan_report_load8).  */
  gsi = gsi_start_bb (then_bb);
  int nargs;
  tree fun = report_error_func (is_store, recover_p, size_in_bytes, &nargs);
  g = gimple_build_call (fun, nargs, base_addr, len);
  gimple_set_location (g, loc);
  gsi_insert_after (&gsi, g, GSI_NEW_STMT);

  gsi_remove (iter, true);
  *iter = gsi_start_bb (else_bb);

  return true;
}
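
/* Illustrative sketch (editorial addition, not compiled): with
   ASAN_SHADOW_SHIFT == 3 and OFFSET standing for the target's
   asan_shadow_offset (), the inline expansion above for a 4-byte load
   from `addr' is equivalent to

     signed char shadow = *(signed char *) ((addr >> 3) + OFFSET);
     if (shadow != 0 && ((addr & 7) + 3 >= shadow))
       __asan_report_load4 (addr);

   since a shadow value of k (1..7) means only the first k bytes of
   the 8-byte granule are addressable.  For 8- and 16-byte accesses
   the slow-path test is unnecessary: a nonzero shadow byte (or
   16-bit shadow word) alone triggers the report.  */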

/* Instrument the current function.  */

static unsigned int
asan_instrument (void)
{
  if (shadow_ptr_types[0] == NULL_TREE)
    asan_init_shadow_ptr_types ();
  transform_statements ();
  return 0;
}

static bool
gate_asan (void)
{
  return (flag_sanitize & SANITIZE_ADDRESS) != 0
	  && !lookup_attribute ("no_sanitize_address",
				DECL_ATTRIBUTES (current_function_decl));
}

namespace {

const pass_data pass_data_asan =
{
  GIMPLE_PASS, /* type */
  "asan", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_asan : public gimple_opt_pass
{
public:
  pass_asan (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_asan (m_ctxt); }
  virtual bool gate (function *) { return gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan

} // anon namespace

gimple_opt_pass *
make_pass_asan (gcc::context *ctxt)
{
  return new pass_asan (ctxt);
}

namespace {

const pass_data pass_data_asan_O0 =
{
  GIMPLE_PASS, /* type */
  "asan0", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_ssa | PROP_cfg | PROP_gimple_leh ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_asan_O0 : public gimple_opt_pass
{
public:
  pass_asan_O0 (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_asan_O0, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return !optimize && gate_asan (); }
  virtual unsigned int execute (function *) { return asan_instrument (); }

}; // class pass_asan_O0

} // anon namespace

gimple_opt_pass *
make_pass_asan_O0 (gcc::context *ctxt)
{
  return new pass_asan_O0 (ctxt);
}

#include "gt-asan.h"