cgraphunit.c 74 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
722782279228022812282228322842285228622872288228922902291229222932294229522962297229822992300230123022303230423052306230723082309231023112312231323142315231623172318231923202321232223232324232523262327232823292330233123322333233423352336233723382339234023412342234323442345234623472348234923502351235223532354235523562357235823592360236123622363236423652366236723682369237023712372237323742375237623772378237923802381238223832384238523862387238823892390239123922393239423952396239723982399240024012402240324042405240624072408240924102411241224132414241524162417241824192420242124222423242424252426242724282429243024312432243324342435243624372438243924402441244224432444244524462447244824492450245124522453245424552456245724582459246024612462246324642465246624672468246924702471247224732474247524762477247824792480248124822483248424852486248724882489249024912492249324942495249624972498249925002501250225032504250525062507250825092510251125122513251425152516251725182519252025212522252325242525
  1. /* Driver of optimization process
  2. Copyright (C) 2003-2015 Free Software Foundation, Inc.
  3. Contributed by Jan Hubicka
  4. This file is part of GCC.
  5. GCC is free software; you can redistribute it and/or modify it under
  6. the terms of the GNU General Public License as published by the Free
  7. Software Foundation; either version 3, or (at your option) any later
  8. version.
  9. GCC is distributed in the hope that it will be useful, but WITHOUT ANY
  10. WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11. FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
  12. for more details.
  13. You should have received a copy of the GNU General Public License
  14. along with GCC; see the file COPYING3. If not see
  15. <http://www.gnu.org/licenses/>. */
  16. /* This module implements main driver of compilation process.
  17. The main scope of this file is to act as an interface in between
  18. tree based frontends and the backend.
  19. The front-end is supposed to use following functionality:
  20. - finalize_function
  21. This function is called once front-end has parsed whole body of function
  22. and it is certain that the function body nor the declaration will change.
  23. (There is one exception needed for implementing GCC extern inline
  24. function.)
  25. - varpool_finalize_decl
  26. This function has same behavior as the above but is used for static
  27. variables.
  28. - add_asm_node
  29. Insert new toplevel ASM statement
  30. - finalize_compilation_unit
  31. This function is called once (source level) compilation unit is finalized
  32. and it will no longer change.
  33. The symbol table is constructed starting from the trivially needed
  34. symbols finalized by the frontend. Functions are lowered into
  35. GIMPLE representation and callgraph/reference lists are constructed.
  36. Those are used to discover other necessary functions and variables.
  37. At the end the bodies of unreachable functions are removed.
  38. The function can be called multiple times when multiple source level
  39. compilation units are combined.
  40. - compile
  41. This passes control to the back-end. Optimizations are performed and
  42. final assembler is generated. This is done in the following way. Note
  43. that with link time optimization the process is split into three
  44. stages (compile time, linktime analysis and parallel linktime as
  45. indicated bellow).
  46. Compile time:
  47. 1) Inter-procedural optimization.
  48. (ipa_passes)
  49. This part is further split into:
  50. a) early optimizations. These are local passes executed in
  51. the topological order on the callgraph.
  52. The purpose of early optimiations is to optimize away simple
  53. things that may otherwise confuse IP analysis. Very simple
  54. propagation across the callgraph is done i.e. to discover
  55. functions without side effects and simple inlining is performed.
  56. b) early small interprocedural passes.
  57. Those are interprocedural passes executed only at compilation
  58. time. These include, for example, transational memory lowering,
  59. unreachable code removal and other simple transformations.
  60. c) IP analysis stage. All interprocedural passes do their
  61. analysis.
  62. Interprocedural passes differ from small interprocedural
  63. passes by their ability to operate across whole program
  64. at linktime. Their analysis stage is performed early to
  65. both reduce linking times and linktime memory usage by
  66. not having to represent whole program in memory.
  67. d) LTO sreaming. When doing LTO, everything important gets
  68. streamed into the object file.
  69. Compile time and or linktime analysis stage (WPA):
  70. At linktime units gets streamed back and symbol table is
  71. merged. Function bodies are not streamed in and not
  72. available.
  73. e) IP propagation stage. All IP passes execute their
  74. IP propagation. This is done based on the earlier analysis
  75. without having function bodies at hand.
  76. f) Ltrans streaming. When doing WHOPR LTO, the program
  77. is partitioned and streamed into multple object files.
  78. Compile time and/or parallel linktime stage (ltrans)
  79. Each of the object files is streamed back and compiled
  80. separately. Now the function bodies becomes available
  81. again.
  82. 2) Virtual clone materialization
  83. (cgraph_materialize_clone)
  84. IP passes can produce copies of existing functoins (such
  85. as versioned clones or inline clones) without actually
  86. manipulating their bodies by creating virtual clones in
  87. the callgraph. At this time the virtual clones are
  88. turned into real functions
  89. 3) IP transformation
  90. All IP passes transform function bodies based on earlier
  91. decision of the IP propagation.
  92. 4) late small IP passes
  93. Simple IP passes working within single program partition.
  94. 5) Expansion
  95. (expand_all_functions)
  96. At this stage functions that needs to be output into
  97. assembler are identified and compiled in topological order
  98. 6) Output of variables and aliases
  99. Now it is known what variable references was not optimized
  100. out and thus all variables are output to the file.
  101. Note that with -fno-toplevel-reorder passes 5 and 6
  102. are combined together in cgraph_output_in_order.
  103. Finally there are functions to manipulate the callgraph from
  104. backend.
  105. - cgraph_add_new_function is used to add backend produced
  106. functions introduced after the unit is finalized.
  107. The functions are enqueue for later processing and inserted
  108. into callgraph with cgraph_process_new_functions.
  109. - cgraph_function_versioning
  110. produces a copy of function into new one (a version)
  111. and apply simple transformations
  112. */
  113. #include "config.h"
  114. #include "system.h"
  115. #include "coretypes.h"
  116. #include "tm.h"
  117. #include "hash-set.h"
  118. #include "machmode.h"
  119. #include "vec.h"
  120. #include "double-int.h"
  121. #include "input.h"
  122. #include "alias.h"
  123. #include "symtab.h"
  124. #include "wide-int.h"
  125. #include "inchash.h"
  126. #include "tree.h"
  127. #include "fold-const.h"
  128. #include "varasm.h"
  129. #include "stor-layout.h"
  130. #include "stringpool.h"
  131. #include "output.h"
  132. #include "rtl.h"
  133. #include "predict.h"
  134. #include "hard-reg-set.h"
  135. #include "input.h"
  136. #include "function.h"
  137. #include "basic-block.h"
  138. #include "tree-ssa-alias.h"
  139. #include "internal-fn.h"
  140. #include "gimple-fold.h"
  141. #include "gimple-expr.h"
  142. #include "is-a.h"
  143. #include "gimple.h"
  144. #include "gimplify.h"
  145. #include "gimple-iterator.h"
  146. #include "gimplify-me.h"
  147. #include "gimple-ssa.h"
  148. #include "tree-cfg.h"
  149. #include "tree-into-ssa.h"
  150. #include "tree-ssa.h"
  151. #include "tree-inline.h"
  152. #include "langhooks.h"
  153. #include "toplev.h"
  154. #include "flags.h"
  155. #include "debug.h"
  156. #include "target.h"
  157. #include "diagnostic.h"
  158. #include "params.h"
  159. #include "intl.h"
  160. #include "hash-map.h"
  161. #include "plugin-api.h"
  162. #include "ipa-ref.h"
  163. #include "cgraph.h"
  164. #include "alloc-pool.h"
  165. #include "symbol-summary.h"
  166. #include "ipa-prop.h"
  167. #include "tree-iterator.h"
  168. #include "tree-pass.h"
  169. #include "tree-dump.h"
  170. #include "gimple-pretty-print.h"
  171. #include "output.h"
  172. #include "coverage.h"
  173. #include "plugin.h"
  174. #include "ipa-inline.h"
  175. #include "ipa-utils.h"
  176. #include "lto-streamer.h"
  177. #include "except.h"
  178. #include "cfgloop.h"
  179. #include "regset.h" /* FIXME: For reg_obstack. */
  180. #include "context.h"
  181. #include "pass_manager.h"
  182. #include "tree-nested.h"
  183. #include "gimplify.h"
  184. #include "dbgcnt.h"
  185. #include "tree-chkp.h"
  186. #include "lto-section-names.h"
  187. #include "omp-low.h"
  188. #include "print-tree.h"
  189. /* Queue of cgraph nodes scheduled to be added into cgraph. This is a
  190. secondary queue used during optimization to accommodate passes that
  191. may generate new functions that need to be optimized and expanded. */
  192. vec<cgraph_node *> cgraph_new_nodes;
  193. static void expand_all_functions (void);
  194. static void mark_functions_to_output (void);
  195. static void handle_alias_pairs (void);
  196. /* Used for vtable lookup in thunk adjusting. */
  197. static GTY (()) tree vtable_entry_type;
  198. /* Determine if symbol declaration is needed. That is, visible to something
  199. either outside this translation unit, something magic in the system
  200. configury */
  201. bool
  202. symtab_node::needed_p (void)
  203. {
  204. /* Double check that no one output the function into assembly file
  205. early. */
  206. gcc_checking_assert (!DECL_ASSEMBLER_NAME_SET_P (decl)
  207. || !TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (decl)));
  208. if (!definition)
  209. return false;
  210. if (DECL_EXTERNAL (decl))
  211. return false;
  212. /* If the user told us it is used, then it must be so. */
  213. if (force_output)
  214. return true;
  215. /* ABI forced symbols are needed when they are external. */
  216. if (forced_by_abi && TREE_PUBLIC (decl))
  217. return true;
  218. /* Keep constructors, destructors and virtual functions. */
  219. if (TREE_CODE (decl) == FUNCTION_DECL
  220. && (DECL_STATIC_CONSTRUCTOR (decl) || DECL_STATIC_DESTRUCTOR (decl)))
  221. return true;
  222. /* Externally visible variables must be output. The exception is
  223. COMDAT variables that must be output only when they are needed. */
  224. if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
  225. return true;
  226. return false;
  227. }
  228. /* Head and terminator of the queue of nodes to be processed while building
  229. callgraph. */
  230. static symtab_node symtab_terminator;
  231. static symtab_node *queued_nodes = &symtab_terminator;
  232. /* Add NODE to queue starting at QUEUED_NODES.
  233. The queue is linked via AUX pointers and terminated by pointer to 1. */
  234. static void
  235. enqueue_node (symtab_node *node)
  236. {
  237. if (node->aux)
  238. return;
  239. gcc_checking_assert (queued_nodes);
  240. node->aux = queued_nodes;
  241. queued_nodes = node;
  242. }
  243. /* Process CGRAPH_NEW_FUNCTIONS and perform actions necessary to add these
  244. functions into callgraph in a way so they look like ordinary reachable
  245. functions inserted into callgraph already at construction time. */
  246. void
  247. symbol_table::process_new_functions (void)
  248. {
  249. tree fndecl;
  250. if (!cgraph_new_nodes.exists ())
  251. return;
  252. handle_alias_pairs ();
  253. /* Note that this queue may grow as its being processed, as the new
  254. functions may generate new ones. */
  255. for (unsigned i = 0; i < cgraph_new_nodes.length (); i++)
  256. {
  257. cgraph_node *node = cgraph_new_nodes[i];
  258. fndecl = node->decl;
  259. switch (state)
  260. {
  261. case CONSTRUCTION:
  262. /* At construction time we just need to finalize function and move
  263. it into reachable functions list. */
  264. cgraph_node::finalize_function (fndecl, false);
  265. call_cgraph_insertion_hooks (node);
  266. enqueue_node (node);
  267. break;
  268. case IPA:
  269. case IPA_SSA:
  270. case IPA_SSA_AFTER_INLINING:
  271. /* When IPA optimization already started, do all essential
  272. transformations that has been already performed on the whole
  273. cgraph but not on this function. */
  274. gimple_register_cfg_hooks ();
  275. if (!node->analyzed)
  276. node->analyze ();
  277. push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  278. if ((state == IPA_SSA || state == IPA_SSA_AFTER_INLINING)
  279. && !gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
  280. g->get_passes ()->execute_early_local_passes ();
  281. else if (inline_summaries != NULL)
  282. compute_inline_parameters (node, true);
  283. free_dominance_info (CDI_POST_DOMINATORS);
  284. free_dominance_info (CDI_DOMINATORS);
  285. pop_cfun ();
  286. call_cgraph_insertion_hooks (node);
  287. break;
  288. case EXPANSION:
  289. /* Functions created during expansion shall be compiled
  290. directly. */
  291. node->process = 0;
  292. call_cgraph_insertion_hooks (node);
  293. node->expand ();
  294. break;
  295. default:
  296. gcc_unreachable ();
  297. break;
  298. }
  299. }
  300. cgraph_new_nodes.release ();
  301. }
  302. /* As an GCC extension we allow redefinition of the function. The
  303. semantics when both copies of bodies differ is not well defined.
  304. We replace the old body with new body so in unit at a time mode
  305. we always use new body, while in normal mode we may end up with
  306. old body inlined into some functions and new body expanded and
  307. inlined in others.
  308. ??? It may make more sense to use one body for inlining and other
  309. body for expanding the function but this is difficult to do. */
  310. void
  311. cgraph_node::reset (void)
  312. {
  313. /* If process is set, then we have already begun whole-unit analysis.
  314. This is *not* testing for whether we've already emitted the function.
  315. That case can be sort-of legitimately seen with real function redefinition
  316. errors. I would argue that the front end should never present us with
  317. such a case, but don't enforce that for now. */
  318. gcc_assert (!process);
  319. /* Reset our data structures so we can analyze the function again. */
  320. memset (&local, 0, sizeof (local));
  321. memset (&global, 0, sizeof (global));
  322. memset (&rtl, 0, sizeof (rtl));
  323. analyzed = false;
  324. definition = false;
  325. alias = false;
  326. weakref = false;
  327. cpp_implicit_alias = false;
  328. remove_callees ();
  329. remove_all_references ();
  330. }
  331. /* Return true when there are references to the node. */
  332. bool
  333. symtab_node::referred_to_p (void)
  334. {
  335. ipa_ref *ref = NULL;
  336. /* See if there are any references at all. */
  337. if (iterate_referring (0, ref))
  338. return true;
  339. /* For functions check also calls. */
  340. cgraph_node *cn = dyn_cast <cgraph_node *> (this);
  341. if (cn && cn->callers)
  342. return true;
  343. return false;
  344. }
  345. /* DECL has been parsed. Take it, queue it, compile it at the whim of the
  346. logic in effect. If NO_COLLECT is true, then our caller cannot stand to have
  347. the garbage collector run at the moment. We would need to either create
  348. a new GC context, or just not compile right now. */
  349. void
  350. cgraph_node::finalize_function (tree decl, bool no_collect)
  351. {
  352. cgraph_node *node = cgraph_node::get_create (decl);
  353. if (node->definition)
  354. {
  355. /* Nested functions should only be defined once. */
  356. gcc_assert (!DECL_CONTEXT (decl)
  357. || TREE_CODE (DECL_CONTEXT (decl)) != FUNCTION_DECL);
  358. node->reset ();
  359. node->local.redefined_extern_inline = true;
  360. }
  361. /* Set definition first before calling notice_global_symbol so that
  362. it is available to notice_global_symbol. */
  363. node->definition = true;
  364. notice_global_symbol (decl);
  365. node->lowered = DECL_STRUCT_FUNCTION (decl)->cfg != NULL;
  366. /* With -fkeep-inline-functions we are keeping all inline functions except
  367. for extern inline ones. */
  368. if (flag_keep_inline_functions
  369. && DECL_DECLARED_INLINE_P (decl)
  370. && !DECL_EXTERNAL (decl)
  371. && !DECL_DISREGARD_INLINE_LIMITS (decl))
  372. node->force_output = 1;
  373. /* When not optimizing, also output the static functions. (see
  374. PR24561), but don't do so for always_inline functions, functions
  375. declared inline and nested functions. These were optimized out
  376. in the original implementation and it is unclear whether we want
  377. to change the behavior here. */
  378. if ((!opt_for_fn (decl, optimize)
  379. && !node->cpp_implicit_alias
  380. && !DECL_DISREGARD_INLINE_LIMITS (decl)
  381. && !DECL_DECLARED_INLINE_P (decl)
  382. && !(DECL_CONTEXT (decl)
  383. && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL))
  384. && !DECL_COMDAT (decl) && !DECL_EXTERNAL (decl))
  385. node->force_output = 1;
  386. /* If we've not yet emitted decl, tell the debug info about it. */
  387. if (!TREE_ASM_WRITTEN (decl))
  388. (*debug_hooks->deferred_inline_function) (decl);
  389. /* Possibly warn about unused parameters. */
  390. if (warn_unused_parameter)
  391. do_warn_unused_parameter (decl);
  392. if (!no_collect)
  393. ggc_collect ();
  394. if (symtab->state == CONSTRUCTION
  395. && (node->needed_p () || node->referred_to_p ()))
  396. enqueue_node (node);
  397. }
  398. /* Add the function FNDECL to the call graph.
  399. Unlike finalize_function, this function is intended to be used
  400. by middle end and allows insertion of new function at arbitrary point
  401. of compilation. The function can be either in high, low or SSA form
  402. GIMPLE.
  403. The function is assumed to be reachable and have address taken (so no
  404. API breaking optimizations are performed on it).
  405. Main work done by this function is to enqueue the function for later
  406. processing to avoid need the passes to be re-entrant. */
  407. void
  408. cgraph_node::add_new_function (tree fndecl, bool lowered)
  409. {
  410. gcc::pass_manager *passes = g->get_passes ();
  411. cgraph_node *node;
  412. switch (symtab->state)
  413. {
  414. case PARSING:
  415. cgraph_node::finalize_function (fndecl, false);
  416. break;
  417. case CONSTRUCTION:
  418. /* Just enqueue function to be processed at nearest occurrence. */
  419. node = cgraph_node::get_create (fndecl);
  420. if (lowered)
  421. node->lowered = true;
  422. cgraph_new_nodes.safe_push (node);
  423. break;
  424. case IPA:
  425. case IPA_SSA:
  426. case IPA_SSA_AFTER_INLINING:
  427. case EXPANSION:
  428. /* Bring the function into finalized state and enqueue for later
  429. analyzing and compilation. */
  430. node = cgraph_node::get_create (fndecl);
  431. node->local.local = false;
  432. node->definition = true;
  433. node->force_output = true;
  434. if (!lowered && symtab->state == EXPANSION)
  435. {
  436. push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  437. gimple_register_cfg_hooks ();
  438. bitmap_obstack_initialize (NULL);
  439. execute_pass_list (cfun, passes->all_lowering_passes);
  440. passes->execute_early_local_passes ();
  441. bitmap_obstack_release (NULL);
  442. pop_cfun ();
  443. lowered = true;
  444. }
  445. if (lowered)
  446. node->lowered = true;
  447. cgraph_new_nodes.safe_push (node);
  448. break;
  449. case FINISHED:
  450. /* At the very end of compilation we have to do all the work up
  451. to expansion. */
  452. node = cgraph_node::create (fndecl);
  453. if (lowered)
  454. node->lowered = true;
  455. node->definition = true;
  456. node->analyze ();
  457. push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  458. gimple_register_cfg_hooks ();
  459. bitmap_obstack_initialize (NULL);
  460. if (!gimple_in_ssa_p (DECL_STRUCT_FUNCTION (fndecl)))
  461. g->get_passes ()->execute_early_local_passes ();
  462. bitmap_obstack_release (NULL);
  463. pop_cfun ();
  464. node->expand ();
  465. break;
  466. default:
  467. gcc_unreachable ();
  468. }
  469. /* Set a personality if required and we already passed EH lowering. */
  470. if (lowered
  471. && (function_needs_eh_personality (DECL_STRUCT_FUNCTION (fndecl))
  472. == eh_personality_lang))
  473. DECL_FUNCTION_PERSONALITY (fndecl) = lang_hooks.eh_personality ();
  474. }
  475. /* Analyze the function scheduled to be output. */
  476. void
  477. cgraph_node::analyze (void)
  478. {
  479. tree decl = this->decl;
  480. location_t saved_loc = input_location;
  481. input_location = DECL_SOURCE_LOCATION (decl);
  482. if (thunk.thunk_p)
  483. {
  484. cgraph_node *t = cgraph_node::get (thunk.alias);
  485. create_edge (t, NULL, 0, CGRAPH_FREQ_BASE);
  486. /* Target code in expand_thunk may need the thunk's target
  487. to be analyzed, so recurse here. */
  488. if (!t->analyzed)
  489. t->analyze ();
  490. if (t->alias)
  491. {
  492. t = t->get_alias_target ();
  493. if (!t->analyzed)
  494. t->analyze ();
  495. }
  496. if (!expand_thunk (false, false))
  497. {
  498. thunk.alias = NULL;
  499. return;
  500. }
  501. thunk.alias = NULL;
  502. }
  503. if (alias)
  504. resolve_alias (cgraph_node::get (alias_target));
  505. else if (dispatcher_function)
  506. {
  507. /* Generate the dispatcher body of multi-versioned functions. */
  508. cgraph_function_version_info *dispatcher_version_info
  509. = function_version ();
  510. if (dispatcher_version_info != NULL
  511. && (dispatcher_version_info->dispatcher_resolver
  512. == NULL_TREE))
  513. {
  514. tree resolver = NULL_TREE;
  515. gcc_assert (targetm.generate_version_dispatcher_body);
  516. resolver = targetm.generate_version_dispatcher_body (this);
  517. gcc_assert (resolver != NULL_TREE);
  518. }
  519. }
  520. else
  521. {
  522. push_cfun (DECL_STRUCT_FUNCTION (decl));
  523. assign_assembler_name_if_neeeded (decl);
  524. /* Make sure to gimplify bodies only once. During analyzing a
  525. function we lower it, which will require gimplified nested
  526. functions, so we can end up here with an already gimplified
  527. body. */
  528. if (!gimple_has_body_p (decl))
  529. gimplify_function_tree (decl);
  530. dump_function (TDI_generic, decl);
  531. /* Lower the function. */
  532. if (!lowered)
  533. {
  534. if (nested)
  535. lower_nested_functions (decl);
  536. gcc_assert (!nested);
  537. gimple_register_cfg_hooks ();
  538. bitmap_obstack_initialize (NULL);
  539. execute_pass_list (cfun, g->get_passes ()->all_lowering_passes);
  540. free_dominance_info (CDI_POST_DOMINATORS);
  541. free_dominance_info (CDI_DOMINATORS);
  542. compact_blocks ();
  543. bitmap_obstack_release (NULL);
  544. lowered = true;
  545. }
  546. pop_cfun ();
  547. }
  548. analyzed = true;
  549. input_location = saved_loc;
  550. }
  551. /* C++ frontend produce same body aliases all over the place, even before PCH
  552. gets streamed out. It relies on us linking the aliases with their function
  553. in order to do the fixups, but ipa-ref is not PCH safe. Consequentely we
  554. first produce aliases without links, but once C++ FE is sure he won't sream
  555. PCH we build the links via this function. */
  556. void
  557. symbol_table::process_same_body_aliases (void)
  558. {
  559. symtab_node *node;
  560. FOR_EACH_SYMBOL (node)
  561. if (node->cpp_implicit_alias && !node->analyzed)
  562. node->resolve_alias
  563. (TREE_CODE (node->alias_target) == VAR_DECL
  564. ? (symtab_node *)varpool_node::get_create (node->alias_target)
  565. : (symtab_node *)cgraph_node::get_create (node->alias_target));
  566. cpp_implicit_aliases_done = true;
  567. }
  568. /* Process attributes common for vars and functions. */
  569. static void
  570. process_common_attributes (symtab_node *node, tree decl)
  571. {
  572. tree weakref = lookup_attribute ("weakref", DECL_ATTRIBUTES (decl));
  573. if (weakref && !lookup_attribute ("alias", DECL_ATTRIBUTES (decl)))
  574. {
  575. warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
  576. "%<weakref%> attribute should be accompanied with"
  577. " an %<alias%> attribute");
  578. DECL_WEAK (decl) = 0;
  579. DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
  580. DECL_ATTRIBUTES (decl));
  581. }
  582. if (lookup_attribute ("no_reorder", DECL_ATTRIBUTES (decl)))
  583. node->no_reorder = 1;
  584. }
/* Look for externally_visible and used attributes and mark cgraph nodes
   accordingly.

   We cannot mark the nodes at the point the attributes are processed (in
   handle_*_attribute) because the copy of the declarations available at that
   point may not be canonical.  For example, in:

    void f();
    void f() __attribute__((used));

   the declaration we see in handle_used_attribute will be the second
   declaration -- but the front end will subsequently merge that declaration
   with the original declaration and discard the second declaration.

   Furthermore, we can't mark these nodes in finalize_function because:

    void f() {}
    void f() __attribute__((externally_visible));

   is valid.

   So, we walk the nodes at the end of the translation unit, applying the
   attributes at that point.  */

static void
process_function_and_variable_attributes (cgraph_node *first,
					  varpool_node *first_var)
{
  cgraph_node *node;
  varpool_node *vnode;

  /* Walk only functions added since the previous invocation; FIRST is the
     head of the list as it was at that time (new nodes are linked before
     it — see the caller in analyze_functions).  */
  for (node = symtab->first_function (); node != first;
       node = symtab->next_function (node))
    {
      tree decl = node->decl;
      /* attribute((used)): the function must be output even if it appears
	 unreferenced.  */
      if (DECL_PRESERVE_P (decl))
	node->mark_force_output ();
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (node->decl))
	    warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      /* "weakref" on a function that already has a definition (and is not an
	 alias) is contradictory: drop the weak flag and the attribute.  */
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && (node->definition && !node->alias))
	{
	  warning_at (DECL_SOURCE_LOCATION (node->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because function is defined");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}

      /* Warn when always_inline can not be honored.  */
      if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (decl))
	  && !DECL_DECLARED_INLINE_P (decl)
	  /* redefining extern inline function makes it DECL_UNINLINABLE.  */
	  && !DECL_UNINLINABLE (decl))
	warning_at (DECL_SOURCE_LOCATION (decl), OPT_Wattributes,
		    "always_inline function might not be inlinable");

      process_common_attributes (node, decl);
    }

  /* Same walk for variables added since FIRST_VAR.  */
  for (vnode = symtab->first_variable (); vnode != first_var;
       vnode = symtab->next_variable (vnode))
    {
      tree decl = vnode->decl;
      /* An external declaration that nevertheless carries an initializer is
	 really a definition; finalize it now.  */
      if (DECL_EXTERNAL (decl)
	  && DECL_INITIAL (decl))
	varpool_node::finalize_decl (decl);
      if (DECL_PRESERVE_P (decl))
	vnode->force_output = true;
      else if (lookup_attribute ("externally_visible", DECL_ATTRIBUTES (decl)))
	{
	  if (! TREE_PUBLIC (vnode->decl))
	    warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
			"%<externally_visible%>"
			" attribute have effect only on public objects");
	}
      /* "weakref" on an initialized variable definition is likewise
	 contradictory.  */
      if (lookup_attribute ("weakref", DECL_ATTRIBUTES (decl))
	  && vnode->definition
	  && DECL_INITIAL (decl))
	{
	  warning_at (DECL_SOURCE_LOCATION (vnode->decl), OPT_Wattributes,
		      "%<weakref%> attribute ignored"
		      " because variable is initialized");
	  DECL_WEAK (decl) = 0;
	  DECL_ATTRIBUTES (decl) = remove_attribute ("weakref",
						     DECL_ATTRIBUTES (decl));
	}
      process_common_attributes (vnode, decl);
    }
}
/* Mark DECL as finalized.  By finalizing the declaration, the frontend
   instructs the middle end to output the variable to the asm file, if
   needed or externally visible.  */

void
varpool_node::finalize_decl (tree decl)
{
  varpool_node *node = varpool_node::get_create (decl);

  gcc_assert (TREE_STATIC (decl) || DECL_EXTERNAL (decl));
  /* Finalizing twice is a no-op.  */
  if (node->definition)
    return;
  /* Set definition first before calling notice_global_symbol so that
     it is available to notice_global_symbol.  */
  node->definition = true;
  notice_global_symbol (decl);
  if (TREE_THIS_VOLATILE (decl) || DECL_PRESERVE_P (decl)
      /* Traditionally we do not eliminate static variables when not
	 optimizing and when not doing toplevel reoder.  */
      || node->no_reorder
      || ((!flag_toplevel_reorder
	   && !DECL_COMDAT (node->decl)
	   && !DECL_ARTIFICIAL (node->decl))))
    node->force_output = true;

  /* During callgraph construction, feed needed/referenced variables into
     the analysis worklist; after IPA_SSA they are analyzed immediately.  */
  if (symtab->state == CONSTRUCTION
      && (node->needed_p () || node->referred_to_p ()))
    enqueue_node (node);
  if (symtab->state >= IPA_SSA)
    node->analyze ();
  /* Some frontends produce various interface variables after compilation
     finished.  */
  if (symtab->state == FINISHED
      || (!flag_toplevel_reorder
	  && symtab->state == EXPANSION))
    node->assemble_decl ();

  /* Register the initializer for pointer-bounds checking (chkp/mpx).  */
  if (DECL_INITIAL (decl))
    chkp_register_var_initializer (decl);
}
/* EDGE is a polymorphic call.  Mark all possible targets as reachable
   and if there is only one target, perform trivial devirtualization.
   REACHABLE_CALL_TARGETS collects target lists we already walked to
   avoid duplicate work.  */

static void
walk_polymorphic_call_targets (hash_set<void *> *reachable_call_targets,
			       cgraph_edge *edge)
{
  unsigned int i;
  void *cache_token;
  bool final;
  vec <cgraph_node *>targets
    = possible_polymorphic_call_targets
	(edge, &final, &cache_token);

  /* CACHE_TOKEN identifies the cached target list; only enqueue its members
     the first time we encounter that list (add returns false when the token
     was not yet present).  */
  if (!reachable_call_targets->add (cache_token))
    {
      if (symtab->dump_file)
	dump_possible_polymorphic_call_targets
	  (symtab->dump_file, edge);

      for (i = 0; i < targets.length (); i++)
	{
	  /* Do not bother to mark virtual methods in anonymous namespace;
	     either we will find use of virtual table defining it, or it is
	     unused.  */
	  if (targets[i]->definition
	      && TREE_CODE
		   (TREE_TYPE (targets[i]->decl))
		   == METHOD_TYPE
	      && !type_in_anonymous_namespace_p
		    (method_class_type
		       (TREE_TYPE (targets[i]->decl))))
	    enqueue_node (targets[i]);
	}
    }

  /* Very trivial devirtualization; when the type is
     final or anonymous (so we know all its derivation)
     and there is only one possible virtual call target,
     make the edge direct.  */
  if (final)
    {
      if (targets.length () <= 1 && dbg_cnt (devirt))
	{
	  cgraph_node *target;
	  /* An empty target list means the call can never be executed:
	     redirect it to __builtin_unreachable.  */
	  if (targets.length () == 1)
	    target = targets[0];
	  else
	    target = cgraph_node::create
		       (builtin_decl_implicit (BUILT_IN_UNREACHABLE));

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file,
		       "Devirtualizing call: ");
	      print_gimple_stmt (symtab->dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	  if (dump_enabled_p ())
	    {
	      location_t locus = gimple_location_safe (edge->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, locus,
			       "devirtualizing call in %s to %s\n",
			       edge->caller->name (), target->name ());
	    }

	  edge->make_direct (target);
	  edge->redirect_call_stmt_to_callee ();

	  /* Call to __builtin_unreachable shouldn't be instrumented.  */
	  if (!targets.length ())
	    gimple_call_set_with_bounds (edge->call_stmt, false);

	  if (symtab->dump_file)
	    {
	      fprintf (symtab->dump_file,
		       "Devirtualized as: ");
	      print_gimple_stmt (symtab->dump_file,
				 edge->call_stmt, 0,
				 TDF_SLIM);
	    }
	}
    }
}
/* Discover all functions and variables that are trivially needed, analyze
   them as well as all functions and variables referred by them.  */

/* Heads of the function and variable lists as of the previous
   analyze_functions run; everything before these markers has already been
   processed, so repeated invocations (intermodule mode) only look at newly
   added symbols.  */
static cgraph_node *first_analyzed;
static varpool_node *first_analyzed_var;
/* Discover all trivially needed symbols, analyze them and everything they
   reference, then reclaim symbols that turned out to be unneeded.  Runs a
   fixpoint because analysis may add new symbols.  */

static void
analyze_functions (void)
{
  /* Keep track of already processed nodes when called multiple times for
     intermodule optimization.  */
  cgraph_node *first_handled = first_analyzed;
  varpool_node *first_handled_var = first_analyzed_var;
  hash_set<void *> reachable_call_targets;

  symtab_node *node;
  symtab_node *next;
  int i;
  ipa_ref *ref;
  bool changed = true;
  location_t saved_loc = input_location;

  bitmap_obstack_initialize (NULL);
  symtab->state = CONSTRUCTION;
  input_location = UNKNOWN_LOCATION;

  /* Ugly, but the fixup can not happen at a time same body alias is created;
     C++ FE is confused about the COMDAT groups being right.  */
  if (symtab->cpp_implicit_aliases_done)
    FOR_EACH_SYMBOL (node)
      if (node->cpp_implicit_alias)
	node->fixup_same_cpp_alias_visibility (node->get_alias_target ());
  build_type_inheritance_graph ();

  /* Analysis adds static variables that in turn add references to new
     functions, so we need to iterate the process until it stabilizes.  */
  while (changed)
    {
      changed = false;
      process_function_and_variable_attributes (first_analyzed,
						first_analyzed_var);

      /* First identify the trivially needed symbols.  */
      for (node = symtab->first_symbol ();
	   node != first_analyzed
	   && node != first_analyzed_var; node = node->next)
	{
	  /* Convert COMDAT group designators to IDENTIFIER_NODEs.  */
	  node->get_comdat_group_id ();
	  if (node->needed_p ())
	    {
	      enqueue_node (node);
	      if (!changed && symtab->dump_file)
		fprintf (symtab->dump_file, "Trivially needed symbols:");
	      changed = true;
	      if (symtab->dump_file)
		fprintf (symtab->dump_file, " %s", node->asm_name ());
	      /* NOTE(review): this branch is dead -- CHANGED was set to true
		 just above, so the newline is never printed here; the
		 terminating newline comes from the check after the loop.  */
	      if (!changed && symtab->dump_file)
		fprintf (symtab->dump_file, "\n");
	    }
	  if (node == first_analyzed
	      || node == first_analyzed_var)
	    break;
	}
      symtab->process_new_functions ();
      first_analyzed_var = symtab->first_variable ();
      first_analyzed = symtab->first_function ();

      if (changed && symtab->dump_file)
	fprintf (symtab->dump_file, "\n");

      /* Lower representation, build callgraph edges and references for all
	 trivially needed symbols and all symbols referred by them.  */
      while (queued_nodes != &symtab_terminator)
	{
	  changed = true;
	  /* Pop the worklist head; AUX chains the queued nodes.  */
	  node = queued_nodes;
	  queued_nodes = (symtab_node *)queued_nodes->aux;
	  cgraph_node *cnode = dyn_cast <cgraph_node *> (node);
	  if (cnode && cnode->definition)
	    {
	      cgraph_edge *edge;
	      tree decl = cnode->decl;

	      /* ??? It is possible to create extern inline function
		 and later using weak alias attribute to kill its body.
		 See gcc.c-torture/compile/20011119-1.c  */
	      if (!DECL_STRUCT_FUNCTION (decl)
		  && !cnode->alias
		  && !cnode->thunk.thunk_p
		  && !cnode->dispatcher_function)
		{
		  cnode->reset ();
		  cnode->local.redefined_extern_inline = true;
		  continue;
		}

	      if (!cnode->analyzed)
		cnode->analyze ();

	      /* Enqueue callees that must be analyzed in this unit.  */
	      for (edge = cnode->callees; edge; edge = edge->next_callee)
		if (edge->callee->definition
		    && (!DECL_EXTERNAL (edge->callee->decl)
			/* When not optimizing, do not try to analyze extern
			   inline functions.  Doing so is pointless.  */
			|| opt_for_fn (edge->callee->decl, optimize)
			/* Weakrefs needs to be preserved.  */
			|| edge->callee->alias
			/* always_inline functions are inlined even at -O0.  */
			|| lookup_attribute
				 ("always_inline",
				  DECL_ATTRIBUTES (edge->callee->decl))
			/* Multiversioned functions needs the dispatcher to
			   be produced locally even for extern functions.  */
			|| edge->callee->function_version ()))
		  enqueue_node (edge->callee);

	      /* With devirtualization enabled, walk polymorphic targets of
		 indirect calls so they stay reachable.  */
	      if (opt_for_fn (cnode->decl, optimize)
		  && opt_for_fn (cnode->decl, flag_devirtualize))
		{
		  cgraph_edge *next;

		  for (edge = cnode->indirect_calls; edge; edge = next)
		    {
		      next = edge->next_callee;
		      if (edge->indirect_info->polymorphic)
			walk_polymorphic_call_targets (&reachable_call_targets,
						       edge);
		    }
		}

	      /* If decl is a clone of an abstract function,
		 mark that abstract function so that we don't release its body.
		 The DECL_INITIAL() of that abstract function declaration
		 will be later needed to output debug info.  */
	      if (DECL_ABSTRACT_ORIGIN (decl))
		{
		  cgraph_node *origin_node
		    = cgraph_node::get_create (DECL_ABSTRACT_ORIGIN (decl));
		  origin_node->used_as_abstract_origin = true;
		}
	    }
	  else
	    {
	      varpool_node *vnode = dyn_cast <varpool_node *> (node);
	      if (vnode && vnode->definition && !vnode->analyzed)
		vnode->analyze ();
	    }

	  /* Keep whole comdat groups together.  */
	  if (node->same_comdat_group)
	    {
	      symtab_node *next;
	      for (next = node->same_comdat_group;
		   next != node;
		   next = next->same_comdat_group)
		if (!next->comdat_local_p ())
		  enqueue_node (next);
	    }
	  /* Enqueue referenced symbols that need local analysis.  */
	  for (i = 0; node->iterate_reference (i, ref); i++)
	    if (ref->referred->definition
		&& (!DECL_EXTERNAL (ref->referred->decl)
		    || ((TREE_CODE (ref->referred->decl) != FUNCTION_DECL
			 && optimize)
			|| (TREE_CODE (ref->referred->decl) == FUNCTION_DECL
			    && opt_for_fn (ref->referred->decl, optimize))
			|| node->alias
			|| ref->referred->alias)))
	      enqueue_node (ref->referred);
	  symtab->process_new_functions ();
	}
    }
  update_type_inheritance_graph ();

  /* Collect entry points to the unit.  */
  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "\n\nInitial ");
      symtab_node::dump_table (symtab->dump_file);
    }

  if (symtab->dump_file)
    fprintf (symtab->dump_file, "\nRemoving unused symbols:");

  /* Reclamation pass: a node whose AUX was never set was never enqueued,
     so if it is also unreferenced it can be removed.  */
  for (node = symtab->first_symbol ();
       node != first_handled
       && node != first_handled_var; node = next)
    {
      next = node->next;
      if (!node->aux && !node->referred_to_p ())
	{
	  if (symtab->dump_file)
	    fprintf (symtab->dump_file, " %s", node->name ());
	  node->remove ();
	  continue;
	}
      if (cgraph_node *cnode = dyn_cast <cgraph_node *> (node))
	{
	  tree decl = node->decl;

	  /* A definition with no gimple body (and no alias/thunk) has lost
	     its body; reset it back to a declaration.  */
	  if (cnode->definition && !gimple_has_body_p (decl)
	      && !cnode->alias
	      && !cnode->thunk.thunk_p)
	    cnode->reset ();

	  gcc_assert (!cnode->definition || cnode->thunk.thunk_p
		      || cnode->alias
		      || gimple_has_body_p (decl));
	  gcc_assert (cnode->analyzed == cnode->definition);
	}
      node->aux = NULL;
    }
  /* Clear AUX on the remainder of the list as well.  */
  for (;node; node = node->next)
    node->aux = NULL;
  first_analyzed = symtab->first_function ();
  first_analyzed_var = symtab->first_variable ();
  if (symtab->dump_file)
    {
      fprintf (symtab->dump_file, "\n\nReclaimed ");
      symtab_node::dump_table (symtab->dump_file);
    }
  bitmap_obstack_release (NULL);
  ggc_collect ();
  /* Initialize assembler name hash, in particular we want to trigger C++
     mangling and same body alias creation before we free DECL_ARGUMENTS
     used by it.  */
  if (!seen_error ())
    symtab->symtab_initialize_asm_name_hash ();

  input_location = saved_loc;
}
/* Translate the ugly representation of aliases as alias pairs into nice
   representation in callgraph.  We don't handle all cases yet,
   unfortunately.  */

static void
handle_alias_pairs (void)
{
  alias_pair *p;
  unsigned i;

  /* I is never incremented explicitly: every path through the body calls
     unordered_remove (i), which moves the last element into slot I, so the
     next iteration examines the replacement at the same index.  */
  for (i = 0; alias_pairs && alias_pairs->iterate (i, &p);)
    {
      symtab_node *target_node = symtab_node::get_for_asmname (p->target);

      /* Weakrefs with target not defined in current unit are easy to handle:
	 they behave just as external variables except we need to note the
	 alias flag to later output the weakref pseudo op into asm file.  */
      if (!target_node
	  && lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)) != NULL)
	{
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    {
	      node->alias_target = p->target;
	      node->weakref = true;
	      node->alias = true;
	    }
	  alias_pairs->unordered_remove (i);
	  continue;
	}
      else if (!target_node)
	{
	  /* Non-weakref alias to a symbol that does not exist at all.  */
	  error ("%q+D aliased to undefined symbol %qE", p->decl, p->target);
	  symtab_node *node = symtab_node::get (p->decl);
	  if (node)
	    node->alias = false;
	  alias_pairs->unordered_remove (i);
	  continue;
	}

      if (DECL_EXTERNAL (target_node->decl)
	  /* We use local aliases for C++ thunks to force the tailcall
	     to bind locally.  This is a hack - to keep it working do
	     the following (which is not strictly correct).  */
	  && (TREE_CODE (target_node->decl) != FUNCTION_DECL
	      || ! DECL_VIRTUAL_P (target_node->decl))
	  && ! lookup_attribute ("weakref", DECL_ATTRIBUTES (p->decl)))
	{
	  error ("%q+D aliased to external symbol %qE",
		 p->decl, p->target);
	}

      if (TREE_CODE (p->decl) == FUNCTION_DECL
	  && target_node && is_a <cgraph_node *> (target_node))
	{
	  /* Function alias: clear any stale definition on the alias decl
	     before recording the alias edge in the callgraph.  */
	  cgraph_node *src_node = cgraph_node::get (p->decl);
	  if (src_node && src_node->definition)
	    src_node->reset ();
	  cgraph_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      else if (TREE_CODE (p->decl) == VAR_DECL
	       && target_node && is_a <varpool_node *> (target_node))
	{
	  varpool_node::create_alias (p->decl, target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
      else
	{
	  /* Mixed function/variable aliases are not supported.  */
	  error ("%q+D alias in between function and variable is not supported",
		 p->decl);
	  warning (0, "%q+D aliased declaration",
		   target_node->decl);
	  alias_pairs->unordered_remove (i);
	}
    }
  vec_free (alias_pairs);
}
/* Figure out what functions we want to assemble.  Sets NODE->process on every
   function that must be emitted; comdat group members are pulled in with
   their group.  */

static void
mark_functions_to_output (void)
{
  cgraph_node *node;
#ifdef ENABLE_CHECKING
  bool check_same_comdat_groups = false;

  /* Nothing should be marked for processing yet.  */
  FOR_EACH_FUNCTION (node)
    gcc_assert (!node->process);
#endif

  FOR_EACH_FUNCTION (node)
    {
      tree decl = node->decl;

      /* Only comdat-group propagation (below) may have set process already.  */
      gcc_assert (!node->process || node->same_comdat_group);
      if (node->process)
	continue;

      /* We need to output all local functions that are used and not
	 always inlined, as well as those that are reachable from
	 outside the current compilation unit.  */
      if (node->analyzed
	  && !node->thunk.thunk_p
	  && !node->alias
	  && !node->global.inlined_to
	  && !TREE_ASM_WRITTEN (decl)
	  && !DECL_EXTERNAL (decl))
	{
	  node->process = 1;
	  /* Emitting one member of a comdat group drags in the rest of the
	     group (except thunks, aliases and comdat-local members).  */
	  if (node->same_comdat_group)
	    {
	      cgraph_node *next;
	      for (next = dyn_cast<cgraph_node *> (node->same_comdat_group);
		   next != node;
		   next = dyn_cast<cgraph_node *> (next->same_comdat_group))
		if (!next->thunk.thunk_p && !next->alias
		    && !next->comdat_local_p ())
		  next->process = 1;
	    }
	}
      else if (node->same_comdat_group)
	{
#ifdef ENABLE_CHECKING
	  /* Verify later that the group was handled consistently.  */
	  check_same_comdat_groups = true;
#endif
	}
      else
	{
	  /* We should've reclaimed all functions that are not needed.  */
#ifdef ENABLE_CHECKING
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in ltrans unit when offline copy is outside partition but inline copies
		 are inside partition, we can end up not removing the body since we no longer
		 have analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->alias
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function");
	    }
#endif
	  gcc_assert (node->global.inlined_to
		      || !gimple_has_body_p (decl)
		      || node->in_other_partition
		      || node->clones
		      || DECL_ARTIFICIAL (decl)
		      || DECL_EXTERNAL (decl));
	}
    }
#ifdef ENABLE_CHECKING
  if (check_same_comdat_groups)
    /* Any comdat-group member left unmarked must have no reclaimable body.  */
    FOR_EACH_FUNCTION (node)
      if (node->same_comdat_group && !node->process)
	{
	  tree decl = node->decl;
	  if (!node->global.inlined_to
	      && gimple_has_body_p (decl)
	      /* FIXME: in an ltrans unit when the offline copy is outside a
		 partition but inline copies are inside a partition, we can
		 end up not removing the body since we no longer have an
		 analyzed node pointing to it.  */
	      && !node->in_other_partition
	      && !node->clones
	      && !DECL_EXTERNAL (decl))
	    {
	      node->debug ();
	      internal_error ("failed to reclaim unneeded function in same "
			      "comdat group");
	    }
	}
#endif
}
/* DECL is a FUNCTION_DECL.  Initialize data structures so DECL is a function
   in lowered gimple form, with profile count COUNT.  IN_SSA is true if the
   gimple should be marked as already in SSA form.
   Set current_function_decl and cfun to the newly constructed empty function
   body.  Return the single basic block in the function body.  */

basic_block
init_lowered_empty_function (tree decl, bool in_ssa, gcov_type count)
{
  basic_block bb;
  edge e;

  current_function_decl = decl;
  allocate_struct_function (decl, false);
  gimple_register_cfg_hooks ();
  init_empty_tree_cfg ();

  if (in_ssa)
    {
      init_tree_ssa (cfun);
      init_ssa_operands (cfun);
      cfun->gimple_df->in_ssa_p = true;
      cfun->curr_properties |= PROP_ssa;
    }

  DECL_INITIAL (decl) = make_node (BLOCK);

  /* NOTE(review): error_mark_node here appears to be a placeholder marking
     the decl as having a (non-generic) body — confirm against callers.  */
  DECL_SAVED_TREE (decl) = error_mark_node;
  /* Claim all the lowering properties so no lowering passes run on DECL.  */
  cfun->curr_properties |= (PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_any
			    | PROP_cfg | PROP_loops);

  set_loops_for_fn (cfun, ggc_cleared_alloc<loops> ());
  init_loops_structure (cfun, loops_for_fn (cfun), 1);
  loops_for_fn (cfun)->state |= LOOPS_MAY_HAVE_MULTIPLE_LATCHES;

  /* Create BB for body of the function and connect it properly.
     ENTRY -> BB -> EXIT, all with count COUNT.  */
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->count = count;
  ENTRY_BLOCK_PTR_FOR_FN (cfun)->frequency = REG_BR_PROB_BASE;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->count = count;
  EXIT_BLOCK_PTR_FOR_FN (cfun)->frequency = REG_BR_PROB_BASE;
  bb = create_basic_block (NULL, (void *) 0, ENTRY_BLOCK_PTR_FOR_FN (cfun));
  bb->count = count;
  bb->frequency = BB_FREQ_MAX;
  e = make_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun), bb, EDGE_FALLTHRU);
  e->count = count;
  e->probability = REG_BR_PROB_BASE;
  e = make_edge (bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
  e->count = count;
  e->probability = REG_BR_PROB_BASE;
  add_bb_to_loop (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun)->loop_father);
  return bb;
}
/* Adjust PTR by the constant FIXED_OFFSET, and by the vtable
   offset indicated by VIRTUAL_OFFSET, if that is
   non-null.  THIS_ADJUSTING is nonzero for a this adjusting thunk and
   zero for a result adjusting thunk.  Statements are emitted after BSI.
   Returns a new SSA-ish temporary holding the adjusted pointer.  */

static tree
thunk_adjust (gimple_stmt_iterator * bsi,
	      tree ptr, bool this_adjusting,
	      HOST_WIDE_INT fixed_offset, tree virtual_offset)
{
  gassign *stmt;
  tree ret;

  /* For a this-adjusting thunk the fixed offset is applied BEFORE the
     virtual lookup; for result-adjusting it is applied after (below).  */
  if (this_adjusting
      && fixed_offset != 0)
    {
      stmt = gimple_build_assign
	       (ptr, fold_build_pointer_plus_hwi_loc (input_location,
						      ptr,
						      fixed_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
    }

  /* If there's a virtual offset, look up that value in the vtable and
     adjust the pointer again.  */
  if (virtual_offset)
    {
      tree vtabletmp;
      tree vtabletmp2;
      tree vtabletmp3;

      /* Lazily build the pointer-to-vtable-entry type used below.  */
      if (!vtable_entry_type)
	{
	  tree vfunc_type = make_node (FUNCTION_TYPE);
	  TREE_TYPE (vfunc_type) = integer_type_node;
	  TYPE_ARG_TYPES (vfunc_type) = NULL_TREE;
	  layout_type (vfunc_type);

	  vtable_entry_type = build_pointer_type (vfunc_type);
	}

      vtabletmp =
	create_tmp_reg (build_pointer_type
			  (build_pointer_type (vtable_entry_type)), "vptr");

      /* The vptr is always at offset zero in the object.  */
      stmt = gimple_build_assign (vtabletmp,
				  build1 (NOP_EXPR, TREE_TYPE (vtabletmp),
					  ptr));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Form the vtable address.  */
      vtabletmp2 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp)),
				   "vtableaddr");
      stmt = gimple_build_assign (vtabletmp2,
				  build_simple_mem_ref (vtabletmp));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Find the entry with the vcall offset.  */
      stmt = gimple_build_assign (vtabletmp2,
				  fold_build_pointer_plus_loc (input_location,
							       vtabletmp2,
							       virtual_offset));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Get the offset itself.  */
      vtabletmp3 = create_tmp_reg (TREE_TYPE (TREE_TYPE (vtabletmp2)),
				   "vcalloffset");
      stmt = gimple_build_assign (vtabletmp3,
				  build_simple_mem_ref (vtabletmp2));
      gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

      /* Adjust the `this' pointer.  */
      ptr = fold_build_pointer_plus_loc (input_location, ptr, vtabletmp3);
      ptr = force_gimple_operand_gsi (bsi, ptr, true, NULL_TREE, false,
				      GSI_CONTINUE_LINKING);
    }

  /* Result-adjusting thunks apply the fixed offset after the virtual one.  */
  if (!this_adjusting
      && fixed_offset != 0)
    /* Adjust the pointer by the constant.  */
    {
      tree ptrtmp;

      if (TREE_CODE (ptr) == VAR_DECL)
        ptrtmp = ptr;
      else
        {
          ptrtmp = create_tmp_reg (TREE_TYPE (ptr), "ptr");
          stmt = gimple_build_assign (ptrtmp, ptr);
	  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
	}
      ptr = fold_build_pointer_plus_hwi_loc (input_location,
					     ptrtmp, fixed_offset);
    }

  /* Emit the statement and gimplify the adjustment expression.  */
  ret = create_tmp_reg (TREE_TYPE (ptr), "adjusted_this");
  stmt = gimple_build_assign (ret, ptr);
  gsi_insert_after (bsi, stmt, GSI_NEW_STMT);

  return ret;
}
  1289. /* Expand thunk NODE to gimple if possible.
  1290. When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
  1291. no assembler is produced.
  1292. When OUTPUT_ASM_THUNK is true, also produce assembler for
  1293. thunks that are not lowered. */
  1294. bool
  1295. cgraph_node::expand_thunk (bool output_asm_thunks, bool force_gimple_thunk)
  1296. {
  1297. bool this_adjusting = thunk.this_adjusting;
  1298. HOST_WIDE_INT fixed_offset = thunk.fixed_offset;
  1299. HOST_WIDE_INT virtual_value = thunk.virtual_value;
  1300. tree virtual_offset = NULL;
  1301. tree alias = callees->callee->decl;
  1302. tree thunk_fndecl = decl;
  1303. tree a;
  1304. /* Instrumentation thunk is the same function with
  1305. a different signature. Never need to expand it. */
  1306. if (thunk.add_pointer_bounds_args)
  1307. return false;
  1308. if (!force_gimple_thunk && this_adjusting
  1309. && targetm.asm_out.can_output_mi_thunk (thunk_fndecl, fixed_offset,
  1310. virtual_value, alias))
  1311. {
  1312. const char *fnname;
  1313. tree fn_block;
  1314. tree restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
  1315. if (!output_asm_thunks)
  1316. {
  1317. analyzed = true;
  1318. return false;
  1319. }
  1320. if (in_lto_p)
  1321. get_untransformed_body ();
  1322. a = DECL_ARGUMENTS (thunk_fndecl);
  1323. current_function_decl = thunk_fndecl;
  1324. /* Ensure thunks are emitted in their correct sections. */
  1325. resolve_unique_section (thunk_fndecl, 0,
  1326. flag_function_sections);
  1327. DECL_RESULT (thunk_fndecl)
  1328. = build_decl (DECL_SOURCE_LOCATION (thunk_fndecl),
  1329. RESULT_DECL, 0, restype);
  1330. DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
  1331. fnname = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (thunk_fndecl));
  1332. /* The back end expects DECL_INITIAL to contain a BLOCK, so we
  1333. create one. */
  1334. fn_block = make_node (BLOCK);
  1335. BLOCK_VARS (fn_block) = a;
  1336. DECL_INITIAL (thunk_fndecl) = fn_block;
  1337. init_function_start (thunk_fndecl);
  1338. cfun->is_thunk = 1;
  1339. insn_locations_init ();
  1340. set_curr_insn_location (DECL_SOURCE_LOCATION (thunk_fndecl));
  1341. prologue_location = curr_insn_location ();
  1342. assemble_start_function (thunk_fndecl, fnname);
  1343. targetm.asm_out.output_mi_thunk (asm_out_file, thunk_fndecl,
  1344. fixed_offset, virtual_value, alias);
  1345. assemble_end_function (thunk_fndecl, fnname);
  1346. insn_locations_finalize ();
  1347. init_insn_lengths ();
  1348. free_after_compilation (cfun);
  1349. set_cfun (NULL);
  1350. TREE_ASM_WRITTEN (thunk_fndecl) = 1;
  1351. thunk.thunk_p = false;
  1352. analyzed = false;
  1353. }
  1354. else if (stdarg_p (TREE_TYPE (thunk_fndecl)))
  1355. {
  1356. error ("generic thunk code fails for method %qD which uses %<...%>",
  1357. thunk_fndecl);
  1358. TREE_ASM_WRITTEN (thunk_fndecl) = 1;
  1359. analyzed = true;
  1360. return false;
  1361. }
  1362. else
  1363. {
  1364. tree restype;
  1365. basic_block bb, then_bb, else_bb, return_bb;
  1366. gimple_stmt_iterator bsi;
  1367. int nargs = 0;
  1368. tree arg;
  1369. int i;
  1370. tree resdecl;
  1371. tree restmp = NULL;
  1372. tree resbnd = NULL;
  1373. gcall *call;
  1374. greturn *ret;
  1375. bool alias_is_noreturn = TREE_THIS_VOLATILE (alias);
  1376. if (in_lto_p)
  1377. get_untransformed_body ();
  1378. a = DECL_ARGUMENTS (thunk_fndecl);
  1379. current_function_decl = thunk_fndecl;
  1380. /* Ensure thunks are emitted in their correct sections. */
  1381. resolve_unique_section (thunk_fndecl, 0,
  1382. flag_function_sections);
  1383. DECL_IGNORED_P (thunk_fndecl) = 1;
  1384. bitmap_obstack_initialize (NULL);
  1385. if (thunk.virtual_offset_p)
  1386. virtual_offset = size_int (virtual_value);
  1387. /* Build the return declaration for the function. */
  1388. restype = TREE_TYPE (TREE_TYPE (thunk_fndecl));
  1389. if (DECL_RESULT (thunk_fndecl) == NULL_TREE)
  1390. {
  1391. resdecl = build_decl (input_location, RESULT_DECL, 0, restype);
  1392. DECL_ARTIFICIAL (resdecl) = 1;
  1393. DECL_IGNORED_P (resdecl) = 1;
  1394. DECL_RESULT (thunk_fndecl) = resdecl;
  1395. DECL_CONTEXT (DECL_RESULT (thunk_fndecl)) = thunk_fndecl;
  1396. }
  1397. else
  1398. resdecl = DECL_RESULT (thunk_fndecl);
  1399. bb = then_bb = else_bb = return_bb
  1400. = init_lowered_empty_function (thunk_fndecl, true, count);
  1401. bsi = gsi_start_bb (bb);
  1402. /* Build call to the function being thunked. */
  1403. if (!VOID_TYPE_P (restype) && !alias_is_noreturn)
  1404. {
  1405. if (DECL_BY_REFERENCE (resdecl))
  1406. {
  1407. restmp = gimple_fold_indirect_ref (resdecl);
  1408. if (!restmp)
  1409. restmp = build2 (MEM_REF,
  1410. TREE_TYPE (TREE_TYPE (DECL_RESULT (alias))),
  1411. resdecl,
  1412. build_int_cst (TREE_TYPE
  1413. (DECL_RESULT (alias)), 0));
  1414. }
  1415. else if (!is_gimple_reg_type (restype))
  1416. {
  1417. if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl)))
  1418. {
  1419. restmp = resdecl;
  1420. if (TREE_CODE (restmp) == VAR_DECL)
  1421. add_local_decl (cfun, restmp);
  1422. BLOCK_VARS (DECL_INITIAL (current_function_decl)) = restmp;
  1423. }
  1424. else
  1425. restmp = create_tmp_var (restype, "retval");
  1426. }
  1427. else
  1428. restmp = create_tmp_reg (restype, "retval");
  1429. }
  1430. for (arg = a; arg; arg = DECL_CHAIN (arg))
  1431. nargs++;
  1432. auto_vec<tree> vargs (nargs);
  1433. i = 0;
  1434. arg = a;
  1435. if (this_adjusting)
  1436. {
  1437. vargs.quick_push (thunk_adjust (&bsi, a, 1, fixed_offset,
  1438. virtual_offset));
  1439. arg = DECL_CHAIN (a);
  1440. i = 1;
  1441. }
  1442. if (nargs)
  1443. for (; i < nargs; i++, arg = DECL_CHAIN (arg))
  1444. {
  1445. tree tmp = arg;
  1446. if (!is_gimple_val (arg))
  1447. {
  1448. tmp = create_tmp_reg (TYPE_MAIN_VARIANT
  1449. (TREE_TYPE (arg)), "arg");
  1450. gimple stmt = gimple_build_assign (tmp, arg);
  1451. gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
  1452. }
  1453. vargs.quick_push (tmp);
  1454. }
  1455. call = gimple_build_call_vec (build_fold_addr_expr_loc (0, alias), vargs);
  1456. callees->call_stmt = call;
  1457. gimple_call_set_from_thunk (call, true);
  1458. gimple_call_set_with_bounds (call, instrumentation_clone);
  1459. /* Return slot optimization is always possible and in fact requred to
  1460. return values with DECL_BY_REFERENCE. */
  1461. if (aggregate_value_p (resdecl, TREE_TYPE (thunk_fndecl))
  1462. && (!is_gimple_reg_type (TREE_TYPE (resdecl))
  1463. || DECL_BY_REFERENCE (resdecl)))
  1464. gimple_call_set_return_slot_opt (call, true);
  1465. if (restmp && !alias_is_noreturn)
  1466. {
  1467. gimple_call_set_lhs (call, restmp);
  1468. gcc_assert (useless_type_conversion_p (TREE_TYPE (restmp),
  1469. TREE_TYPE (TREE_TYPE (alias))));
  1470. }
  1471. gsi_insert_after (&bsi, call, GSI_NEW_STMT);
  1472. if (!alias_is_noreturn)
  1473. {
  1474. if (instrumentation_clone
  1475. && !DECL_BY_REFERENCE (resdecl)
  1476. && restmp
  1477. && BOUNDED_P (restmp))
  1478. {
  1479. resbnd = chkp_insert_retbnd_call (NULL, restmp, &bsi);
  1480. create_edge (get_create (gimple_call_fndecl (gsi_stmt (bsi))),
  1481. as_a <gcall *> (gsi_stmt (bsi)),
  1482. callees->count, callees->frequency);
  1483. }
  1484. if (restmp && !this_adjusting
  1485. && (fixed_offset || virtual_offset))
  1486. {
  1487. tree true_label = NULL_TREE;
  1488. if (TREE_CODE (TREE_TYPE (restmp)) == POINTER_TYPE)
  1489. {
  1490. gimple stmt;
  1491. edge e;
  1492. /* If the return type is a pointer, we need to
  1493. protect against NULL. We know there will be an
  1494. adjustment, because that's why we're emitting a
  1495. thunk. */
  1496. then_bb = create_basic_block (NULL, (void *) 0, bb);
  1497. then_bb->count = count - count / 16;
  1498. then_bb->frequency = BB_FREQ_MAX - BB_FREQ_MAX / 16;
  1499. return_bb = create_basic_block (NULL, (void *) 0, then_bb);
  1500. return_bb->count = count;
  1501. return_bb->frequency = BB_FREQ_MAX;
  1502. else_bb = create_basic_block (NULL, (void *) 0, else_bb);
  1503. then_bb->count = count / 16;
  1504. then_bb->frequency = BB_FREQ_MAX / 16;
  1505. add_bb_to_loop (then_bb, bb->loop_father);
  1506. add_bb_to_loop (return_bb, bb->loop_father);
  1507. add_bb_to_loop (else_bb, bb->loop_father);
  1508. remove_edge (single_succ_edge (bb));
  1509. true_label = gimple_block_label (then_bb);
  1510. stmt = gimple_build_cond (NE_EXPR, restmp,
  1511. build_zero_cst (TREE_TYPE (restmp)),
  1512. NULL_TREE, NULL_TREE);
  1513. gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
  1514. e = make_edge (bb, then_bb, EDGE_TRUE_VALUE);
  1515. e->probability = REG_BR_PROB_BASE - REG_BR_PROB_BASE / 16;
  1516. e->count = count - count / 16;
  1517. e = make_edge (bb, else_bb, EDGE_FALSE_VALUE);
  1518. e->probability = REG_BR_PROB_BASE / 16;
  1519. e->count = count / 16;
  1520. e = make_edge (return_bb, EXIT_BLOCK_PTR_FOR_FN (cfun), 0);
  1521. e->probability = REG_BR_PROB_BASE;
  1522. e->count = count;
  1523. e = make_edge (then_bb, return_bb, EDGE_FALLTHRU);
  1524. e->probability = REG_BR_PROB_BASE;
  1525. e->count = count - count / 16;
  1526. e = make_edge (else_bb, return_bb, EDGE_FALLTHRU);
  1527. e->probability = REG_BR_PROB_BASE;
  1528. e->count = count / 16;
  1529. bsi = gsi_last_bb (then_bb);
  1530. }
  1531. restmp = thunk_adjust (&bsi, restmp, /*this_adjusting=*/0,
  1532. fixed_offset, virtual_offset);
  1533. if (true_label)
  1534. {
  1535. gimple stmt;
  1536. bsi = gsi_last_bb (else_bb);
  1537. stmt = gimple_build_assign (restmp,
  1538. build_zero_cst (TREE_TYPE (restmp)));
  1539. gsi_insert_after (&bsi, stmt, GSI_NEW_STMT);
  1540. bsi = gsi_last_bb (return_bb);
  1541. }
  1542. }
  1543. else
  1544. gimple_call_set_tail (call, true);
  1545. /* Build return value. */
  1546. if (!DECL_BY_REFERENCE (resdecl))
  1547. ret = gimple_build_return (restmp);
  1548. else
  1549. ret = gimple_build_return (resdecl);
  1550. gimple_return_set_retbnd (ret, resbnd);
  1551. gsi_insert_after (&bsi, ret, GSI_NEW_STMT);
  1552. }
  1553. else
  1554. {
  1555. gimple_call_set_tail (call, true);
  1556. remove_edge (single_succ_edge (bb));
  1557. }
  1558. cfun->gimple_df->in_ssa_p = true;
  1559. profile_status_for_fn (cfun)
  1560. = count ? PROFILE_READ : PROFILE_GUESSED;
  1561. /* FIXME: C++ FE should stop setting TREE_ASM_WRITTEN on thunks. */
  1562. TREE_ASM_WRITTEN (thunk_fndecl) = false;
  1563. delete_unreachable_blocks ();
  1564. update_ssa (TODO_update_ssa);
  1565. #ifdef ENABLE_CHECKING
  1566. verify_flow_info ();
  1567. #endif
  1568. free_dominance_info (CDI_DOMINATORS);
  1569. /* Since we want to emit the thunk, we explicitly mark its name as
  1570. referenced. */
  1571. thunk.thunk_p = false;
  1572. lowered = true;
  1573. bitmap_obstack_release (NULL);
  1574. }
  1575. current_function_decl = NULL;
  1576. set_cfun (NULL);
  1577. return true;
  1578. }
  1579. /* Assemble thunks and aliases associated to node. */
  1580. void
  1581. cgraph_node::assemble_thunks_and_aliases (void)
  1582. {
  1583. cgraph_edge *e;
  1584. ipa_ref *ref;
  1585. for (e = callers; e;)
  1586. if (e->caller->thunk.thunk_p
  1587. && !e->caller->thunk.add_pointer_bounds_args)
  1588. {
  1589. cgraph_node *thunk = e->caller;
  1590. e = e->next_caller;
  1591. thunk->expand_thunk (true, false);
  1592. thunk->assemble_thunks_and_aliases ();
  1593. }
  1594. else
  1595. e = e->next_caller;
  1596. FOR_EACH_ALIAS (this, ref)
  1597. {
  1598. cgraph_node *alias = dyn_cast <cgraph_node *> (ref->referring);
  1599. bool saved_written = TREE_ASM_WRITTEN (decl);
  1600. /* Force assemble_alias to really output the alias this time instead
  1601. of buffering it in same alias pairs. */
  1602. TREE_ASM_WRITTEN (decl) = 1;
  1603. do_assemble_alias (alias->decl,
  1604. DECL_ASSEMBLER_NAME (decl));
  1605. alias->assemble_thunks_and_aliases ();
  1606. TREE_ASM_WRITTEN (decl) = saved_written;
  1607. }
  1608. }
/* Expand function specified by node: run the remaining intraprocedural
   passes, generate RTL and assembly for its body, emit attached thunks
   and aliases, and release the body afterwards.  */

void
cgraph_node::expand (void)
{
  location_t saved_loc;

  /* We ought to not compile any inline clones.  */
  gcc_assert (!global.inlined_to);

  announce_function (decl);
  process = 0;
  gcc_assert (lowered);
  get_untransformed_body ();

  /* Generate RTL for the body of DECL.  */

  timevar_push (TV_REST_OF_COMPILATION);

  gcc_assert (symtab->global_info_ready);

  /* Initialize the default bitmap obstack.  */
  bitmap_obstack_initialize (NULL);

  /* Initialize the RTL code for the function.  */
  current_function_decl = decl;
  saved_loc = input_location;
  input_location = DECL_SOURCE_LOCATION (decl);
  init_function_start (decl);

  gimple_register_cfg_hooks ();

  bitmap_obstack_initialize (&reg_obstack); /* FIXME, only at RTL generation*/

  /* Apply any pending IPA transforms recorded for this node before
     running the intraprocedural pass pipeline.  */
  execute_all_ipa_transforms ();

  /* Perform all tree transforms and optimizations.  */

  /* Signal the start of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_START, NULL);

  execute_pass_list (cfun, g->get_passes ()->all_passes);

  /* Signal the end of passes.  */
  invoke_plugin_callbacks (PLUGIN_ALL_PASSES_END, NULL);

  bitmap_obstack_release (&reg_obstack);

  /* Release the default bitmap obstack.  */
  bitmap_obstack_release (NULL);

  /* If requested, warn about function definitions where the function will
     return a value (usually of some struct or union type) which itself will
     take up a lot of stack space.  */
  if (warn_larger_than && !DECL_EXTERNAL (decl) && TREE_TYPE (decl))
    {
      tree ret_type = TREE_TYPE (TREE_TYPE (decl));

      if (ret_type && TYPE_SIZE_UNIT (ret_type)
	  && TREE_CODE (TYPE_SIZE_UNIT (ret_type)) == INTEGER_CST
	  && 0 < compare_tree_int (TYPE_SIZE_UNIT (ret_type),
				   larger_than_size))
	{
	  unsigned int size_as_int
	    = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (ret_type));

	  /* Report the exact size only when it fits in unsigned int.  */
	  if (compare_tree_int (TYPE_SIZE_UNIT (ret_type), size_as_int) == 0)
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is %u bytes",
                     decl, size_as_int);
	  else
	    warning (OPT_Wlarger_than_, "size of return value of %q+D is larger than %wd bytes",
                     decl, larger_than_size);
	}
    }

  gimple_set_body (decl, NULL);
  if (DECL_STRUCT_FUNCTION (decl) == 0
      && !cgraph_node::get (decl)->origin)
    {
      /* Stop pointing to the local nodes about to be freed.
	 But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition.
	 For a nested function, this is done in c_pop_function_context.
	 If rest_of_compilation set this to 0, leave it 0.  */
      if (DECL_INITIAL (decl) != 0)
	DECL_INITIAL (decl) = error_mark_node;
    }

  input_location = saved_loc;

  ggc_collect ();
  timevar_pop (TV_REST_OF_COMPILATION);

  /* Make sure that BE didn't give up on compiling.  */
  gcc_assert (TREE_ASM_WRITTEN (decl));
  set_cfun (NULL);
  current_function_decl = NULL;

  /* It would make a lot more sense to output thunks before function body
     to get more forward and fewer backward jumps.  This however would need
     solving problem with comdats.  See PR48668.  Also aliases must come after
     function itself to make one pass assemblers, like one on AIX, happy.
     See PR 50689.
     FIXME: Perhaps thunks should be moved before function IFF they are not in
     comdat groups.  */
  assemble_thunks_and_aliases ();
  release_body ();
  /* Eliminate all call edges.  This is important so the GIMPLE_CALL no longer
     points to the dead function body.  */
  remove_callees ();
  remove_all_references ();
}
/* Node comparer that is responsible for the order that corresponds
   to time when a function was launched for the first time.

   NOTE: expand_all_functions walks the sorted array BACKWARDS, so a node
   that compares "greater" here is expanded earlier:
     - if either node has no time profile (tp_first_run == 0), the
       profiled node compares greater and is expanded first;
     - otherwise nodes are keyed so that expansion happens in increasing
       tp_first_run order, with ties broken by reverse symtab order.
   The subtractions mix unsigned tp_first_run values; the implicit
   conversion back to int relies on wraparound giving the right sign —
   presumably fine for realistic run counters, but worth confirming.  */

static int
node_cmp (const void *pa, const void *pb)
{
  const cgraph_node *a = *(const cgraph_node * const *) pa;
  const cgraph_node *b = *(const cgraph_node * const *) pb;

  /* Functions with time profile must be before these without profile.  */
  if (!a->tp_first_run || !b->tp_first_run)
    return a->tp_first_run - b->tp_first_run;

  return a->tp_first_run != b->tp_first_run
	 ? b->tp_first_run - a->tp_first_run
	 : b->order - a->order;
}
/* Expand all functions that must be output.

   Attempt to topologically sort the nodes so function is output when
   all called functions are already assembled to allow data to be
   propagated across the callgraph.  Use a stack to get smaller distance
   between a function and its callees (later we may choose to use a more
   sophisticated algorithm for function reordering; we will likely want
   to use subsections to make the output functions appear in top-down
   order).  */

static void
expand_all_functions (void)
{
  cgraph_node *node;
  cgraph_node **order = XCNEWVEC (cgraph_node *,
				  symtab->cgraph_count);
  unsigned int expanded_func_count = 0, profiled_func_count = 0;
  int order_pos, new_order_pos = 0;
  int i;

  order_pos = ipa_reverse_postorder (order);
  gcc_assert (order_pos == symtab->cgraph_count);

  /* Garbage collector may remove inline clones we eliminate during
     optimization.  So we must be sure to not reference them.  Compact
     the array down to only the nodes marked for output.  */
  for (i = 0; i < order_pos; i++)
    if (order[i]->process)
      order[new_order_pos++] = order[i];

  /* With -fprofile-reorder-functions, re-sort by first-run time; see
     node_cmp for how the keys interact with the backwards walk below.  */
  if (flag_profile_reorder_functions)
    qsort (order, new_order_pos, sizeof (cgraph_node *), node_cmp);

  for (i = new_order_pos - 1; i >= 0; i--)
    {
      node = order[i];

      if (node->process)
	{
	  expanded_func_count++;
	  if (node->tp_first_run)
	    profiled_func_count++;

	  if (symtab->dump_file)
	    fprintf (symtab->dump_file,
		     "Time profile order in expand_all_functions:%s:%d\n",
		     node->asm_name (), node->tp_first_run);
	  node->process = 0;
	  node->expand ();
	}
    }

  if (dump_file)
    fprintf (dump_file, "Expanded functions with time profile (%s):%u/%u\n",
	     main_input_filename, profiled_func_count, expanded_func_count);

  if (symtab->dump_file && flag_profile_reorder_functions)
    fprintf (symtab->dump_file, "Expanded functions with time profile:%u/%u\n",
	     profiled_func_count, expanded_func_count);

  symtab->process_new_functions ();
  free_gimplify_stack ();

  free (order);
}
/* This is used to sort the node types by the cgraph order number.  */

enum cgraph_order_sort_kind
{
  ORDER_UNDEFINED = 0,		/* Slot not occupied by any symbol.  */
  ORDER_FUNCTION,		/* Slot holds a cgraph_node.  */
  ORDER_VAR,			/* Slot holds a varpool_node.  */
  ORDER_ASM			/* Slot holds a toplevel asm statement.  */
};
/* One element of the ordering array built by output_in_order; KIND
   selects which member of the union U is valid.  */

struct cgraph_order_sort
{
  enum cgraph_order_sort_kind kind;
  union
  {
    cgraph_node *f;		/* Valid when kind == ORDER_FUNCTION.  */
    varpool_node *v;		/* Valid when kind == ORDER_VAR.  */
    asm_node *a;		/* Valid when kind == ORDER_ASM.  */
  } u;
};
/* Output all functions, variables, and asm statements in the order
   according to their order fields, which is the order in which they
   appeared in the file.  This implements -fno-toplevel-reorder.  In
   this mode we may output functions and variables which don't really
   need to be output.
   When NO_REORDER is true only do this for symbols marked no reorder.  */

static void
output_in_order (bool no_reorder)
{
  int max;
  cgraph_order_sort *nodes;
  int i;
  cgraph_node *pf;
  varpool_node *pv;
  asm_node *pa;

  /* Build a sparse array indexed by each symbol's order number; the
     asserts below check that no order number is claimed twice.  */
  max = symtab->order;
  nodes = XCNEWVEC (cgraph_order_sort, max);

  FOR_EACH_DEFINED_FUNCTION (pf)
    {
      /* Thunks and aliases are emitted with their function body.  */
      if (pf->process && !pf->thunk.thunk_p && !pf->alias)
	{
	  if (no_reorder && !pf->no_reorder)
	    continue;
	  i = pf->order;
	  gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	  nodes[i].kind = ORDER_FUNCTION;
	  nodes[i].u.f = pf;
	}
    }

  FOR_EACH_DEFINED_VARIABLE (pv)
    if (!DECL_EXTERNAL (pv->decl))
      {
	if (no_reorder && !pv->no_reorder)
	  continue;
	i = pv->order;
	gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
	nodes[i].kind = ORDER_VAR;
	nodes[i].u.v = pv;
      }

  for (pa = symtab->first_asm_symbol (); pa; pa = pa->next)
    {
      i = pa->order;
      gcc_assert (nodes[i].kind == ORDER_UNDEFINED);
      nodes[i].kind = ORDER_ASM;
      nodes[i].u.a = pa;
    }

  /* In toplevel reorder mode we output all statics; mark them as needed.  */

  for (i = 0; i < max; ++i)
    if (nodes[i].kind == ORDER_VAR)
      nodes[i].u.v->finalize_named_section_flags ();

  /* Emit every recorded symbol in its original source order.  */
  for (i = 0; i < max; ++i)
    {
      switch (nodes[i].kind)
	{
	case ORDER_FUNCTION:
	  nodes[i].u.f->process = 0;
	  nodes[i].u.f->expand ();
	  break;

	case ORDER_VAR:
	  nodes[i].u.v->assemble_decl ();
	  break;

	case ORDER_ASM:
	  assemble_asm (nodes[i].u.a->asm_str);
	  break;

	case ORDER_UNDEFINED:
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  symtab->clear_asm_symbols ();
  free (nodes);
}
/* Run the interprocedural pass pipeline: small IPA passes, summary
   generation, optional LTO/offload bytecode streaming, and — unless we
   are only producing a slim LTO object — the regular IPA passes.  */

static void
ipa_passes (void)
{
  gcc::pass_manager *passes = g->get_passes ();

  set_cfun (NULL);
  current_function_decl = NULL;
  gimple_register_cfg_hooks ();
  bitmap_obstack_initialize (NULL);

  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_START, NULL);

  if (!in_lto_p)
    {
      execute_ipa_pass_list (passes->all_small_ipa_passes);
      if (seen_error ())
	return;
    }

  /* This extra symtab_remove_unreachable_nodes pass tends to catch some
     devirtualization and other changes where removal iterates.  */
  symtab->remove_unreachable_nodes (symtab->dump_file);

  /* If pass_all_early_optimizations was not scheduled, the state of
     the cgraph will not be properly updated.  Update it now.  */
  if (symtab->state < IPA_SSA)
    symtab->state = IPA_SSA;

  if (!in_lto_p)
    {
      /* Generate coverage variables and constructors.  */
      coverage_finish ();

      /* Process new functions added.  */
      set_cfun (NULL);
      current_function_decl = NULL;
      symtab->process_new_functions ();

      execute_ipa_summary_passes
	((ipa_opt_pass_d *) passes->all_regular_ipa_passes);
    }

  /* Some targets need to handle LTO assembler output specially.  */
  if (flag_generate_lto || flag_generate_offload)
    targetm.asm_out.lto_start ();

  if (!in_lto_p)
    {
      /* Offload summaries are streamed into their own sections before the
	 host LTO summaries; lto_stream_offload_p selects the mode.  */
      if (g->have_offload)
	{
	  section_name_prefix = OFFLOAD_SECTION_NAME_PREFIX;
	  lto_stream_offload_p = true;
	  ipa_write_summaries ();
	  lto_stream_offload_p = false;
	}
      if (flag_lto)
	{
	  section_name_prefix = LTO_SECTION_NAME_PREFIX;
	  lto_stream_offload_p = false;
	  ipa_write_summaries ();
	}
    }

  if (flag_generate_lto || flag_generate_offload)
    targetm.asm_out.lto_end ();

  /* Run regular IPA passes now unless this is the compile stage of a
     non-fat LTO build (in which case they run at link time).  */
  if (!flag_ltrans && (in_lto_p || !flag_lto || flag_fat_lto_objects))
    execute_ipa_pass_list (passes->all_regular_ipa_passes);
  invoke_plugin_callbacks (PLUGIN_ALL_IPA_PASSES_END, NULL);

  bitmap_obstack_release (NULL);
}
  1911. /* Return string alias is alias of. */
  1912. static tree
  1913. get_alias_symbol (tree decl)
  1914. {
  1915. tree alias = lookup_attribute ("alias", DECL_ATTRIBUTES (decl));
  1916. return get_identifier (TREE_STRING_POINTER
  1917. (TREE_VALUE (TREE_VALUE (alias))));
  1918. }
/* Weakrefs may be associated to external decls and thus not output
   at expansion time.  Emit all necessary aliases.  */

void
symbol_table::output_weakrefs (void)
{
  symtab_node *node;
  cgraph_node *cnode;
  FOR_EACH_SYMBOL (node)
    /* Pick weakref aliases not yet written out; for nodes with an
       instrumented version, that version must be unwritten too.  */
    if (node->alias
	&& !TREE_ASM_WRITTEN (node->decl)
	&& (!(cnode = dyn_cast <cgraph_node *> (node))
	    || !cnode->instrumented_version
	    || !TREE_ASM_WRITTEN (cnode->instrumented_version->decl))
	&& node->weakref)
      {
	tree target;

	/* Weakrefs are special by not requiring target definition in current
	   compilation unit.  It is thus bit hard to work out what we want to
	   alias.
	   When alias target is defined, we need to fetch it from symtab
	   reference, otherwise it is pointed to by alias_target.  */
	if (node->alias_target)
	  target = (DECL_P (node->alias_target)
		    ? DECL_ASSEMBLER_NAME (node->alias_target)
		    : node->alias_target);
	else if (node->analyzed)
	  target = DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl);
	else
	  {
	    /* An unanalyzed weakref without alias_target is not expected;
	       the assignment below is dead code after gcc_unreachable,
	       kept only as a fallback for non-checking builds.  */
	    gcc_unreachable ();
	    target = get_alias_symbol (node->decl);
	  }
	do_assemble_alias (node->decl, target);
      }
}
/* Perform simple optimizations based on callgraph: run IPA passes,
   decide what must be output, and drive expansion of all functions,
   variables, thunks, aliases, and toplevel asm statements.  */

void
symbol_table::compile (void)
{
  if (seen_error ())
    return;

#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
#endif

  timevar_push (TV_CGRAPHOPT);
  if (pre_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption before IPA\n");
      dump_memory_report (false);
    }
  if (!quiet_flag)
    fprintf (stderr, "Performing interprocedural optimizations\n");
  state = IPA;

  /* Offloading requires LTO infrastructure.  */
  if (!in_lto_p && g->have_offload)
    flag_generate_offload = 1;

  /* If LTO is enabled, initialize the streamer hooks needed by GIMPLE.  */
  if (flag_generate_lto || flag_generate_offload)
    lto_streamer_hooks_init ();

  /* Don't run the IPA passes if there was any error or sorry messages.  */
  if (!seen_error ())
    ipa_passes ();

  /* Do nothing else if any IPA pass found errors or if we are just
     streaming LTO (slim object: expansion happens at link time).  */
  if (seen_error ()
      || (!in_lto_p && flag_lto && !flag_fat_lto_objects))
    {
      timevar_pop (TV_CGRAPHOPT);
      return;
    }

  global_info_ready = true;
  if (dump_file)
    {
      fprintf (dump_file, "Optimized ");
      symtab_node:: dump_table (dump_file);
    }
  if (post_ipa_mem_report)
    {
      fprintf (stderr, "Memory consumption after IPA\n");
      dump_memory_report (false);
    }
  timevar_pop (TV_CGRAPHOPT);

  /* Output everything.  */
  (*debug_hooks->assembly_start) ();
  if (!quiet_flag)
    fprintf (stderr, "Assembling functions:\n");
#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
#endif

  materialize_all_clones ();
  bitmap_obstack_initialize (NULL);
  execute_ipa_pass_list (g->get_passes ()->all_late_ipa_passes);
  bitmap_obstack_release (NULL);
  mark_functions_to_output ();

  /* When weakref support is missing, we automatically translate all
     references to NODE to references to its ultimate alias target.
     The renaming mechanism uses flag IDENTIFIER_TRANSPARENT_ALIAS and
     TREE_CHAIN.

     Set up this mapping before we output any assembler but once we are sure
     that all symbol renaming is done.

     FIXME: All this ugliness can go away if we just do renaming at gimple
     level by physically rewriting the IL.  At the moment we can only redirect
     calls, so we need infrastructure for renaming references as well.  */
#ifndef ASM_OUTPUT_WEAKREF
  symtab_node *node;

  FOR_EACH_SYMBOL (node)
    if (node->alias
	&& lookup_attribute ("weakref", DECL_ATTRIBUTES (node->decl)))
      {
	IDENTIFIER_TRANSPARENT_ALIAS
	   (DECL_ASSEMBLER_NAME (node->decl)) = 1;
	TREE_CHAIN (DECL_ASSEMBLER_NAME (node->decl))
	   = (node->alias_target ? node->alias_target
	      : DECL_ASSEMBLER_NAME (node->get_alias_target ()->decl));
      }
#endif

  state = EXPANSION;

  if (!flag_toplevel_reorder)
    output_in_order (false);
  else
    {
      /* Output first asm statements and anything ordered.  The process
	 flag is cleared for these nodes, so we skip them later.  */
      output_in_order (true);
      expand_all_functions ();
      output_variables ();
    }

  process_new_functions ();
  state = FINISHED;
  output_weakrefs ();

  if (dump_file)
    {
      fprintf (dump_file, "\nFinal ");
      symtab_node::dump_table (dump_file);
    }
#ifdef ENABLE_CHECKING
  symtab_node::verify_symtab_nodes ();
  /* Double check that all inline clones are gone and that all
     function bodies have been released from memory.  */
  if (!seen_error ())
    {
      cgraph_node *node;
      bool error_found = false;

      FOR_EACH_DEFINED_FUNCTION (node)
	if (node->global.inlined_to
	    || gimple_has_body_p (node->decl))
	  {
	    error_found = true;
	    node->debug ();
	  }
      if (error_found)
	internal_error ("nodes with unreleased memory found");
    }
#endif
}
/* Analyze the whole compilation unit once it is parsed completely.  */

void
symbol_table::finalize_compilation_unit (void)
{
  timevar_push (TV_CGRAPH);

  /* If we're here there's no current function anymore.  Some frontends
     are lazy in clearing these.  */
  current_function_decl = NULL;
  set_cfun (NULL);

  /* Do not skip analyzing the functions if there were errors, we
     miss diagnostics for following functions otherwise.  */

  /* Emit size functions we didn't inline.  */
  finalize_size_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  handle_alias_pairs ();

  if (!quiet_flag)
    {
      fprintf (stderr, "\nAnalyzing compilation unit\n");
      fflush (stderr);
    }

  if (flag_dump_passes)
    dump_passes ();

  /* Gimplify and lower all functions, compute reachability and
     remove unreachable nodes.  */
  analyze_functions ();

  /* Mark alias targets necessary and emit diagnostics.  */
  handle_alias_pairs ();

  /* Gimplify and lower thunks.  Note analyze_functions runs a second
     time here to pick up anything created by alias handling.  */
  analyze_functions ();

  /* Finally drive the pass manager.  */
  compile ();

  timevar_pop (TV_CGRAPH);
}
  2106. /* Reset all state within cgraphunit.c so that we can rerun the compiler
  2107. within the same process. For use by toplev::finalize. */
  2108. void
  2109. cgraphunit_c_finalize (void)
  2110. {
  2111. gcc_assert (cgraph_new_nodes.length () == 0);
  2112. cgraph_new_nodes.truncate (0);
  2113. vtable_entry_type = NULL;
  2114. queued_nodes = &symtab_terminator;
  2115. first_analyzed = NULL;
  2116. first_analyzed_var = NULL;
  2117. }
/* Creates a wrapper from cgraph_node to TARGET node.  Thunk is used for this
   kind of wrapper method.  The node's body is discarded and replaced with a
   lowered GIMPLE body that simply forwards to TARGET.  */

void
cgraph_node::create_wrapper (cgraph_node *target)
{
  /* Preserve DECL_RESULT so we get right by reference flag.  */
  tree decl_result = DECL_RESULT (decl);

  /* Remove the function's body but keep arguments to be reused
     for thunk.  */
  release_body (true);
  reset ();

  DECL_UNINLINABLE (decl) = false;
  DECL_RESULT (decl) = decl_result;
  DECL_INITIAL (decl) = NULL;
  /* A fresh struct function is required before expand_thunk builds the
     new body.  */
  allocate_struct_function (decl, false);
  set_cfun (NULL);

  /* Turn alias into thunk and expand it into GIMPLE representation.  */
  definition = true;

  /* Zero the thunk info: a plain wrapper has no this/virtual offset
     adjustments.  */
  memset (&thunk, 0, sizeof (cgraph_thunk_info));
  thunk.thunk_p = true;
  create_edge (target, NULL, count, CGRAPH_FREQ_BASE);

  /* Arguments are reused by the new body; drop any addressability left
     over from the released body.  */
  tree arguments = DECL_ARGUMENTS (decl);

  while (arguments)
    {
      TREE_ADDRESSABLE (arguments) = false;
      arguments = TREE_CHAIN (arguments);
    }

  expand_thunk (false, true);

  /* Inline summary set-up.  */
  analyze ();
  inline_analyze_function (this);
}
  2150. #include "gt-cgraphunit.h"