/* tree-stdarg.c */
/* Pass computing data for optimizing stdarg functions.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.
   Contributed by Jakub Jelinek <jakub@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
  16. #include "config.h"
  17. #include "system.h"
  18. #include "coretypes.h"
  19. #include "tm.h"
  20. #include "hash-set.h"
  21. #include "machmode.h"
  22. #include "vec.h"
  23. #include "double-int.h"
  24. #include "input.h"
  25. #include "alias.h"
  26. #include "symtab.h"
  27. #include "wide-int.h"
  28. #include "inchash.h"
  29. #include "tree.h"
  30. #include "fold-const.h"
  31. #include "hard-reg-set.h"
  32. #include "input.h"
  33. #include "function.h"
  34. #include "langhooks.h"
  35. #include "gimple-pretty-print.h"
  36. #include "target.h"
  37. #include "bitmap.h"
  38. #include "predict.h"
  39. #include "dominance.h"
  40. #include "cfg.h"
  41. #include "basic-block.h"
  42. #include "tree-ssa-alias.h"
  43. #include "internal-fn.h"
  44. #include "gimple-expr.h"
  45. #include "is-a.h"
  46. #include "gimple.h"
  47. #include "gimple-iterator.h"
  48. #include "gimple-walk.h"
  49. #include "gimple-ssa.h"
  50. #include "tree-phinodes.h"
  51. #include "ssa-iterators.h"
  52. #include "stringpool.h"
  53. #include "tree-ssanames.h"
  54. #include "sbitmap.h"
  55. #include "tree-pass.h"
  56. #include "tree-stdarg.h"
/* A simple pass that attempts to optimize stdarg functions on architectures
   that need to save register arguments to stack on entry to stdarg functions.
   If the function doesn't use any va_start macros, no registers need to
   be saved.  If va_start macros are used, the va_list variables don't escape
   the function, it is only necessary to save registers that will be used
   in va_arg macros.  E.g. if va_arg is only used with integral types
   in the function, floating point registers don't need to be saved, etc.  */
  64. /* Return true if basic block VA_ARG_BB is dominated by VA_START_BB and
  65. is executed at most as many times as VA_START_BB. */
  66. static bool
  67. reachable_at_most_once (basic_block va_arg_bb, basic_block va_start_bb)
  68. {
  69. vec<edge> stack = vNULL;
  70. edge e;
  71. edge_iterator ei;
  72. sbitmap visited;
  73. bool ret;
  74. if (va_arg_bb == va_start_bb)
  75. return true;
  76. if (! dominated_by_p (CDI_DOMINATORS, va_arg_bb, va_start_bb))
  77. return false;
  78. visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
  79. bitmap_clear (visited);
  80. ret = true;
  81. FOR_EACH_EDGE (e, ei, va_arg_bb->preds)
  82. stack.safe_push (e);
  83. while (! stack.is_empty ())
  84. {
  85. basic_block src;
  86. e = stack.pop ();
  87. src = e->src;
  88. if (e->flags & EDGE_COMPLEX)
  89. {
  90. ret = false;
  91. break;
  92. }
  93. if (src == va_start_bb)
  94. continue;
  95. /* va_arg_bb can be executed more times than va_start_bb. */
  96. if (src == va_arg_bb)
  97. {
  98. ret = false;
  99. break;
  100. }
  101. gcc_assert (src != ENTRY_BLOCK_PTR_FOR_FN (cfun));
  102. if (! bitmap_bit_p (visited, src->index))
  103. {
  104. bitmap_set_bit (visited, src->index);
  105. FOR_EACH_EDGE (e, ei, src->preds)
  106. stack.safe_push (e);
  107. }
  108. }
  109. stack.release ();
  110. sbitmap_free (visited);
  111. return ret;
  112. }
/* For statement COUNTER = RHS, if RHS is COUNTER + constant,
   return constant, otherwise return HOST_WIDE_INT_M1U.
   GPR_P is true if this is GPR counter.  */

static unsigned HOST_WIDE_INT
va_list_counter_bump (struct stdarg_info *si, tree counter, tree rhs,
		      bool gpr_p)
{
  tree lhs, orig_lhs;
  gimple stmt;
  unsigned HOST_WIDE_INT ret = 0, val, counter_val;
  unsigned int max_size;

  /* Lazily allocate the per-SSA-name offset cache; -1 marks
     "offset not known yet" for a given SSA version.  */
  if (si->offsets == NULL)
    {
      unsigned int i;

      si->offsets = XNEWVEC (int, num_ssa_names);
      for (i = 0; i < num_ssa_names; ++i)
	si->offsets[i] = -1;
    }

  counter_val = gpr_p ? cfun->va_list_gpr_size : cfun->va_list_fpr_size;
  max_size = gpr_p ? VA_LIST_MAX_GPR_SIZE : VA_LIST_MAX_FPR_SIZE;
  orig_lhs = lhs = rhs;

  /* First pass: walk the SSA def chain backwards from RHS, accumulating
     the constant increments in RET, until we either reach a load of
     COUNTER itself or hit a name whose offset was already cached.  */
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      /* Cached offset: the difference between the current counter value
	 and the cached one gives the increment contributed below the
	 already-analyzed name.  Note RET may wrap here; callers treat
	 HOST_WIDE_INT_M1U as failure.  */
      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	{
	  if (counter_val >= max_size)
	    {
	      ret = max_size;
	      break;
	    }

	  ret -= counter_val - si->offsets[SSA_NAME_VERSION (lhs)];
	  break;
	}

      stmt = SSA_NAME_DEF_STMT (lhs);

      if (!is_gimple_assign (stmt) || gimple_assign_lhs (stmt) != lhs)
	return HOST_WIDE_INT_M1U;

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      /* Plain copies and casts: step through without changing RET.  */
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      /* lhs = name + CST: accumulate the constant bump.  */
      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  ret += tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      /* lhs = &MEM_REF[name + CST]: same as an addition of CST.  */
      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  ret += tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      if (get_gimple_rhs_class (rhs_code) != GIMPLE_SINGLE_RHS)
	return HOST_WIDE_INT_M1U;

      /* The chain must terminate in a load of COUNTER itself
	 (or the same COMPONENT_REF field of the same base).  */
      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (counter) != TREE_CODE (rhs))
	return HOST_WIDE_INT_M1U;

      if (TREE_CODE (counter) == COMPONENT_REF)
	{
	  if (get_base_address (counter) != get_base_address (rhs)
	      || TREE_CODE (TREE_OPERAND (rhs, 1)) != FIELD_DECL
	      || TREE_OPERAND (counter, 1) != TREE_OPERAND (rhs, 1))
	    return HOST_WIDE_INT_M1U;
	}
      else if (counter != rhs)
	return HOST_WIDE_INT_M1U;

      lhs = NULL;
    }

  /* Second pass: re-walk the same chain and record each intermediate
     name's absolute counter offset (clamped at MAX_SIZE) in the cache,
     so later bumps through these names resolve via the fast path.  */
  lhs = orig_lhs;
  val = ret + counter_val;
  while (lhs)
    {
      enum tree_code rhs_code;
      tree rhs1;

      if (si->offsets[SSA_NAME_VERSION (lhs)] != -1)
	break;

      if (val >= max_size)
	si->offsets[SSA_NAME_VERSION (lhs)] = max_size;
      else
	si->offsets[SSA_NAME_VERSION (lhs)] = val;

      /* The first pass validated every statement on the chain, so the
	 accessors below are safe without re-checking is_gimple_assign.  */
      stmt = SSA_NAME_DEF_STMT (lhs);

      rhs_code = gimple_assign_rhs_code (stmt);
      rhs1 = gimple_assign_rhs1 (stmt);
      if ((get_gimple_rhs_class (rhs_code) == GIMPLE_SINGLE_RHS
	   || gimple_assign_cast_p (stmt))
	  && TREE_CODE (rhs1) == SSA_NAME)
	{
	  lhs = rhs1;
	  continue;
	}

      if ((rhs_code == POINTER_PLUS_EXPR
	   || rhs_code == PLUS_EXPR)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && tree_fits_uhwi_p (gimple_assign_rhs2 (stmt)))
	{
	  val -= tree_to_uhwi (gimple_assign_rhs2 (stmt));
	  lhs = rhs1;
	  continue;
	}

      if (rhs_code == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (rhs1, 0)) == MEM_REF
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0)) == SSA_NAME
	  && tree_fits_uhwi_p (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1)))
	{
	  val -= tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (rhs1, 0), 1));
	  lhs = TREE_OPERAND (TREE_OPERAND (rhs1, 0), 0);
	  continue;
	}

      lhs = NULL;
    }

  return ret;
}
  238. /* Called by walk_tree to look for references to va_list variables. */
  239. static tree
  240. find_va_list_reference (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
  241. void *data)
  242. {
  243. bitmap va_list_vars = (bitmap) ((struct walk_stmt_info *) data)->info;
  244. tree var = *tp;
  245. if (TREE_CODE (var) == SSA_NAME)
  246. {
  247. if (bitmap_bit_p (va_list_vars, SSA_NAME_VERSION (var)))
  248. return var;
  249. }
  250. else if (TREE_CODE (var) == VAR_DECL)
  251. {
  252. if (bitmap_bit_p (va_list_vars, DECL_UID (var) + num_ssa_names))
  253. return var;
  254. }
  255. return NULL_TREE;
  256. }
/* Helper function of va_list_counter_struct_op.  Compute
   cfun->va_list_{g,f}pr_size.  AP is a va_list GPR/FPR counter,
   if WRITE_P is true, seen in AP = VAR, otherwise seen in VAR = AP
   statement.  GPR_P is true if AP is a GPR counter, false if it is
   a FPR counter.  */

static void
va_list_counter_op (struct stdarg_info *si, tree ap, tree var, bool gpr_p,
		    bool write_p)
{
  unsigned HOST_WIDE_INT increment;

  /* compute_sizes < 0 means "not decided yet for this bb": decide once
     whether this bb runs at most once per va_start, which is the
     precondition for treating counter bumps as additive.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* "(increment = ...) + 1 > 1" filters out both 0 (no bump) and
     HOST_WIDE_INT_M1U (analysis failure) in one unsigned-wrap test.  */
  if (write_p
      && si->compute_sizes
      && (increment = va_list_counter_bump (si, ap, var, gpr_p)) + 1 > 1)
    {
      if (gpr_p && cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
	{
	  cfun->va_list_gpr_size += increment;
	  return;
	}

      if (!gpr_p && cfun->va_list_fpr_size + increment < VA_LIST_MAX_FPR_SIZE)
	{
	  cfun->va_list_fpr_size += increment;
	  return;
	}
    }

  /* Unrecognized write, or any access in a bb we could not prove runs
     at most once: conservatively assume all registers are needed.  */
  if (write_p || !si->compute_sizes)
    {
      if (gpr_p)
	cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      else
	cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
}
  302. /* If AP is a va_list GPR/FPR counter, compute cfun->va_list_{g,f}pr_size.
  303. If WRITE_P is true, AP has been seen in AP = VAR assignment, if WRITE_P
  304. is false, AP has been seen in VAR = AP assignment.
  305. Return true if the AP = VAR (resp. VAR = AP) statement is a recognized
  306. va_arg operation that doesn't cause the va_list variable to escape
  307. current function. */
  308. static bool
  309. va_list_counter_struct_op (struct stdarg_info *si, tree ap, tree var,
  310. bool write_p)
  311. {
  312. tree base;
  313. if (TREE_CODE (ap) != COMPONENT_REF
  314. || TREE_CODE (TREE_OPERAND (ap, 1)) != FIELD_DECL)
  315. return false;
  316. if (TREE_CODE (var) != SSA_NAME
  317. || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (var)))
  318. return false;
  319. base = get_base_address (ap);
  320. if (TREE_CODE (base) != VAR_DECL
  321. || !bitmap_bit_p (si->va_list_vars, DECL_UID (base) + num_ssa_names))
  322. return false;
  323. if (TREE_OPERAND (ap, 1) == va_list_gpr_counter_field)
  324. va_list_counter_op (si, ap, var, true, write_p);
  325. else if (TREE_OPERAND (ap, 1) == va_list_fpr_counter_field)
  326. va_list_counter_op (si, ap, var, false, write_p);
  327. return true;
  328. }
/* Check for TEM = AP.  Return true if found and the caller shouldn't
   search for va_list references in the statement.  */

static bool
va_list_ptr_read (struct stdarg_info *si, tree ap, tree tem)
{
  /* AP must be one of the tracked va_list decls.  */
  if (TREE_CODE (ap) != VAR_DECL
      || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
    return false;

  /* TEM must be an ordinary SSA temporary, not a tracked va_list.  */
  if (TREE_CODE (tem) != SSA_NAME
      || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem)))
    return false;

  /* Lazily decide (once per bb; < 0 means undecided) whether this bb
     runs at most once per va_start.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    return false;

  /* Called for its side effect of caching TEM's offset from AP;
     M1U means the pointer value could not be tracked.  */
  if (va_list_counter_bump (si, ap, tem, true) == HOST_WIDE_INT_M1U)
    return false;

  /* Note the temporary, as we need to track whether it doesn't escape
     the current function.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (tem));
  return true;
}
  364. /* Check for:
  365. tem1 = AP;
  366. TEM2 = tem1 + CST;
  367. AP = TEM2;
  368. sequence and update cfun->va_list_gpr_size. Return true if found. */
  369. static bool
  370. va_list_ptr_write (struct stdarg_info *si, tree ap, tree tem2)
  371. {
  372. unsigned HOST_WIDE_INT increment;
  373. if (TREE_CODE (ap) != VAR_DECL
  374. || !bitmap_bit_p (si->va_list_vars, DECL_UID (ap) + num_ssa_names))
  375. return false;
  376. if (TREE_CODE (tem2) != SSA_NAME
  377. || bitmap_bit_p (si->va_list_vars, SSA_NAME_VERSION (tem2)))
  378. return false;
  379. if (si->compute_sizes <= 0)
  380. return false;
  381. increment = va_list_counter_bump (si, ap, tem2, true);
  382. if (increment + 1 <= 1)
  383. return false;
  384. if (cfun->va_list_gpr_size + increment < VA_LIST_MAX_GPR_SIZE)
  385. cfun->va_list_gpr_size += increment;
  386. else
  387. cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
  388. return true;
  389. }
/* If RHS is X, (some type *) X or X + CST for X a temporary variable
   containing value of some va_list variable plus optionally some constant,
   either set si->va_list_escapes or add LHS to si->va_list_escape_vars,
   depending whether LHS is a function local temporary.  */

static void
check_va_list_escapes (struct stdarg_info *si, tree lhs, tree rhs)
{
  if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
    return;

  /* RHS must derive from a tracked escape temporary, either directly
     or through an &MEM_REF[ptr] address computation.  */
  if (TREE_CODE (rhs) == SSA_NAME)
    {
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (rhs)))
	return;
    }
  else if (TREE_CODE (rhs) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (TREE_OPERAND (rhs, 0), 0)) == SSA_NAME)
    {
      tree ptr = TREE_OPERAND (TREE_OPERAND (rhs, 0), 0);
      if (! bitmap_bit_p (si->va_list_escape_vars, SSA_NAME_VERSION (ptr)))
	return;
    }
  else
    return;

  /* Copying the tracked value into anything but a local SSA temporary
     means it escapes.  */
  if (TREE_CODE (lhs) != SSA_NAME)
    {
      si->va_list_escapes = true;
      return;
    }

  /* Lazily decide (once per bb; < 0 means undecided) whether this bb
     runs at most once per va_start.  */
  if (si->compute_sizes < 0)
    {
      si->compute_sizes = 0;
      if (si->va_start_count == 1
	  && reachable_at_most_once (si->bb, si->va_start_bb))
	si->compute_sizes = 1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "bb%d will %sbe executed at most once for each va_start "
		 "in bb%d\n", si->bb->index, si->compute_sizes ? "" : "not ",
		 si->va_start_bb->index);
    }

  /* For void * or char * va_list types, there is just one counter.
     If va_arg is used in a loop, we don't know how many registers need
     saving.  */
  if (! si->compute_sizes)
    {
      si->va_list_escapes = true;
      return;
    }

  /* Called for its side effect of caching LHS's offset from the
     va_start'ed pointer; M1U means the value could not be tracked.  */
  if (va_list_counter_bump (si, si->va_start_ap, lhs, true)
      == HOST_WIDE_INT_M1U)
    {
      si->va_list_escapes = true;
      return;
    }

  /* LHS is another tracked alias of the va_list pointer.  */
  bitmap_set_bit (si->va_list_escape_vars, SSA_NAME_VERSION (lhs));
}
/* Check all uses of temporaries from si->va_list_escape_vars bitmap.
   Return true if va_list might be escaping.  */

static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      /* A PHI whose result is not itself a tracked escape var but that
	 receives a tracked var as an argument merges the va_list value
	 into an untracked name -- treat that as an escape.  */
      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  tree lhs;
	  use_operand_p uop;
	  ssa_op_iter soi;
	  gphi *phi = i.phi ();

	  lhs = PHI_RESULT (phi);
	  if (virtual_operand_p (lhs)
	      || bitmap_bit_p (si->va_list_escape_vars,
			       SSA_NAME_VERSION (lhs)))
	    continue;

	  FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
	    {
	      tree rhs = USE_FROM_PTR (uop);
	      if (TREE_CODE (rhs) == SSA_NAME
		  && bitmap_bit_p (si->va_list_escape_vars,
				   SSA_NAME_VERSION (rhs)))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fputs ("va_list escapes in ", dump_file);
		      print_gimple_stmt (dump_file, phi, 0, dump_flags);
		      fputc ('\n', dump_file);
		    }
		  return true;
		}
	    }
	}

      for (gimple_stmt_iterator i = gsi_start_bb (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  tree use;
	  ssa_op_iter iter;

	  /* Debug statements cannot affect code generation.  */
	  if (is_gimple_debug (stmt))
	    continue;

	  /* Any use of a tracked temporary that does not match one of
	     the whitelisted forms below falls through to the escape
	     report at the bottom of this loop.  */
	  FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
	    {
	      if (! bitmap_bit_p (si->va_list_escape_vars,
				  SSA_NAME_VERSION (use)))
		continue;

	      if (is_gimple_assign (stmt))
		{
		  tree rhs = gimple_assign_rhs1 (stmt);
		  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);

		  /* x = *ap_temp;  -- an actual va_arg fetch; widen the
		     saved-GPR size to cover the bytes being read.  */
		  if (rhs_code == MEM_REF
		      && TREE_OPERAND (rhs, 0) == use
		      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
		      && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (rhs)))
		      && si->offsets[SSA_NAME_VERSION (use)] != -1)
		    {
		      unsigned HOST_WIDE_INT gpr_size;
		      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));

		      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
				 + tree_to_shwi (TREE_OPERAND (rhs, 1))
				 + tree_to_uhwi (access_size);
		      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
			cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
		      else if (gpr_size > cfun->va_list_gpr_size)
			cfun->va_list_gpr_size = gpr_size;
		      continue;
		    }

		  /* va_arg sequences may contain
		     other_ap_temp = ap_temp;
		     other_ap_temp = ap_temp + constant;
		     other_ap_temp = (some_type *) ap_temp;
		     ap = ap_temp;
		     statements.  */
		  if (rhs == use
		      && ((rhs_code == POINTER_PLUS_EXPR
			   && (TREE_CODE (gimple_assign_rhs2 (stmt))
			       == INTEGER_CST))
			  || gimple_assign_cast_p (stmt)
			  || (get_gimple_rhs_class (rhs_code)
			      == GIMPLE_SINGLE_RHS)))
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      /* Copy into another tracked temporary: fine.  */
		      if (TREE_CODE (lhs) == SSA_NAME
			  && bitmap_bit_p (si->va_list_escape_vars,
					   SSA_NAME_VERSION (lhs)))
			continue;

		      /* Store back into the va_list variable itself.  */
		      if (TREE_CODE (lhs) == VAR_DECL
			  && bitmap_bit_p (si->va_list_vars,
					   DECL_UID (lhs) + num_ssa_names))
			continue;
		    }
		  /* other_ap_temp = &MEM_REF[ap_temp + CST]; into a
		     tracked temporary is also part of a va_arg pattern.  */
		  else if (rhs_code == ADDR_EXPR
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF
			   && TREE_OPERAND (TREE_OPERAND (rhs, 0), 0) == use)
		    {
		      tree lhs = gimple_assign_lhs (stmt);

		      if (bitmap_bit_p (si->va_list_escape_vars,
					SSA_NAME_VERSION (lhs)))
			continue;
		    }
		}

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      return true;
	    }
	}
    }

  return false;
}
  565. namespace {
/* Descriptor for the stdarg pass; see tree-pass.h for field meanings.  */

const pass_data pass_data_stdarg =
{
  GIMPLE_PASS, /* type */
  "stdarg", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
/* GIMPLE pass computing which argument registers stdarg prologues must
   save; the analysis itself lives in pass_stdarg::execute below.  */

class pass_stdarg : public gimple_opt_pass
{
public:
  pass_stdarg (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_stdarg, ctxt)
  {}

  /* opt_pass methods: */
  /* Run only when the optimization is enabled and the function actually
     takes variable arguments.  */
  virtual bool gate (function *fun)
    {
      return (flag_stdarg_opt
#ifdef ACCEL_COMPILER
	      /* Disable for GCC5 in the offloading compilers, as
		 va_list and gpr/fpr counter fields are not merged.
		 In GCC6 when stdarg is lowered late this shouldn't be
		 an issue.  */
	      && !in_lto_p
#endif
	      /* This optimization is only for stdarg functions.  */
	      && fun->stdarg != 0);
    }

  virtual unsigned int execute (function *);

}; // class pass_stdarg
/* Entry point to the stdarg optimization pass.  Computes
   FUN->va_list_gpr_size and FUN->va_list_fpr_size: the number of
   general-purpose resp. floating-point register save-area bytes the
   prologue must actually populate, or the MAX values when the va_list
   analysis fails (escapes).  Always returns 0 (no extra TODOs).  */

unsigned int
pass_stdarg::execute (function *fun)
{
  basic_block bb;
  bool va_list_escapes = false;
  bool va_list_simple_ptr;
  struct stdarg_info si;
  struct walk_stmt_info wi;
  const char *funcname = NULL;
  tree cfun_va_list;

  /* Start optimistic: assume no registers need saving, and raise the
     counts as uses are discovered.  */
  fun->va_list_gpr_size = 0;
  fun->va_list_fpr_size = 0;
  memset (&si, 0, sizeof (si));
  si.va_list_vars = BITMAP_ALLOC (NULL);
  si.va_list_escape_vars = BITMAP_ALLOC (NULL);

  if (dump_file)
    funcname = lang_hooks.decl_printable_name (current_function_decl, 2);

  /* "Simple" va_list ABIs use a plain void */char * cursor rather than
     a structure with separate GPR/FPR counters.  */
  cfun_va_list = targetm.fn_abi_va_list (fun->decl);
  va_list_simple_ptr = POINTER_TYPE_P (cfun_va_list)
		       && (TREE_TYPE (cfun_va_list) == void_type_node
			   || TREE_TYPE (cfun_va_list) == char_type_node);
  gcc_assert (is_gimple_reg_type (cfun_va_list) == va_list_simple_ptr);

  /* Phase 1: find every va_start call and record the va_list decl it
     initializes in si.va_list_vars.  Any form we don't recognize makes
     the whole analysis give up via va_list_escapes.  */
  FOR_EACH_BB_FN (bb, fun)
    {
      gimple_stmt_iterator i;

      for (i = gsi_start_bb (bb); !gsi_end_p (i); gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);
	  tree callee, ap;

	  if (!is_gimple_call (stmt))
	    continue;

	  callee = gimple_call_fndecl (stmt);
	  if (!callee
	      || DECL_BUILT_IN_CLASS (callee) != BUILT_IN_NORMAL)
	    continue;

	  switch (DECL_FUNCTION_CODE (callee))
	    {
	    case BUILT_IN_VA_START:
	      break;
	      /* If old style builtins are used, don't optimize anything.  */
	    case BUILT_IN_SAVEREGS:
	    case BUILT_IN_NEXT_ARG:
	      va_list_escapes = true;
	      continue;
	    default:
	      continue;
	    }

	  si.va_start_count++;
	  ap = gimple_call_arg (stmt, 0);

	  /* The argument must be &ap (possibly through a [0] array
	     element for array-typed va_list ABIs).  */
	  if (TREE_CODE (ap) != ADDR_EXPR)
	    {
	      va_list_escapes = true;
	      break;
	    }
	  ap = TREE_OPERAND (ap, 0);
	  if (TREE_CODE (ap) == ARRAY_REF)
	    {
	      if (! integer_zerop (TREE_OPERAND (ap, 1)))
		{
		  va_list_escapes = true;
		  break;
		}
	      ap = TREE_OPERAND (ap, 0);
	    }
	  if (TYPE_MAIN_VARIANT (TREE_TYPE (ap))
	      != TYPE_MAIN_VARIANT (targetm.fn_abi_va_list (fun->decl))
	      || TREE_CODE (ap) != VAR_DECL)
	    {
	      va_list_escapes = true;
	      break;
	    }

	  /* A global va_list is visible outside the function.  */
	  if (is_global_var (ap))
	    {
	      va_list_escapes = true;
	      break;
	    }

	  bitmap_set_bit (si.va_list_vars, DECL_UID (ap) + num_ssa_names);

	  /* VA_START_BB and VA_START_AP will be only used if there is just
	     one va_start in the function.  */
	  si.va_start_bb = bb;
	  si.va_start_ap = ap;
	}

      if (va_list_escapes)
	break;
    }

  /* If there were no va_start uses in the function, there is no need to
     save anything.  */
  if (si.va_start_count == 0)
    goto finish;

  /* If some va_list arguments weren't local, we can't optimize.  */
  if (va_list_escapes)
    goto finish;

  /* For void * or char * va_list, something useful can be done only
     if there is just one va_start.  */
  if (va_list_simple_ptr && si.va_start_count > 1)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For struct * va_list, if the backend didn't tell us what the counter fields
     are, there is nothing more we can do.  */
  if (!va_list_simple_ptr
      && va_list_gpr_counter_field == NULL_TREE
      && va_list_fpr_counter_field == NULL_TREE)
    {
      va_list_escapes = true;
      goto finish;
    }

  /* For void * or char * va_list there is just one counter
     (va_list itself).  Use VA_LIST_GPR_SIZE for it.  */
  if (va_list_simple_ptr)
    fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;

  calculate_dominance_info (CDI_DOMINATORS);
  memset (&wi, 0, sizeof (wi));
  wi.info = si.va_list_vars;

  /* Phase 2: scan every statement for recognized va_arg patterns
     (accumulating the register byte counts) and for any other use of a
     tracked va_list, which counts as an escape.  */
  FOR_EACH_BB_FN (bb, fun)
    {
      si.compute_sizes = -1;
      si.bb = bb;

      /* For va_list_simple_ptr, we have to check PHI nodes too.  We treat
	 them as assignments for the purpose of escape analysis.  This is
	 not needed for non-simple va_list because virtual phis don't perform
	 any real data movement.  Also, check PHI nodes for taking address of
	 the va_list vars.  */
      tree lhs, rhs;
      use_operand_p uop;
      ssa_op_iter soi;

      for (gphi_iterator i = gsi_start_phis (bb); !gsi_end_p (i);
	   gsi_next (&i))
	{
	  gphi *phi = i.phi ();
	  lhs = PHI_RESULT (phi);

	  if (virtual_operand_p (lhs))
	    continue;

	  if (va_list_simple_ptr)
	    {
	      FOR_EACH_PHI_ARG (uop, phi, soi, SSA_OP_USE)
		{
		  rhs = USE_FROM_PTR (uop);
		  if (va_list_ptr_read (&si, rhs, lhs))
		    continue;
		  else if (va_list_ptr_write (&si, lhs, rhs))
		    continue;
		  else
		    check_va_list_escapes (&si, lhs, rhs);

		  if (si.va_list_escapes)
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			{
			  fputs ("va_list escapes in ", dump_file);
			  print_gimple_stmt (dump_file, phi, 0, dump_flags);
			  fputc ('\n', dump_file);
			}
		      va_list_escapes = true;
		    }
		}
	    }

	  /* Non-SSA PHI arguments may take the address of a va_list var.  */
	  for (unsigned j = 0; !va_list_escapes
	       && j < gimple_phi_num_args (phi); ++j)
	    if ((!va_list_simple_ptr
		 || TREE_CODE (gimple_phi_arg_def (phi, j)) != SSA_NAME)
		&& walk_tree (gimple_phi_arg_def_ptr (phi, j),
			      find_va_list_reference, &wi, NULL))
	      {
		if (dump_file && (dump_flags & TDF_DETAILS))
		  {
		    fputs ("va_list escapes in ", dump_file);
		    print_gimple_stmt (dump_file, phi, 0, dump_flags);
		    fputc ('\n', dump_file);
		  }
		va_list_escapes = true;
	      }
	}

      for (gimple_stmt_iterator i = gsi_start_bb (bb);
	   !gsi_end_p (i) && !va_list_escapes;
	   gsi_next (&i))
	{
	  gimple stmt = gsi_stmt (i);

	  /* Don't look at __builtin_va_{start,end}, they are ok.  */
	  if (is_gimple_call (stmt))
	    {
	      tree callee = gimple_call_fndecl (stmt);

	      if (callee
		  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL
		  && (DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_START
		      || DECL_FUNCTION_CODE (callee) == BUILT_IN_VA_END))
		continue;
	    }

	  if (is_gimple_assign (stmt))
	    {
	      lhs = gimple_assign_lhs (stmt);
	      rhs = gimple_assign_rhs1 (stmt);

	      if (va_list_simple_ptr)
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for tem = ap.  */
		      else if (va_list_ptr_read (&si, rhs, lhs))
			continue;

		      /* Check for the last insn in:
			 tem1 = ap;
			 tem2 = tem1 + CST;
			 ap = tem2;
			 sequence.  */
		      else if (va_list_ptr_write (&si, lhs, rhs))
			continue;
		    }

		  if ((gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
		      || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		      || (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
			  == GIMPLE_SINGLE_RHS))
		    check_va_list_escapes (&si, lhs, rhs);
		}
	      else
		{
		  if (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
		      == GIMPLE_SINGLE_RHS)
		    {
		      /* Check for ap ={v} {}.  */
		      if (TREE_CLOBBER_P (rhs))
			continue;

		      /* Check for ap[0].field = temp.  */
		      else if (va_list_counter_struct_op (&si, lhs, rhs, true))
			continue;

		      /* Check for temp = ap[0].field.  */
		      else if (va_list_counter_struct_op (&si, rhs, lhs,
							  false))
			continue;
		    }

		  /* Do any architecture specific checking.  */
		  if (targetm.stdarg_optimize_hook
		      && targetm.stdarg_optimize_hook (&si, stmt))
		    continue;
		}
	    }
	  else if (is_gimple_debug (stmt))
	    continue;

	  /* All other uses of va_list are either va_copy (that is not handled
	     in this optimization), taking address of va_list variable or
	     passing va_list to other functions (in that case va_list might
	     escape the function and therefore va_start needs to set it up
	     fully), or some unexpected use of va_list.  None of these should
	     happen in a gimplified VA_ARG_EXPR.  */
	  if (si.va_list_escapes
	      || walk_gimple_op (stmt, find_va_list_reference, &wi))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fputs ("va_list escapes in ", dump_file);
		  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		  fputc ('\n', dump_file);
		}
	      va_list_escapes = true;
	    }
	}

      if (va_list_escapes)
	break;
    }

  /* Phase 3: for the simple-pointer ABI, verify that none of the noted
     temporaries leaks the va_list value out of the function.  */
  if (! va_list_escapes
      && va_list_simple_ptr
      && ! bitmap_empty_p (si.va_list_escape_vars)
      && check_all_va_list_escapes (&si))
    va_list_escapes = true;

finish:
  /* Escape anywhere forces the conservative answer: save everything.  */
  if (va_list_escapes)
    {
      fun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
      fun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
    }
  BITMAP_FREE (si.va_list_vars);
  BITMAP_FREE (si.va_list_escape_vars);
  free (si.offsets);
  if (dump_file)
    {
      fprintf (dump_file, "%s: va_list escapes %d, needs to save ",
	       funcname, (int) va_list_escapes);
      if (fun->va_list_gpr_size >= VA_LIST_MAX_GPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_gpr_size);
      fputs (" GPR units and ", dump_file);
      if (fun->va_list_fpr_size >= VA_LIST_MAX_FPR_SIZE)
	fputs ("all", dump_file);
      else
	fprintf (dump_file, "%d", cfun->va_list_fpr_size);
      fputs (" FPR units.\n", dump_file);
    }
  return 0;
}
  894. } // anon namespace
  895. gimple_opt_pass *
  896. make_pass_stdarg (gcc::context *ctxt)
  897. {
  898. return new pass_stdarg (ctxt);
  899. }