/* Operations with affine combinations of trees.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "options.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "hashtab.h"
#include "tm.h"
#include "hard-reg-set.h"
#include "function.h"
#include "rtl.h"
#include "flags.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-pretty-print.h"
#include "tree-affine.h"
#include "predict.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "dumpfile.h"
#include "cfgexpand.h"
#include "wide-int-print.h"

/* Extends CST as appropriate for the affine combination COMB.  */

widest_int
wide_int_ext_for_comb (const widest_int &cst, aff_tree *comb)
{
  return wi::sext (cst, TYPE_PRECISION (comb->type));
}

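/* For example (illustrative note, not in the original file): with a
   32-bit COMB->type, the constant 0xffffffff sign-extends to -1, so
   x + 0xffffffff and x - 1 denote the same combination.  */
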
/* Initializes affine combination COMB so that its value is zero in TYPE.  */

static void
aff_combination_zero (aff_tree *comb, tree type)
{
  int i;

  comb->type = type;
  comb->offset = 0;
  comb->n = 0;
  for (i = 0; i < MAX_AFF_ELTS; i++)
    comb->elts[i].coef = 0;
  comb->rest = NULL_TREE;
}

/* Sets COMB to CST.  */

void
aff_combination_const (aff_tree *comb, tree type, const widest_int &cst)
{
  aff_combination_zero (comb, type);
  comb->offset = wide_int_ext_for_comb (cst, comb);
}

/* Sets COMB to single element ELT.  */

void
aff_combination_elt (aff_tree *comb, tree type, tree elt)
{
  aff_combination_zero (comb, type);

  comb->n = 1;
  comb->elts[0].val = elt;
  comb->elts[0].coef = 1;
}

/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
        = wide_int_ext_for_comb (scale * comb->elts[i].coef, comb);
      /* A coefficient may become zero due to overflow.  Remove the zero
         elements.  */
      if (new_coef == 0)
        continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
        type = sizetype;
      if (comb->n < MAX_AFF_ELTS)
        {
          comb->elts[comb->n].coef = scale;
          comb->elts[comb->n].val = comb->rest;
          comb->rest = NULL_TREE;
          comb->n++;
        }
      else
        comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
                                  wide_int_to_tree (type, scale));
    }
}

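/* Worked example (illustrative, not in the original file): scaling
   { offset = 2, elts = { x * 3 } } by 4 yields
   { offset = 8, elts = { x * 12 } }.  In an 8-bit type, a coefficient
   of 128 (canonically -128) scaled by 2 wraps to 0 modulo 256, so the
   loop above drops that element, as its comment notes.  */
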
/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);
  if (scale == 0)
    return;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
        widest_int new_coef
          = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb);
        if (new_coef != 0)
          {
            comb->elts[i].coef = new_coef;
            return;
          }

        comb->n--;
        comb->elts[i] = comb->elts[comb->n];

        if (comb->rest)
          {
            gcc_assert (comb->n == MAX_AFF_ELTS - 1);
            comb->elts[comb->n].coef = 1;
            comb->elts[comb->n].val = comb->rest;
            comb->rest = NULL_TREE;
            comb->n++;
          }
        return;
      }
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
                       fold_convert (type, elt),
                       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
                              elt);
  else
    comb->rest = elt;
}

/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const widest_int &cst)
{
  c->offset = wide_int_ext_for_comb (c->offset + cst, c);
}

/* Adds COMB2 to COMB1.  */

void
aff_combination_add (aff_tree *comb1, aff_tree *comb2)
{
  unsigned i;

  aff_combination_add_cst (comb1, comb2->offset);
  for (i = 0; i < comb2->n; i++)
    aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef);
  if (comb2->rest)
    aff_combination_add_elt (comb1, comb2->rest, 1);
}

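/* Minimal usage sketch of the constructors above (illustrative helper,
   not part of the original file).  Builds the combination 2*X + 3 in
   TYPE, assuming X is a tree of TYPE.  */

static void ATTRIBUTE_UNUSED
example_build_2x_plus_3 (aff_tree *res, tree type, tree x)
{
  aff_tree tmp;

  aff_combination_const (res, type, 3);   /* res = 3 */
  aff_combination_elt (&tmp, type, x);    /* tmp = x */
  aff_combination_scale (&tmp, 2);        /* tmp = 2*x */
  aff_combination_add (res, &tmp);        /* res = 2*x + 3 */
}
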
/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  comb->offset = wide_int_ext_for_comb (comb->offset, comb);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
        continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Splits EXPR into an affine combination of parts.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  machine_mode mode;
  int unsignedp, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case INTEGER_CST:
      aff_combination_const (comb, type, wi::to_widest (expr));
      return;

    case POINTER_PLUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
        aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
        break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
        {
          expr = TREE_OPERAND (expr, 0);
          tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
          tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
          aff_combination_add (comb, &tmp);
          return;
        }
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
                                  &toffset, &mode, &unsignedp, &volatilep,
                                  false);
      if (bitpos % BITS_PER_UNIT != 0)
        break;
      aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
      if (TREE_CODE (core) == MEM_REF)
        {
          aff_combination_add_cst (comb, wi::to_widest (TREE_OPERAND (core, 1)));
          core = TREE_OPERAND (core, 0);
        }
      else
        core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
        aff_combination_add_elt (comb, core, 1);
      else
        {
          tree_to_aff_combination (core, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      if (toffset)
        {
          tree_to_aff_combination (toffset, type, &tmp);
          aff_combination_add (comb, &tmp);
        }
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
        tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
                                 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
        {
          aff_combination_elt (comb, type, expr);
          return;
        }
      else
        aff_combination_elt (comb, type,
                             build2 (MEM_REF, TREE_TYPE (expr),
                                     TREE_OPERAND (expr, 0),
                                     build_int_cst
                                       (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    default:
      break;
    }

  aff_combination_elt (comb, type, expr);
}

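/* Worked example (illustrative, not in the original file): for
   EXPR = a + 4*b - 7 in TYPE, the resulting combination is
   { offset = -7, elts = { a * 1, b * 4 } }.  Subexpressions that are
   not affine (e.g. a * b) become opaque elements with coefficient 1,
   or land in REST once all MAX_AFF_ELTS element slots are in use.  */
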
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in,
                 aff_tree *comb ATTRIBUTE_UNUSED)
{
  enum tree_code code;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb);

  if (scale == -1
      && POINTER_TYPE_P (TREE_TYPE (elt)))
    {
      elt = convert_to_ptrofftype (elt);
      elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
      scale = 1;
    }

  if (scale == 1)
    {
      if (!expr)
        {
          if (POINTER_TYPE_P (TREE_TYPE (elt)))
            return elt;
          else
            return fold_convert (type1, elt);
        }

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
        return fold_build_pointer_plus (expr, elt);
      if (POINTER_TYPE_P (TREE_TYPE (elt)))
        return fold_build_pointer_plus (elt, expr);
      return fold_build2 (PLUS_EXPR, type1,
                          expr, fold_convert (type1, elt));
    }

  if (scale == -1)
    {
      if (!expr)
        return fold_build1 (NEGATE_EXPR, type1,
                            fold_convert (type1, elt));

      if (POINTER_TYPE_P (TREE_TYPE (expr)))
        {
          elt = convert_to_ptrofftype (elt);
          elt = fold_build1 (NEGATE_EXPR, TREE_TYPE (elt), elt);
          return fold_build_pointer_plus (expr, elt);
        }
      return fold_build2 (MINUS_EXPR, type1,
                          expr, fold_convert (type1, elt));
    }

  elt = fold_convert (type1, elt);
  if (!expr)
    return fold_build2 (MULT_EXPR, type1, elt,
                        wide_int_to_tree (type1, scale));

  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type1, elt,
                     wide_int_to_tree (type1, scale));
  if (POINTER_TYPE_P (TREE_TYPE (expr)))
    {
      if (code == MINUS_EXPR)
        elt = fold_build1 (NEGATE_EXPR, type1, elt);
      return fold_build_pointer_plus (expr, elt);
    }
  return fold_build2 (code, type1, expr, elt);
}

/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type;
  tree expr = NULL_TREE;
  unsigned i;
  widest_int off, sgn;
  tree type1 = type;
  if (POINTER_TYPE_P (type))
    type1 = sizetype;

  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  for (i = 0; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef,
                            comb);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1, comb);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (wi::neg_p (comb->offset))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  return add_elt_to_tree (expr, type, wide_int_to_tree (type1, off), sgn,
                          comb);
}

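/* Illustrative round trip (hypothetical helper, not part of the
   original file): decompose EXPR and rebuild an equivalent tree.  The
   result is equal in value to EXPR but may be structured differently,
   which is the essence of using affine combinations to canonicalize
   address arithmetic.  */

static tree ATTRIBUTE_UNUSED
example_affine_round_trip (tree expr, tree type)
{
  aff_tree comb;

  tree_to_aff_combination (expr, type, &comb);
  return aff_combination_to_tree (&comb);
}
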
/* Copies the tree elements of COMB to ensure that they are not shared.  */

void
unshare_aff_combination (aff_tree *comb)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    comb->elts[i].val = unshare_expr (comb->elts[i].val);
  if (comb->rest)
    comb->rest = unshare_expr (comb->rest);
}

/* Remove M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}

/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
                             aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
        {
          type = TREE_TYPE (aval);
          aval = fold_build2 (MULT_EXPR, type, aval,
                              fold_convert (type, val));
        }

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    aff_combination_add_elt (r, val, coef * c->offset);
  else
    aff_combination_add_cst (r, coef * c->offset);
}

/* Multiplies C1 by C2, storing the result to R.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  aff_combination_add_product (c1, c2->offset, NULL, r);
}

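/* Worked example (illustrative, not in the original file): for
   C1 = x + 1 and C2 = y + 2 the result is
   { offset = 2, elts = { x*y * 1, y * 1, x * 2 } }, i.e.
   x*y + y + 2*x + 2; the non-affine product x*y becomes an opaque
   element built with fold_build2 in aff_combination_add_product.  */
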
/* Returns the element of COMB whose value is VAL, or NULL if no such
   element exists.  If IDX is not NULL, it is set to the index of VAL in
   COMB.  */

static struct aff_comb_elt *
aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx)
{
  unsigned i;

  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, val, 0))
      {
        if (idx)
          *idx = i;

        return &comb->elts[i];
      }

  return NULL;
}

/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated.  */
  unsigned in_progress : 1;
};

/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
                        hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple def;
  widest_int scale;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (CONVERT_EXPR_P (e)
          && (TYPE_PRECISION (type)
              >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
        name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
        continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
        continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
          && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
          && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
              || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
        continue;

      /* We do not know whether the reference retains its value at the
         place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
        continue;

      if (!*cache)
        *cache = new hash_map<tree, name_expansion *>;
      name_expansion **slot = &(*cache)->get_or_insert (e);
      exp = *slot;

      if (!exp)
        {
          exp = XNEW (struct name_expansion);
          exp->in_progress = 1;
          *slot = exp;
          /* In principle this is a generally valid folding, but
             it is not unconditionally an optimization, so do it
             here and not in fold_unary.  */
          /* Convert (T1)(X *+- CST) into (T1)X *+- (T1)CST if T1 is wider
             than the type of X and overflow for the type of X is
             undefined.  */
          if (e != name
              && INTEGRAL_TYPE_P (type)
              && INTEGRAL_TYPE_P (TREE_TYPE (name))
              && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (name))
              && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (name))
              && (code == PLUS_EXPR || code == MINUS_EXPR || code == MULT_EXPR)
              && TREE_CODE (gimple_assign_rhs2 (def)) == INTEGER_CST)
            rhs = fold_build2 (code, type,
                               fold_convert (type, gimple_assign_rhs1 (def)),
                               fold_convert (type, gimple_assign_rhs2 (def)));
          else
            {
              rhs = gimple_assign_rhs_to_tree (def);
              if (e != name)
                rhs = fold_convert (type, rhs);
            }
          tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
          exp->expansion = current;
          exp->in_progress = 0;
        }
      else
        {
          /* Since we follow the definitions in the SSA form, we should not
             enter a cycle unless we pass through a phi node.  */
          gcc_assert (!exp->in_progress);
          current = exp->expansion;
        }

      /* Accumulate the new terms to TO_ADD, so that we do not modify
         COMB while traversing it; include the term -coef * E, to remove
         it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}

/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
                                hash_map<tree, name_expansion *> **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}

/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   hash_map::traverse.  */

bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
  free (*value);
  return true;
}

/* Frees memory allocated for the CACHE used by
   tree_to_aff_combination_expand.  */

void
free_affine_expand_cache (hash_map<tree, name_expansion *> **cache)
{
  if (!*cache)
    return;

  (*cache)->traverse<void *, free_name_expansion> (NULL);
  delete (*cache);
  *cache = NULL;
}

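/* Usage sketch for the expansion cache (illustrative helper, not part
   of the original file): expand several expressions with one shared
   cache, then release it.  E1 and E2 are assumed to be trees of TYPE;
   A1 and A2 receive the expanded combinations.  */

static void ATTRIBUTE_UNUSED
example_expand_two (tree e1, tree e2, tree type, aff_tree *a1, aff_tree *a2)
{
  hash_map<tree, name_expansion *> *cache = NULL;

  tree_to_aff_combination_expand (e1, type, a1, &cache);
  /* The second call reuses expansions of SSA names shared with E1.  */
  tree_to_aff_combination_expand (e2, type, a2, &cache);
  free_affine_expand_cache (&cache);
}
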
/* If VAL != CST * DIV for any constant CST, returns false.
   Otherwise, if *MULT_SET is true, additionally compares CST and MULT,
   and if they are different, returns false.  Finally, if neither of these
   two cases occur, true is returned, and CST is stored to MULT and MULT_SET
   is set to true.  */

static bool
wide_int_constant_multiple_p (const widest_int &val, const widest_int &div,
                              bool *mult_set, widest_int *mult)
{
  widest_int rem, cst;

  if (val == 0)
    {
      if (*mult_set && *mult != 0)
        return false;
      *mult_set = true;
      *mult = 0;
      return true;
    }

  if (div == 0)
    return false;

  if (!wi::multiple_of_p (val, div, SIGNED, &cst))
    return false;

  if (*mult_set && *mult != cst)
    return false;

  *mult_set = true;
  *mult = cst;
  return true;
}

/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
                                     widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  if (val->n == 0 && val->offset == 0)
    {
      *mult = 0;
      return true;
    }
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
                                     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
        = aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
        return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
                                         &mult_set, mult))
        return false;
    }

  gcc_assert (mult_set);
  return true;
}

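/* Worked example (illustrative, not in the original file): with
   VAL = 8*x + 4 and DIV = 2*x + 1, the function stores 4 to *MULT and
   returns true, since 8*x + 4 == 4 * (2*x + 1).  With VAL = 8*x + 6 it
   returns false: the offsets would require MULT == 6 while the
   coefficients require MULT == 4.  */
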
/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
        {
          fprintf (file, "    [%d] = ", i);
          print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

          fprintf (file, " * ");
          print_dec (val->elts[i].coef, file, sgn);
          if (i != val->n - 1)
            fprintf (file, ", \n");
        }
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}

/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}

/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  machine_mode mode;
  int uns, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
                                   &uns, &vol, false);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
  aff_combination_add (addr, &tmp);

  *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;

  return base;
}

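/* Worked example (illustrative, not in the original file): for
   REF = a.f, where field f sits at byte offset 8 in a, ADDR becomes
   { offset = 8, elts = { &a * 1 } } and *SIZE is the size of f in
   bytes, rounded up from its size in bits.  */
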
/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const widest_int &size1,
                           const widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  if (wi::neg_p (diff->offset))
    {
      /* The second object is before the first one, we succeed if the last
         element of the second object is before the start of the first one.  */
      return wi::neg_p (diff->offset + size2 - 1);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return wi::les_p (size1, diff->offset);
    }
}

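/* Disjointness sketch built on the predicate above (hypothetical
   helper, not part of the original file).  ADDR1 and ADDR2 are affine
   forms of two access addresses, e.g. from get_inner_reference_aff,
   and SIZE1/SIZE2 the access sizes in bytes.  For 4-byte accesses at
   p and p + 8, DIFF reduces to { offset = 8 } and the regions are
   disjoint since 4 <= 8.  */

static bool ATTRIBUTE_UNUSED
example_regions_disjoint_p (aff_tree *addr1, aff_tree *addr2,
                            const widest_int &size1, const widest_int &size2)
{
  aff_tree diff, tmp;

  diff = *addr2;
  tmp = *addr1;
  aff_combination_scale (&tmp, -1);   /* tmp = -ADDR1 */
  aff_combination_add (&diff, &tmp);  /* diff = ADDR2 - ADDR1 */
  return aff_comb_cannot_overlap_p (&diff, size1, size2);
}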