tree-if-conv.c

  1. /* If-conversion for vectorizer.
  2. Copyright (C) 2004-2015 Free Software Foundation, Inc.
  3. Contributed by Devang Patel <dpatel@apple.com>
  4. This file is part of GCC.
  5. GCC is free software; you can redistribute it and/or modify it under
  6. the terms of the GNU General Public License as published by the Free
  7. Software Foundation; either version 3, or (at your option) any later
  8. version.
  9. GCC is distributed in the hope that it will be useful, but WITHOUT ANY
  10. WARRANTY; without even the implied warranty of MERCHANTABILITY or
  11. FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
  12. for more details.
  13. You should have received a copy of the GNU General Public License
  14. along with GCC; see the file COPYING3. If not see
  15. <http://www.gnu.org/licenses/>. */
  16. /* This pass implements a tree level if-conversion of loops. Its
  17. initial goal is to help the vectorizer to vectorize loops with
  18. conditions.
  19. A short description of if-conversion:
  20. o Decide if a loop is if-convertible or not.
  21. o Walk all loop basic blocks in breadth first order (BFS order).
  22. o Remove conditional statements (at the end of basic block)
  23. and propagate condition into destination basic blocks'
  24. predicate list.
  25. o Replace modify expression with conditional modify expression
  26. using current basic block's condition.
  27. o Merge all basic blocks
  28. o Replace phi nodes with conditional modify expr
  29. o Merge all basic blocks into header
  30. Sample transformation:
  31. INPUT
  32. -----
  33. # i_23 = PHI <0(0), i_18(10)>;
  34. <L0>:;
  35. j_15 = A[i_23];
  36. if (j_15 > 41) goto <L1>; else goto <L17>;
  37. <L17>:;
  38. goto <bb 3> (<L3>);
  39. <L1>:;
  40. # iftmp.2_4 = PHI <0(8), 42(2)>;
  41. <L3>:;
  42. A[i_23] = iftmp.2_4;
  43. i_18 = i_23 + 1;
  44. if (i_18 <= 15) goto <L19>; else goto <L18>;
  45. <L19>:;
  46. goto <bb 1> (<L0>);
  47. <L18>:;
  48. OUTPUT
  49. ------
  50. # i_23 = PHI <0(0), i_18(10)>;
  51. <L0>:;
  52. j_15 = A[i_23];
  53. <L3>:;
  54. iftmp.2_4 = j_15 > 41 ? 42 : 0;
  55. A[i_23] = iftmp.2_4;
  56. i_18 = i_23 + 1;
  57. if (i_18 <= 15) goto <L19>; else goto <L18>;
  58. <L19>:;
  59. goto <bb 1> (<L0>);
  60. <L18>:;
  61. */
  62. #include "config.h"
  63. #include "system.h"
  64. #include "coretypes.h"
  65. #include "tm.h"
  66. #include "hash-set.h"
  67. #include "machmode.h"
  68. #include "vec.h"
  69. #include "double-int.h"
  70. #include "input.h"
  71. #include "alias.h"
  72. #include "symtab.h"
  73. #include "wide-int.h"
  74. #include "inchash.h"
  75. #include "tree.h"
  76. #include "fold-const.h"
  77. #include "stor-layout.h"
  78. #include "flags.h"
  79. #include "predict.h"
  80. #include "hard-reg-set.h"
  81. #include "function.h"
  82. #include "dominance.h"
  83. #include "cfg.h"
  84. #include "basic-block.h"
  85. #include "gimple-pretty-print.h"
  86. #include "tree-ssa-alias.h"
  87. #include "internal-fn.h"
  88. #include "gimple-fold.h"
  89. #include "gimple-expr.h"
  90. #include "is-a.h"
  91. #include "gimple.h"
  92. #include "gimplify.h"
  93. #include "gimple-iterator.h"
  94. #include "gimplify-me.h"
  95. #include "gimple-ssa.h"
  96. #include "tree-cfg.h"
  97. #include "tree-phinodes.h"
  98. #include "ssa-iterators.h"
  99. #include "stringpool.h"
  100. #include "tree-ssanames.h"
  101. #include "tree-into-ssa.h"
  102. #include "tree-ssa.h"
  103. #include "cfgloop.h"
  104. #include "tree-chrec.h"
  105. #include "tree-data-ref.h"
  106. #include "tree-scalar-evolution.h"
  107. #include "tree-ssa-loop-ivopts.h"
  108. #include "tree-ssa-address.h"
  109. #include "tree-pass.h"
  110. #include "dbgcnt.h"
  111. #include "hashtab.h"
  112. #include "rtl.h"
  113. #include "statistics.h"
  114. #include "real.h"
  115. #include "fixed-value.h"
  116. #include "insn-config.h"
  117. #include "expmed.h"
  118. #include "dojump.h"
  119. #include "explow.h"
  120. #include "calls.h"
  121. #include "emit-rtl.h"
  122. #include "varasm.h"
  123. #include "stmt.h"
  124. #include "expr.h"
  125. #include "insn-codes.h"
  126. #include "optabs.h"
  127. #include "hash-map.h"
  128. /* List of basic blocks in if-conversion-suitable order. */
  129. static basic_block *ifc_bbs;
  130. /* Apply more aggressive (extended) if-conversion if true. */
  131. static bool aggressive_if_conv;
  132. /* Structure used to predicate basic blocks. This is attached to the
  133. ->aux field of the BBs in the loop to be if-converted. */
  134. typedef struct bb_predicate_s {
  135. /* The condition under which this basic block is executed. */
  136. tree predicate;
  137. /* PREDICATE is gimplified, and the sequence of statements is
  138. recorded here, in order to avoid the duplication of computations
  139. that occur in previous conditions. See PR44483. */
  140. gimple_seq predicate_gimplified_stmts;
  141. } *bb_predicate_p;
  142. /* Returns true when the basic block BB has a predicate. */
  143. static inline bool
  144. bb_has_predicate (basic_block bb)
  145. {
  146. return bb->aux != NULL;
  147. }
  148. /* Returns the gimplified predicate for basic block BB. */
  149. static inline tree
  150. bb_predicate (basic_block bb)
  151. {
  152. return ((bb_predicate_p) bb->aux)->predicate;
  153. }
  154. /* Sets the gimplified predicate COND for basic block BB. */
  155. static inline void
  156. set_bb_predicate (basic_block bb, tree cond)
  157. {
  158. gcc_assert ((TREE_CODE (cond) == TRUTH_NOT_EXPR
  159. && is_gimple_condexpr (TREE_OPERAND (cond, 0)))
  160. || is_gimple_condexpr (cond));
  161. ((bb_predicate_p) bb->aux)->predicate = cond;
  162. }
  163. /* Returns the sequence of statements of the gimplification of the
  164. predicate for basic block BB. */
  165. static inline gimple_seq
  166. bb_predicate_gimplified_stmts (basic_block bb)
  167. {
  168. return ((bb_predicate_p) bb->aux)->predicate_gimplified_stmts;
  169. }
  170. /* Sets the sequence of statements STMTS of the gimplification of the
  171. predicate for basic block BB. */
  172. static inline void
  173. set_bb_predicate_gimplified_stmts (basic_block bb, gimple_seq stmts)
  174. {
  175. ((bb_predicate_p) bb->aux)->predicate_gimplified_stmts = stmts;
  176. }
  177. /* Adds the sequence of statements STMTS to the sequence of statements
  178. of the predicate for basic block BB. */
  179. static inline void
  180. add_bb_predicate_gimplified_stmts (basic_block bb, gimple_seq stmts)
  181. {
  182. gimple_seq_add_seq
  183. (&(((bb_predicate_p) bb->aux)->predicate_gimplified_stmts), stmts);
  184. }
  185. /* Initializes to TRUE the predicate of basic block BB. */
  186. static inline void
  187. init_bb_predicate (basic_block bb)
  188. {
  189. bb->aux = XNEW (struct bb_predicate_s);
  190. set_bb_predicate_gimplified_stmts (bb, NULL);
  191. set_bb_predicate (bb, boolean_true_node);
  192. }
  193. /* Release the SSA_NAMEs associated with the predicate of basic block BB,
  194. but don't actually free it. */
  195. static inline void
  196. release_bb_predicate (basic_block bb)
  197. {
  198. gimple_seq stmts = bb_predicate_gimplified_stmts (bb);
  199. if (stmts)
  200. {
  201. gimple_stmt_iterator i;
  202. for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
  203. free_stmt_operands (cfun, gsi_stmt (i));
  204. set_bb_predicate_gimplified_stmts (bb, NULL);
  205. }
  206. }
  207. /* Free the predicate of basic block BB. */
  208. static inline void
  209. free_bb_predicate (basic_block bb)
  210. {
  211. if (!bb_has_predicate (bb))
  212. return;
  213. release_bb_predicate (bb);
  214. free (bb->aux);
  215. bb->aux = NULL;
  216. }
  217. /* Reinitialize predicate of BB with the true predicate. */
  218. static inline void
  219. reset_bb_predicate (basic_block bb)
  220. {
  221. if (!bb_has_predicate (bb))
  222. init_bb_predicate (bb);
  223. else
  224. {
  225. release_bb_predicate (bb);
  226. set_bb_predicate (bb, boolean_true_node);
  227. }
  228. }
  229. /* Returns a new SSA_NAME of type TYPE that is assigned the value of
  230. the expression EXPR. Inserts the statement created for this
  231. computation before GSI and leaves the iterator GSI at the same
  232. statement. */
  233. static tree
  234. ifc_temp_var (tree type, tree expr, gimple_stmt_iterator *gsi)
  235. {
  236. tree new_name = make_temp_ssa_name (type, NULL, "_ifc_");
  237. gimple stmt = gimple_build_assign (new_name, expr);
  238. gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
  239. return new_name;
  240. }
  241. /* Return true when COND is a true predicate. */
  242. static inline bool
  243. is_true_predicate (tree cond)
  244. {
  245. return (cond == NULL_TREE
  246. || cond == boolean_true_node
  247. || integer_onep (cond));
  248. }
  249. /* Returns true when BB has a predicate that is not trivial: true or
  250. NULL_TREE. */
  251. static inline bool
  252. is_predicated (basic_block bb)
  253. {
  254. return !is_true_predicate (bb_predicate (bb));
  255. }
  256. /* Parses the predicate COND and returns its comparison code and
  257. operands OP0 and OP1. */
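/* For instance, for a condition defined by the assignment
   "c_1 = a_2 < b_3", this returns LT_EXPR and sets *OP0 = a_2 and
   *OP1 = b_3; anything that cannot be parsed yields ERROR_MARK.  */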
  258. static enum tree_code
  259. parse_predicate (tree cond, tree *op0, tree *op1)
  260. {
  261. gimple s;
  262. if (TREE_CODE (cond) == SSA_NAME
  263. && is_gimple_assign (s = SSA_NAME_DEF_STMT (cond)))
  264. {
  265. if (TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
  266. {
  267. *op0 = gimple_assign_rhs1 (s);
  268. *op1 = gimple_assign_rhs2 (s);
  269. return gimple_assign_rhs_code (s);
  270. }
  271. else if (gimple_assign_rhs_code (s) == TRUTH_NOT_EXPR)
  272. {
  273. tree op = gimple_assign_rhs1 (s);
  274. tree type = TREE_TYPE (op);
  275. enum tree_code code = parse_predicate (op, op0, op1);
  276. return code == ERROR_MARK ? ERROR_MARK
  277. : invert_tree_comparison (code, HONOR_NANS (type));
  278. }
  279. return ERROR_MARK;
  280. }
  281. if (TREE_CODE_CLASS (TREE_CODE (cond)) == tcc_comparison)
  282. {
  283. *op0 = TREE_OPERAND (cond, 0);
  284. *op1 = TREE_OPERAND (cond, 1);
  285. return TREE_CODE (cond);
  286. }
  287. return ERROR_MARK;
  288. }
  289. /* Returns the fold of predicate C1 OR C2 at location LOC. */
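/* For instance, for a diamond whose arms are guarded by "a_5 > 10" and
   "a_5 <= 10", OR-ing the two predicates can fold down to the true
   predicate, so the join block needs no predicate at all.  */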
  290. static tree
  291. fold_or_predicates (location_t loc, tree c1, tree c2)
  292. {
  293. tree op1a, op1b, op2a, op2b;
  294. enum tree_code code1 = parse_predicate (c1, &op1a, &op1b);
  295. enum tree_code code2 = parse_predicate (c2, &op2a, &op2b);
  296. if (code1 != ERROR_MARK && code2 != ERROR_MARK)
  297. {
  298. tree t = maybe_fold_or_comparisons (code1, op1a, op1b,
  299. code2, op2a, op2b);
  300. if (t)
  301. return t;
  302. }
  303. return fold_build2_loc (loc, TRUTH_OR_EXPR, boolean_type_node, c1, c2);
  304. }
  305. /* Returns true if N is either a constant or a SSA_NAME. */
  306. static bool
  307. constant_or_ssa_name (tree n)
  308. {
  309. switch (TREE_CODE (n))
  310. {
  311. case SSA_NAME:
  312. case INTEGER_CST:
  313. case REAL_CST:
  314. case COMPLEX_CST:
  315. case VECTOR_CST:
  316. return true;
  317. default:
  318. return false;
  319. }
  320. }
  321. /* Returns either a COND_EXPR or the folded expression if the folded
  322. expression is a MIN_EXPR, a MAX_EXPR, an ABS_EXPR,
  323. a constant or a SSA_NAME. */
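/* For instance, "x_1 < y_2 ? x_1 : y_2" can fold to MIN_EXPR <x_1, y_2>,
   and "c_3 ? a_4 : a_4" folds to the SSA_NAME a_4, in which case no
   COND_EXPR needs to be built.  */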
  324. static tree
  325. fold_build_cond_expr (tree type, tree cond, tree rhs, tree lhs)
  326. {
  327. tree rhs1, lhs1, cond_expr;
  328. /* If COND is the comparison r != 0 and r has boolean type, replace COND
  329. with the SSA_NAME r so that the vectorizer's bool pattern accepts it. */
  330. if (TREE_CODE (cond) == NE_EXPR)
  331. {
  332. tree op0 = TREE_OPERAND (cond, 0);
  333. tree op1 = TREE_OPERAND (cond, 1);
  334. if (TREE_CODE (op0) == SSA_NAME
  335. && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
  336. && (integer_zerop (op1)))
  337. cond = op0;
  338. }
  339. cond_expr = fold_ternary (COND_EXPR, type, cond,
  340. rhs, lhs);
  341. if (cond_expr == NULL_TREE)
  342. return build3 (COND_EXPR, type, cond, rhs, lhs);
  343. STRIP_USELESS_TYPE_CONVERSION (cond_expr);
  344. if (constant_or_ssa_name (cond_expr))
  345. return cond_expr;
  346. if (TREE_CODE (cond_expr) == ABS_EXPR)
  347. {
  348. rhs1 = TREE_OPERAND (cond_expr, 1);
  349. STRIP_USELESS_TYPE_CONVERSION (rhs1);
  350. if (constant_or_ssa_name (rhs1))
  351. return build1 (ABS_EXPR, type, rhs1);
  352. }
  353. if (TREE_CODE (cond_expr) == MIN_EXPR
  354. || TREE_CODE (cond_expr) == MAX_EXPR)
  355. {
  356. lhs1 = TREE_OPERAND (cond_expr, 0);
  357. STRIP_USELESS_TYPE_CONVERSION (lhs1);
  358. rhs1 = TREE_OPERAND (cond_expr, 1);
  359. STRIP_USELESS_TYPE_CONVERSION (rhs1);
  360. if (constant_or_ssa_name (rhs1)
  361. && constant_or_ssa_name (lhs1))
  362. return build2 (TREE_CODE (cond_expr), type, lhs1, rhs1);
  363. }
  364. return build3 (COND_EXPR, type, cond, rhs, lhs);
  365. }
  366. /* Add condition NC to the predicate list of basic block BB. LOOP is
  367. the loop to be if-converted. Use predicate of cd-equivalent block
  368. for join bb if it exists: we call basic blocks bb1 and bb2
  369. cd-equivalent if they are executed under the same condition. */
  370. static inline void
  371. add_to_predicate_list (struct loop *loop, basic_block bb, tree nc)
  372. {
  373. tree bc, *tp;
  374. basic_block dom_bb;
  375. if (is_true_predicate (nc))
  376. return;
  377. /* If dominance tells us this basic block is always executed,
  378. don't record any predicates for it. */
  379. if (dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
  380. return;
  381. dom_bb = get_immediate_dominator (CDI_DOMINATORS, bb);
  382. /* We use notion of cd equivalence to get simpler predicate for
  383. join block, e.g. if join block has 2 predecessors with predicates
  384. p1 & p2 and p1 & !p2, we'd like to get p1 for it instead of
  385. p1 & p2 | p1 & !p2. */
  386. if (dom_bb != loop->header
  387. && get_immediate_dominator (CDI_POST_DOMINATORS, dom_bb) == bb)
  388. {
  389. gcc_assert (flow_bb_inside_loop_p (loop, dom_bb));
  390. bc = bb_predicate (dom_bb);
  391. if (!is_true_predicate (bc))
  392. set_bb_predicate (bb, bc);
  393. else
  394. gcc_assert (is_true_predicate (bb_predicate (bb)));
  395. if (dump_file && (dump_flags & TDF_DETAILS))
  396. fprintf (dump_file, "Use predicate of bb#%d for bb#%d\n",
  397. dom_bb->index, bb->index);
  398. return;
  399. }
  400. if (!is_predicated (bb))
  401. bc = nc;
  402. else
  403. {
  404. bc = bb_predicate (bb);
  405. bc = fold_or_predicates (EXPR_LOCATION (bc), nc, bc);
  406. if (is_true_predicate (bc))
  407. {
  408. reset_bb_predicate (bb);
  409. return;
  410. }
  411. }
  412. /* Allow a TRUTH_NOT_EXPR around the main predicate. */
  413. if (TREE_CODE (bc) == TRUTH_NOT_EXPR)
  414. tp = &TREE_OPERAND (bc, 0);
  415. else
  416. tp = &bc;
  417. if (!is_gimple_condexpr (*tp))
  418. {
  419. gimple_seq stmts;
  420. *tp = force_gimple_operand_1 (*tp, &stmts, is_gimple_condexpr, NULL_TREE);
  421. add_bb_predicate_gimplified_stmts (bb, stmts);
  422. }
  423. set_bb_predicate (bb, bc);
  424. }
  425. /* Add the condition COND to the previous condition PREV_COND, and add
  426. this to the predicate list of the destination of edge E. LOOP is
  427. the loop to be if-converted. */
  428. static void
  429. add_to_dst_predicate_list (struct loop *loop, edge e,
  430. tree prev_cond, tree cond)
  431. {
  432. if (!flow_bb_inside_loop_p (loop, e->dest))
  433. return;
  434. if (!is_true_predicate (prev_cond))
  435. cond = fold_build2 (TRUTH_AND_EXPR, boolean_type_node,
  436. prev_cond, cond);
  437. if (!dominated_by_p (CDI_DOMINATORS, loop->latch, e->dest))
  438. add_to_predicate_list (loop, e->dest, cond);
  439. }
  440. /* Return true if one of the successor edges of BB exits LOOP. */
  441. static bool
  442. bb_with_exit_edge_p (struct loop *loop, basic_block bb)
  443. {
  444. edge e;
  445. edge_iterator ei;
  446. FOR_EACH_EDGE (e, ei, bb->succs)
  447. if (loop_exit_edge_p (loop, e))
  448. return true;
  449. return false;
  450. }
  451. /* Return true when PHI is if-convertible. PHI is part of loop LOOP
  452. and it belongs to basic block BB.
  453. PHI is not if-convertible if:
  454. - it has more than 2 arguments.
  455. When the flag_tree_loop_if_convert_stores is not set, PHI is not
  456. if-convertible if:
  457. - a virtual PHI is immediately used in another PHI node,
  458. - there is a virtual PHI in a BB other than the loop->header.
  459. When the aggressive_if_conv is set, PHI can have more than
  460. two arguments. */
  461. static bool
  462. if_convertible_phi_p (struct loop *loop, basic_block bb, gphi *phi,
  463. bool any_mask_load_store)
  464. {
  465. if (dump_file && (dump_flags & TDF_DETAILS))
  466. {
  467. fprintf (dump_file, "-------------------------\n");
  468. print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
  469. }
  470. if (bb != loop->header)
  471. {
  472. if (gimple_phi_num_args (phi) != 2
  473. && !aggressive_if_conv)
  474. {
  475. if (dump_file && (dump_flags & TDF_DETAILS))
  476. fprintf (dump_file, "More than two phi node args.\n");
  477. return false;
  478. }
  479. }
  480. if (flag_tree_loop_if_convert_stores || any_mask_load_store)
  481. return true;
  482. /* When the flag_tree_loop_if_convert_stores is not set, check
  483. that there are no memory writes in the branches of the loop to be
  484. if-converted. */
  485. if (virtual_operand_p (gimple_phi_result (phi)))
  486. {
  487. imm_use_iterator imm_iter;
  488. use_operand_p use_p;
  489. if (bb != loop->header)
  490. {
  491. if (dump_file && (dump_flags & TDF_DETAILS))
  492. fprintf (dump_file, "Virtual phi not on loop->header.\n");
  493. return false;
  494. }
  495. FOR_EACH_IMM_USE_FAST (use_p, imm_iter, gimple_phi_result (phi))
  496. {
  497. if (gimple_code (USE_STMT (use_p)) == GIMPLE_PHI)
  498. {
  499. if (dump_file && (dump_flags & TDF_DETAILS))
  500. fprintf (dump_file, "Difficult to handle this virtual phi.\n");
  501. return false;
  502. }
  503. }
  504. }
  505. return true;
  506. }
  507. /* Records the status of a data reference. This struct is attached to
  508. each DR->aux field. */
  509. struct ifc_dr {
  510. /* -1 when not initialized, 0 when false, 1 when true. */
  511. int written_at_least_once;
  512. /* -1 when not initialized, 0 when false, 1 when true. */
  513. int rw_unconditionally;
  514. };
  515. #define IFC_DR(DR) ((struct ifc_dr *) (DR)->aux)
  516. #define DR_WRITTEN_AT_LEAST_ONCE(DR) (IFC_DR (DR)->written_at_least_once)
  517. #define DR_RW_UNCONDITIONALLY(DR) (IFC_DR (DR)->rw_unconditionally)
  518. /* Returns true when the memory references of STMT are read or written
  519. unconditionally. In other words, this function returns true when
  520. for every data reference A in STMT there exist other accesses to
  521. a data reference with the same base with predicates that add up (OR-up) to
  522. the true predicate: this ensures that the data reference A is touched
  523. (read or written) on every iteration of the if-converted loop. */
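/* For instance, if one arm of a condition writes A[i] under predicate p
   and the other arm reads or writes A[i] under !p, the two predicates
   can OR up to the true predicate and both references are then marked
   as DR_RW_UNCONDITIONALLY.  */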
  524. static bool
  525. memrefs_read_or_written_unconditionally (gimple stmt,
  526. vec<data_reference_p> drs)
  527. {
  528. int i, j;
  529. data_reference_p a, b;
  530. tree ca = bb_predicate (gimple_bb (stmt));
  531. for (i = 0; drs.iterate (i, &a); i++)
  532. if (DR_STMT (a) == stmt)
  533. {
  534. bool found = false;
  535. int x = DR_RW_UNCONDITIONALLY (a);
  536. if (x == 0)
  537. return false;
  538. if (x == 1)
  539. continue;
  540. for (j = 0; drs.iterate (j, &b); j++)
  541. {
  542. tree ref_base_a = DR_REF (a);
  543. tree ref_base_b = DR_REF (b);
  544. if (DR_STMT (b) == stmt)
  545. continue;
  546. while (TREE_CODE (ref_base_a) == COMPONENT_REF
  547. || TREE_CODE (ref_base_a) == IMAGPART_EXPR
  548. || TREE_CODE (ref_base_a) == REALPART_EXPR)
  549. ref_base_a = TREE_OPERAND (ref_base_a, 0);
  550. while (TREE_CODE (ref_base_b) == COMPONENT_REF
  551. || TREE_CODE (ref_base_b) == IMAGPART_EXPR
  552. || TREE_CODE (ref_base_b) == REALPART_EXPR)
  553. ref_base_b = TREE_OPERAND (ref_base_b, 0);
  554. if (operand_equal_p (ref_base_a, ref_base_b, 0))
  555. {
  556. tree cb = bb_predicate (gimple_bb (DR_STMT (b)));
  557. if (DR_RW_UNCONDITIONALLY (b) == 1
  558. || is_true_predicate (cb)
  559. || is_true_predicate (ca
  560. = fold_or_predicates (EXPR_LOCATION (cb), ca, cb)))
  561. {
  562. DR_RW_UNCONDITIONALLY (a) = 1;
  563. DR_RW_UNCONDITIONALLY (b) = 1;
  564. found = true;
  565. break;
  566. }
  567. }
  568. }
  569. if (!found)
  570. {
  571. DR_RW_UNCONDITIONALLY (a) = 0;
  572. return false;
  573. }
  574. }
  575. return true;
  576. }
  577. /* Returns true when the memory references of STMT are unconditionally
  578. written. In other words, this function returns true when for every
  579. data reference A written in STMT, there exist other writes to the
  580. same data reference with predicates that add up (OR-up) to the true
  581. predicate: this ensures that the data reference A is written on
  582. every iteration of the if-converted loop. */
  583. static bool
  584. write_memrefs_written_at_least_once (gimple stmt,
  585. vec<data_reference_p> drs)
  586. {
  587. int i, j;
  588. data_reference_p a, b;
  589. tree ca = bb_predicate (gimple_bb (stmt));
  590. for (i = 0; drs.iterate (i, &a); i++)
  591. if (DR_STMT (a) == stmt
  592. && DR_IS_WRITE (a))
  593. {
  594. bool found = false;
  595. int x = DR_WRITTEN_AT_LEAST_ONCE (a);
  596. if (x == 0)
  597. return false;
  598. if (x == 1)
  599. continue;
  600. for (j = 0; drs.iterate (j, &b); j++)
  601. if (DR_STMT (b) != stmt
  602. && DR_IS_WRITE (b)
  603. && same_data_refs_base_objects (a, b))
  604. {
  605. tree cb = bb_predicate (gimple_bb (DR_STMT (b)));
  606. if (DR_WRITTEN_AT_LEAST_ONCE (b) == 1
  607. || is_true_predicate (cb)
  608. || is_true_predicate (ca = fold_or_predicates (EXPR_LOCATION (cb),
  609. ca, cb)))
  610. {
  611. DR_WRITTEN_AT_LEAST_ONCE (a) = 1;
  612. DR_WRITTEN_AT_LEAST_ONCE (b) = 1;
  613. found = true;
  614. break;
  615. }
  616. }
  617. if (!found)
  618. {
  619. DR_WRITTEN_AT_LEAST_ONCE (a) = 0;
  620. return false;
  621. }
  622. }
  623. return true;
  624. }
  625. /* Return true when the memory references of STMT won't trap in the
  626. if-converted code. There are two things that we have to check for:
  627. - writes to memory occur to writable memory: if-conversion of
  628. memory writes transforms the conditional memory writes into
  629. unconditional writes, i.e. "if (cond) A[i] = foo" is transformed
  630. into "A[i] = cond ? foo : A[i]", and as the write to memory may not
  631. be executed at all in the original code, it may be a readonly
  632. memory. To check that A is not const-qualified, we check that
  633. there exists at least an unconditional write to A in the current
  634. function.
  635. - reads or writes to memory are valid memory accesses for every
  636. iteration. To check that the memory accesses are correctly formed
  637. and that we are allowed to read and write in these locations, we
  638. check that the memory accesses to be if-converted occur at every
  639. iteration unconditionally. */
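/* So a conditional store such as "if (cond) A[i] = x;" is only treated
   as safe when other accesses to A[i] guarantee that it is written at
   least once and touched on every iteration, e.g. when the other branch
   of the condition also stores to A[i].  */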
  640. static bool
  641. ifcvt_memrefs_wont_trap (gimple stmt, vec<data_reference_p> refs)
  642. {
  643. return write_memrefs_written_at_least_once (stmt, refs)
  644. && memrefs_read_or_written_unconditionally (stmt, refs);
  645. }
  646. /* Wrapper around gimple_could_trap_p refined for the needs of the
  647. if-conversion. Try to prove that the memory accesses of STMT could
  648. not trap in the innermost loop containing STMT. */
  649. static bool
  650. ifcvt_could_trap_p (gimple stmt, vec<data_reference_p> refs)
  651. {
  652. if (gimple_vuse (stmt)
  653. && !gimple_could_trap_p_1 (stmt, false, false)
  654. && ifcvt_memrefs_wont_trap (stmt, refs))
  655. return false;
  656. return gimple_could_trap_p (stmt);
  657. }
  658. /* Return true if STMT could be converted into a masked load or store
  659. (conditional load or store based on a mask computed from bb predicate). */
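/* On targets that advertise masked vector loads/stores (see the
   can_vec_mask_load_store_p check below), such an access can stay
   conditional behind a mask instead of being made unconditional.  */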
  660. static bool
  661. ifcvt_can_use_mask_load_store (gimple stmt)
  662. {
  663. tree lhs, ref;
  664. machine_mode mode;
  665. basic_block bb = gimple_bb (stmt);
  666. bool is_load;
  667. if (!(flag_tree_loop_vectorize || bb->loop_father->force_vectorize)
  668. || bb->loop_father->dont_vectorize
  669. || !gimple_assign_single_p (stmt)
  670. || gimple_has_volatile_ops (stmt))
  671. return false;
  672. /* Check whether this is a load or store. */
  673. lhs = gimple_assign_lhs (stmt);
  674. if (gimple_store_p (stmt))
  675. {
  676. if (!is_gimple_val (gimple_assign_rhs1 (stmt)))
  677. return false;
  678. is_load = false;
  679. ref = lhs;
  680. }
  681. else if (gimple_assign_load_p (stmt))
  682. {
  683. is_load = true;
  684. ref = gimple_assign_rhs1 (stmt);
  685. }
  686. else
  687. return false;
  688. if (may_be_nonaddressable_p (ref))
  689. return false;
  690. /* Mask should be integer mode of the same size as the load/store
  691. mode. */
  692. mode = TYPE_MODE (TREE_TYPE (lhs));
  693. if (int_mode_for_mode (mode) == BLKmode
  694. || VECTOR_MODE_P (mode))
  695. return false;
  696. if (can_vec_mask_load_store_p (mode, is_load))
  697. return true;
  698. return false;
  699. }
  700. /* Return true when STMT is if-convertible.
  701. A GIMPLE_ASSIGN statement is not if-convertible if:
  702. - it is not movable,
  703. - it could trap,
  704. - its LHS is not a var decl. */
  705. static bool
  706. if_convertible_gimple_assign_stmt_p (gimple stmt,
  707. vec<data_reference_p> refs,
  708. bool *any_mask_load_store)
  709. {
  710. tree lhs = gimple_assign_lhs (stmt);
  711. basic_block bb;
  712. if (dump_file && (dump_flags & TDF_DETAILS))
  713. {
  714. fprintf (dump_file, "-------------------------\n");
  715. print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
  716. }
  717. if (!is_gimple_reg_type (TREE_TYPE (lhs)))
  718. return false;
  719. /* Some of these constraints might be too conservative. */
  720. if (stmt_ends_bb_p (stmt)
  721. || gimple_has_volatile_ops (stmt)
  722. || (TREE_CODE (lhs) == SSA_NAME
  723. && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
  724. || gimple_has_side_effects (stmt))
  725. {
  726. if (dump_file && (dump_flags & TDF_DETAILS))
  727. fprintf (dump_file, "stmt not suitable for ifcvt\n");
  728. return false;
  729. }
  730. /* tree-into-ssa.c uses GF_PLF_1, so avoid it, because
  731. in between if_convertible_loop_p and combine_blocks
  732. we can perform loop versioning. */
  733. gimple_set_plf (stmt, GF_PLF_2, false);
  734. if (flag_tree_loop_if_convert_stores)
  735. {
  736. if (ifcvt_could_trap_p (stmt, refs))
  737. {
  738. if (ifcvt_can_use_mask_load_store (stmt))
  739. {
  740. gimple_set_plf (stmt, GF_PLF_2, true);
  741. *any_mask_load_store = true;
  742. return true;
  743. }
  744. if (dump_file && (dump_flags & TDF_DETAILS))
  745. fprintf (dump_file, "tree could trap...\n");
  746. return false;
  747. }
  748. return true;
  749. }
  750. if (gimple_assign_rhs_could_trap_p (stmt))
  751. {
  752. if (ifcvt_can_use_mask_load_store (stmt))
  753. {
  754. gimple_set_plf (stmt, GF_PLF_2, true);
  755. *any_mask_load_store = true;
  756. return true;
  757. }
  758. if (dump_file && (dump_flags & TDF_DETAILS))
  759. fprintf (dump_file, "tree could trap...\n");
  760. return false;
  761. }
  762. bb = gimple_bb (stmt);
  763. if (TREE_CODE (lhs) != SSA_NAME
  764. && bb != bb->loop_father->header
  765. && !bb_with_exit_edge_p (bb->loop_father, bb))
  766. {
  767. if (ifcvt_can_use_mask_load_store (stmt))
  768. {
  769. gimple_set_plf (stmt, GF_PLF_2, true);
  770. *any_mask_load_store = true;
  771. return true;
  772. }
  773. if (dump_file && (dump_flags & TDF_DETAILS))
  774. {
  775. fprintf (dump_file, "LHS is not var\n");
  776. print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
  777. }
  778. return false;
  779. }
  780. return true;
  781. }
  782. /* Return true when STMT is if-convertible.
  783. A statement is if-convertible if:
  784. - it is an if-convertible GIMPLE_ASSIGN,
  785. - it is a GIMPLE_LABEL or a GIMPLE_COND,
  786. - it is a call to a built-in function. */
  787. static bool
  788. if_convertible_stmt_p (gimple stmt, vec<data_reference_p> refs,
  789. bool *any_mask_load_store)
  790. {
  791. switch (gimple_code (stmt))
  792. {
  793. case GIMPLE_LABEL:
  794. case GIMPLE_DEBUG:
  795. case GIMPLE_COND:
  796. return true;
  797. case GIMPLE_ASSIGN:
  798. return if_convertible_gimple_assign_stmt_p (stmt, refs,
  799. any_mask_load_store);
  800. case GIMPLE_CALL:
  801. {
  802. tree fndecl = gimple_call_fndecl (stmt);
  803. if (fndecl)
  804. {
  805. int flags = gimple_call_flags (stmt);
  806. if ((flags & ECF_CONST)
  807. && !(flags & ECF_LOOPING_CONST_OR_PURE)
  808. /* We can only vectorize some builtins at the moment,
  809. so restrict if-conversion to those. */
  810. && DECL_BUILT_IN (fndecl))
  811. return true;
  812. }
  813. return false;
  814. }
  815. default:
  816. /* Don't know what to do with 'em so don't do anything. */
  817. if (dump_file && (dump_flags & TDF_DETAILS))
  818. {
  819. fprintf (dump_file, "don't know what to do\n");
  820. print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
  821. }
  822. return false;
  823. break;
  824. }
  825. return true;
  826. }
  827. /* Assumes that BB has more than one predecessor.
  828. Returns false if at least one predecessor edge of BB is not critical,
  829. and true otherwise. */
  830. static inline bool
  831. all_preds_critical_p (basic_block bb)
  832. {
  833. edge e;
  834. edge_iterator ei;
  835. FOR_EACH_EDGE (e, ei, bb->preds)
  836. if (EDGE_COUNT (e->src->succs) == 1)
  837. return false;
  838. return true;
  839. }
  840. /* Returns true if at least one predecessor edge of BB is critical. */
  841. static inline bool
  842. has_pred_critical_p (basic_block bb)
  843. {
  844. edge e;
  845. edge_iterator ei;
  846. FOR_EACH_EDGE (e, ei, bb->preds)
  847. if (EDGE_COUNT (e->src->succs) > 1)
  848. return true;
  849. return false;
  850. }
  851. /* Return true when BB is if-convertible. This routine does not check
  852. basic block's statements and phis.
  853. A basic block is not if-convertible if:
  854. - it is non-empty and it is after the exit block (in BFS order),
  855. - it is after the exit block but before the latch,
  856. - its edges are not normal.
  857. Last restriction is valid if aggressive_if_conv is false.
  858. EXIT_BB is the basic block containing the exit of the LOOP. BB is
  859. inside LOOP. */
  860. static bool
  861. if_convertible_bb_p (struct loop *loop, basic_block bb, basic_block exit_bb)
  862. {
  863. edge e;
  864. edge_iterator ei;
  865. if (dump_file && (dump_flags & TDF_DETAILS))
  866. fprintf (dump_file, "----------[%d]-------------\n", bb->index);
  867. if (EDGE_COUNT (bb->succs) > 2)
  868. return false;
  869. if (EDGE_COUNT (bb->preds) > 2
  870. && !aggressive_if_conv)
  871. return false;
  872. if (exit_bb)
  873. {
  874. if (bb != loop->latch)
  875. {
  876. if (dump_file && (dump_flags & TDF_DETAILS))
  877. fprintf (dump_file, "basic block after exit bb but before latch\n");
  878. return false;
  879. }
  880. else if (!empty_block_p (bb))
  881. {
  882. if (dump_file && (dump_flags & TDF_DETAILS))
  883. fprintf (dump_file, "non empty basic block after exit bb\n");
  884. return false;
  885. }
  886. else if (bb == loop->latch
  887. && bb != exit_bb
  888. && !dominated_by_p (CDI_DOMINATORS, bb, exit_bb))
  889. {
  890. if (dump_file && (dump_flags & TDF_DETAILS))
  891. fprintf (dump_file, "latch is not dominated by exit_block\n");
  892. return false;
  893. }
  894. }
  895. /* Be less adventurous and handle only normal edges. */
  896. FOR_EACH_EDGE (e, ei, bb->succs)
  897. if (e->flags & (EDGE_EH | EDGE_ABNORMAL | EDGE_IRREDUCIBLE_LOOP))
  898. {
  899. if (dump_file && (dump_flags & TDF_DETAILS))
  900. fprintf (dump_file, "Difficult to handle edges\n");
  901. return false;
  902. }
  903. /* At least one incoming edge has to be non-critical as otherwise edge
  904. predicates are not equal to basic-block predicates of the edge
  905. source. This check is skipped if aggressive_if_conv is true. */
  906. if (!aggressive_if_conv
  907. && EDGE_COUNT (bb->preds) > 1
  908. && bb != loop->header
  909. && all_preds_critical_p (bb))
  910. {
  911. if (dump_file && (dump_flags & TDF_DETAILS))
  912. fprintf (dump_file, "only critical predecessors\n");
  913. return false;
  914. }
  915. return true;
  916. }
  917. /* Return true when all predecessor blocks of BB are visited. The
  918. VISITED bitmap keeps track of the visited blocks. */
  919. static bool
  920. pred_blocks_visited_p (basic_block bb, bitmap *visited)
  921. {
  922. edge e;
  923. edge_iterator ei;
  924. FOR_EACH_EDGE (e, ei, bb->preds)
  925. if (!bitmap_bit_p (*visited, e->src->index))
  926. return false;
  927. return true;
  928. }
  929. /* Get body of a LOOP in suitable order for if-conversion. It is the
  930. caller's responsibility to deallocate the basic block list.
  931. The if-conversion-suitable order is breadth-first search (BFS) order
  932. with an additional constraint: select a block only if all its
  933. predecessors have already been selected. */
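/* For a simple diamond (header -> then/else -> join -> latch) the join
   block is deferred until both of its predecessors have been selected,
   so the returned order is roughly: header, then, else, join, latch.  */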
  934. static basic_block *
  935. get_loop_body_in_if_conv_order (const struct loop *loop)
  936. {
  937. basic_block *blocks, *blocks_in_bfs_order;
  938. basic_block bb;
  939. bitmap visited;
  940. unsigned int index = 0;
  941. unsigned int visited_count = 0;
  942. gcc_assert (loop->num_nodes);
  943. gcc_assert (loop->latch != EXIT_BLOCK_PTR_FOR_FN (cfun));
  944. blocks = XCNEWVEC (basic_block, loop->num_nodes);
  945. visited = BITMAP_ALLOC (NULL);
  946. blocks_in_bfs_order = get_loop_body_in_bfs_order (loop);
  947. index = 0;
  948. while (index < loop->num_nodes)
  949. {
  950. bb = blocks_in_bfs_order [index];
  951. if (bb->flags & BB_IRREDUCIBLE_LOOP)
  952. {
  953. free (blocks_in_bfs_order);
  954. BITMAP_FREE (visited);
  955. free (blocks);
  956. return NULL;
  957. }
  958. if (!bitmap_bit_p (visited, bb->index))
  959. {
  960. if (pred_blocks_visited_p (bb, &visited)
  961. || bb == loop->header)
  962. {
  963. /* This block is now visited. */
  964. bitmap_set_bit (visited, bb->index);
  965. blocks[visited_count++] = bb;
  966. }
  967. }
  968. index++;
  969. if (index == loop->num_nodes
  970. && visited_count != loop->num_nodes)
  971. /* Not done yet. */
  972. index = 0;
  973. }
  974. free (blocks_in_bfs_order);
  975. BITMAP_FREE (visited);
  976. return blocks;
  977. }
  978. /* Returns true when the analysis of the predicates for all the basic
  979. blocks in LOOP succeeded.
  980. predicate_bbs first allocates the predicates of the basic blocks.
  981. These fields are then initialized with the tree expressions
  982. representing the predicates under which a basic block is executed
  983. in the LOOP. As the loop->header is executed at each iteration, it
  984. has the "true" predicate. Other statements executed under a
  985. condition are predicated with that condition, for example
  986. | if (x)
  987. | S1;
  988. | else
  989. | S2;
  990. S1 will be predicated with "x", and
  991. S2 will be predicated with "!x". */
  992. static void
  993. predicate_bbs (loop_p loop)
  994. {
  995. unsigned int i;
  996. for (i = 0; i < loop->num_nodes; i++)
  997. init_bb_predicate (ifc_bbs[i]);
  998. for (i = 0; i < loop->num_nodes; i++)
  999. {
  1000. basic_block bb = ifc_bbs[i];
  1001. tree cond;
  1002. gimple stmt;
  1003. /* The loop latch and loop exit block are always executed and
  1004. have no extra conditions to be processed: skip them. */
  1005. if (bb == loop->latch
  1006. || bb_with_exit_edge_p (loop, bb))
  1007. {
  1008. reset_bb_predicate (bb);
  1009. continue;
  1010. }
  1011. cond = bb_predicate (bb);
  1012. stmt = last_stmt (bb);
  1013. if (stmt && gimple_code (stmt) == GIMPLE_COND)
  1014. {
  1015. tree c2;
  1016. edge true_edge, false_edge;
  1017. location_t loc = gimple_location (stmt);
  1018. tree c = build2_loc (loc, gimple_cond_code (stmt),
  1019. boolean_type_node,
  1020. gimple_cond_lhs (stmt),
  1021. gimple_cond_rhs (stmt));
  1022. /* Add new condition into destination's predicate list. */
  1023. extract_true_false_edges_from_block (gimple_bb (stmt),
  1024. &true_edge, &false_edge);
  1025. /* If C is true, then TRUE_EDGE is taken. */
  1026. add_to_dst_predicate_list (loop, true_edge, unshare_expr (cond),
  1027. unshare_expr (c));
  1028. /* If C is false, then FALSE_EDGE is taken. */
  1029. c2 = build1_loc (loc, TRUTH_NOT_EXPR, boolean_type_node,
  1030. unshare_expr (c));
  1031. add_to_dst_predicate_list (loop, false_edge,
  1032. unshare_expr (cond), c2);
  1033. cond = NULL_TREE;
  1034. }
  1035. /* If current bb has only one successor, then consider it as an
  1036. unconditional goto. */
  1037. if (single_succ_p (bb))
  1038. {
  1039. basic_block bb_n = single_succ (bb);
  1040. /* The successor bb inherits the predicate of its
  1041. predecessor. If there is no predicate in the predecessor
  1042. bb, then consider the successor bb as always executed. */
  1043. if (cond == NULL_TREE)
  1044. cond = boolean_true_node;
  1045. add_to_predicate_list (loop, bb_n, cond);
  1046. }
  1047. }
  1048. /* The loop header is always executed. */
  1049. reset_bb_predicate (loop->header);
  1050. gcc_assert (bb_predicate_gimplified_stmts (loop->header) == NULL
  1051. && bb_predicate_gimplified_stmts (loop->latch) == NULL);
  1052. }
  1053. /* Return true when LOOP is if-convertible. This is a helper function
  1054. for if_convertible_loop_p. REFS and DDRS are initialized and freed
  1055. in if_convertible_loop_p. */
  1056. static bool
  1057. if_convertible_loop_p_1 (struct loop *loop,
  1058. vec<loop_p> *loop_nest,
  1059. vec<data_reference_p> *refs,
  1060. vec<ddr_p> *ddrs, bool *any_mask_load_store)
  1061. {
  1062. bool res;
  1063. unsigned int i;
  1064. basic_block exit_bb = NULL;
  1065. /* Don't if-convert the loop when the data dependences cannot be
  1066. computed: the loop won't be vectorized in that case. */
  1067. res = compute_data_dependences_for_loop (loop, true, loop_nest, refs, ddrs);
  1068. if (!res)
  1069. return false;
  1070. calculate_dominance_info (CDI_DOMINATORS);
  1071. calculate_dominance_info (CDI_POST_DOMINATORS);
  1072. /* Allow statements that can be handled during if-conversion. */
  1073. ifc_bbs = get_loop_body_in_if_conv_order (loop);
  1074. if (!ifc_bbs)
  1075. {
  1076. if (dump_file && (dump_flags & TDF_DETAILS))
  1077. fprintf (dump_file, "Irreducible loop\n");
  1078. return false;
  1079. }
  1080. for (i = 0; i < loop->num_nodes; i++)
  1081. {
  1082. basic_block bb = ifc_bbs[i];
  1083. if (!if_convertible_bb_p (loop, bb, exit_bb))
  1084. return false;
  1085. if (bb_with_exit_edge_p (loop, bb))
  1086. exit_bb = bb;
  1087. }
  1088. for (i = 0; i < loop->num_nodes; i++)
  1089. {
  1090. basic_block bb = ifc_bbs[i];
  1091. gimple_stmt_iterator gsi;
  1092. for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
  1093. switch (gimple_code (gsi_stmt (gsi)))
  1094. {
  1095. case GIMPLE_LABEL:
  1096. case GIMPLE_ASSIGN:
  1097. case GIMPLE_CALL:
  1098. case GIMPLE_DEBUG:
  1099. case GIMPLE_COND:
  1100. break;
  1101. default:
  1102. return false;
  1103. }
  1104. }
  1105. if (flag_tree_loop_if_convert_stores)
  1106. {
  1107. data_reference_p dr;
  1108. for (i = 0; refs->iterate (i, &dr); i++)
  1109. {
  1110. dr->aux = XNEW (struct ifc_dr);
  1111. DR_WRITTEN_AT_LEAST_ONCE (dr) = -1;
  1112. DR_RW_UNCONDITIONALLY (dr) = -1;
  1113. }
  1114. predicate_bbs (loop);
  1115. }
  1116. for (i = 0; i < loop->num_nodes; i++)
  1117. {
  1118. basic_block bb = ifc_bbs[i];
  1119. gimple_stmt_iterator itr;
  1120. /* Check the if-convertibility of statements in predicated BBs. */
  1121. if (!dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
  1122. for (itr = gsi_start_bb (bb); !gsi_end_p (itr); gsi_next (&itr))
  1123. if (!if_convertible_stmt_p (gsi_stmt (itr), *refs,
  1124. any_mask_load_store))
  1125. return false;
  1126. }
  1127. if (flag_tree_loop_if_convert_stores)
  1128. for (i = 0; i < loop->num_nodes; i++)
  1129. free_bb_predicate (ifc_bbs[i]);
  1130. /* Checking PHIs needs to be done after stmts, as whether there
  1131. are any masked loads or stores affects the tests. */
  1132. for (i = 0; i < loop->num_nodes; i++)
  1133. {
  1134. basic_block bb = ifc_bbs[i];
  1135. gphi_iterator itr;
  1136. for (itr = gsi_start_phis (bb); !gsi_end_p (itr); gsi_next (&itr))
  1137. if (!if_convertible_phi_p (loop, bb, itr.phi (),
  1138. *any_mask_load_store))
  1139. return false;
  1140. }
  1141. if (dump_file)
  1142. fprintf (dump_file, "Applying if-conversion\n");
  1143. return true;
  1144. }
  1145. /* Return true when LOOP is if-convertible.
  1146. LOOP is if-convertible if:
  1147. - it is innermost,
  1148. - it has two or more basic blocks,
  1149. - it has only one exit,
  1150. - its loop header has no exit edge,
  1151. - its basic blocks and phi nodes are if-convertible. */
  1152. static bool
  1153. if_convertible_loop_p (struct loop *loop, bool *any_mask_load_store)
  1154. {
  1155. edge e;
  1156. edge_iterator ei;
  1157. bool res = false;
  1158. vec<data_reference_p> refs;
  1159. vec<ddr_p> ddrs;
  1160. /* Handle only innermost loop. */
  1161. if (!loop || loop->inner)
  1162. {
  1163. if (dump_file && (dump_flags & TDF_DETAILS))
  1164. fprintf (dump_file, "not innermost loop\n");
  1165. return false;
  1166. }
  1167. /* If only one block, no need for if-conversion. */
  1168. if (loop->num_nodes <= 2)
  1169. {
  1170. if (dump_file && (dump_flags & TDF_DETAILS))
  1171. fprintf (dump_file, "less than 2 basic blocks\n");
  1172. return false;
  1173. }
  1174. /* More than one loop exit is too much to handle. */
  1175. if (!single_exit (loop))
  1176. {
  1177. if (dump_file && (dump_flags & TDF_DETAILS))
  1178. fprintf (dump_file, "multiple exits\n");
  1179. return false;
  1180. }
  1181. /* If one of the loop header's edges is an exit edge then do not
  1182. apply if-conversion. */
  1183. FOR_EACH_EDGE (e, ei, loop->header->succs)
  1184. if (loop_exit_edge_p (loop, e))
  1185. return false;
  1186. refs.create (5);
  1187. ddrs.create (25);
  1188. auto_vec<loop_p, 3> loop_nest;
  1189. res = if_convertible_loop_p_1 (loop, &loop_nest, &refs, &ddrs,
  1190. any_mask_load_store);
  1191. if (flag_tree_loop_if_convert_stores)
  1192. {
  1193. data_reference_p dr;
  1194. unsigned int i;
  1195. for (i = 0; refs.iterate (i, &dr); i++)
  1196. free (dr->aux);
  1197. }
  1198. free_data_refs (refs);
  1199. free_dependence_relations (ddrs);
  1200. return res;
  1201. }
  1202. /* Returns true if the def-stmt for phi argument ARG is a simple
  1203. increment/decrement located in a predicated basic block.
  1204. In fact, we search for the following PHI pattern:
  1205. loop-header:
  1206. reduc_1 = PHI <..., reduc_2>
  1207. ...
  1208. if (...)
  1209. reduc_3 = ...
  1210. reduc_2 = PHI <reduc_1, reduc_3>
  1211. ARG_0 and ARG_1 are the corresponding PHI arguments.
  1212. REDUC, OP0 and OP1 contain reduction stmt and its operands.
  1213. EXTENDED is true if PHI has > 2 arguments. */
  1214. static bool
  1215. is_cond_scalar_reduction (gimple phi, gimple *reduc, tree arg_0, tree arg_1,
  1216. tree *op0, tree *op1, bool extended)
  1217. {
  1218. tree lhs, r_op1, r_op2;
  1219. gimple stmt;
  1220. gimple header_phi = NULL;
  1221. enum tree_code reduction_op;
  1222. basic_block bb = gimple_bb (phi);
  1223. struct loop *loop = bb->loop_father;
  1224. edge latch_e = loop_latch_edge (loop);
  1225. imm_use_iterator imm_iter;
  1226. use_operand_p use_p;
  1227. edge e;
  1228. edge_iterator ei;
  1229. bool result = false;
  1230. if (TREE_CODE (arg_0) != SSA_NAME || TREE_CODE (arg_1) != SSA_NAME)
  1231. return false;
  1232. if (!extended && gimple_code (SSA_NAME_DEF_STMT (arg_0)) == GIMPLE_PHI)
  1233. {
  1234. lhs = arg_1;
  1235. header_phi = SSA_NAME_DEF_STMT (arg_0);
  1236. stmt = SSA_NAME_DEF_STMT (arg_1);
  1237. }
  1238. else if (gimple_code (SSA_NAME_DEF_STMT (arg_1)) == GIMPLE_PHI)
  1239. {
  1240. lhs = arg_0;
  1241. header_phi = SSA_NAME_DEF_STMT (arg_1);
  1242. stmt = SSA_NAME_DEF_STMT (arg_0);
  1243. }
  1244. else
  1245. return false;
  1246. if (gimple_bb (header_phi) != loop->header)
  1247. return false;
  1248. if (PHI_ARG_DEF_FROM_EDGE (header_phi, latch_e) != PHI_RESULT (phi))
  1249. return false;
  1250. if (gimple_code (stmt) != GIMPLE_ASSIGN
  1251. || gimple_has_volatile_ops (stmt))
  1252. return false;
  1253. if (!flow_bb_inside_loop_p (loop, gimple_bb (stmt)))
  1254. return false;
  1255. if (!is_predicated (gimple_bb (stmt)))
  1256. return false;
  1257. /* Check that stmt-block is predecessor of phi-block. */
  1258. FOR_EACH_EDGE (e, ei, gimple_bb (stmt)->succs)
  1259. if (e->dest == bb)
  1260. {
  1261. result = true;
  1262. break;
  1263. }
  1264. if (!result)
  1265. return false;
  1266. if (!has_single_use (lhs))
  1267. return false;
  1268. reduction_op = gimple_assign_rhs_code (stmt);
  1269. if (reduction_op != PLUS_EXPR && reduction_op != MINUS_EXPR)
  1270. return false;
  1271. r_op1 = gimple_assign_rhs1 (stmt);
  1272. r_op2 = gimple_assign_rhs2 (stmt);
  1273. /* Make R_OP1 hold the reduction variable. */
  1274. if (r_op2 == PHI_RESULT (header_phi)
  1275. && reduction_op == PLUS_EXPR)
  1276. {
  1277. tree tmp = r_op1;
  1278. r_op1 = r_op2;
  1279. r_op2 = tmp;
  1280. }
  1281. else if (r_op1 != PHI_RESULT (header_phi))
  1282. return false;
  1283. /* Check that R_OP1 is used in reduction stmt or in PHI only. */
  1284. FOR_EACH_IMM_USE_FAST (use_p, imm_iter, r_op1)
  1285. {
  1286. gimple use_stmt = USE_STMT (use_p);
  1287. if (is_gimple_debug (use_stmt))
  1288. continue;
  1289. if (use_stmt == stmt)
  1290. continue;
  1291. if (gimple_code (use_stmt) != GIMPLE_PHI)
  1292. return false;
  1293. }
  1294. *op0 = r_op1; *op1 = r_op2;
  1295. *reduc = stmt;
  1296. return true;
  1297. }

/* Converts a conditional scalar reduction into unconditional form, e.g.

     bb_4
       if (_5 != 0) goto bb_5 else goto bb_6
     end_bb_4
     bb_5
       res_6 = res_13 + 1;
     end_bb_5
     bb_6
       # res_2 = PHI <res_13(4), res_6(5)>
     end_bb_6

   will be converted into the sequence

     _ifc__1 = _5 != 0 ? 1 : 0;
     res_2 = res_13 + _ifc__1;

   Argument SWAP tells whether the arguments of the conditional expression
   should be swapped.
   Returns the rhs of the resulting PHI assignment.  */
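
/* Illustrative note (not part of the original sources): for a MINUS_EXPR
   reduction the same rewrite yields

     _ifc__1 = _5 != 0 ? 1 : 0;
     res_2 = res_13 - _ifc__1;

   and when SWAP is true the branches of the generated COND_EXPR are
   exchanged, i.e. _ifc__1 = _5 != 0 ? 0 : 1.  */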
static tree
convert_scalar_cond_reduction (gimple reduc, gimple_stmt_iterator *gsi,
                               tree cond, tree op0, tree op1, bool swap)
{
  gimple_stmt_iterator stmt_it;
  gimple new_assign;
  tree rhs;
  tree rhs1 = gimple_assign_rhs1 (reduc);
  tree tmp = make_temp_ssa_name (TREE_TYPE (rhs1), NULL, "_ifc_");
  tree c;
  tree zero = build_zero_cst (TREE_TYPE (rhs1));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Found cond scalar reduction.\n");
      print_gimple_stmt (dump_file, reduc, 0, TDF_SLIM);
    }

  /* Build cond expression using COND and constant operand
     of reduction rhs.  */
  c = fold_build_cond_expr (TREE_TYPE (rhs1),
                            unshare_expr (cond),
                            swap ? zero : op1,
                            swap ? op1 : zero);

  /* Create assignment stmt and insert it at GSI.  */
  new_assign = gimple_build_assign (tmp, c);
  gsi_insert_before (gsi, new_assign, GSI_SAME_STMT);
  /* Build rhs for unconditional increment/decrement.  */
  rhs = fold_build2 (gimple_assign_rhs_code (reduc),
                     TREE_TYPE (rhs1), op0, tmp);

  /* Delete original reduction stmt.  */
  stmt_it = gsi_for_stmt (reduc);
  gsi_remove (&stmt_it, true);
  release_defs (reduc);
  return rhs;
}

/* Helpers for PHI arguments hashtable map.  */

struct phi_args_hash_traits : default_hashmap_traits
{
  static inline hashval_t hash (tree);
  static inline bool equal_keys (tree, tree);
};

inline hashval_t
phi_args_hash_traits::hash (tree value)
{
  return iterative_hash_expr (value, 0);
}

inline bool
phi_args_hash_traits::equal_keys (tree value1, tree value2)
{
  return operand_equal_p (value1, value2, 0);
}

/* Produce condition for all occurrences of ARG in PHI node.  */
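
/* Illustrative sketch (not part of the original sources; the names are
   hypothetical): for

     x_1 = PHI <a_2(3), b_4(5), a_2(7)>

   and OCCUR = {0, 2}, i.e. the occurrences of a_2, the generated condition
   is the OR of the predicates of bb_3 and bb_7; predicates known to be
   true are simply skipped.  */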
static tree
gen_phi_arg_condition (gphi *phi, vec<int> *occur,
                       gimple_stmt_iterator *gsi)
{
  int len;
  int i;
  tree cond = NULL_TREE;
  tree c;
  edge e;

  len = occur->length ();
  gcc_assert (len > 0);
  for (i = 0; i < len; i++)
    {
      e = gimple_phi_arg_edge (phi, (*occur)[i]);
      c = bb_predicate (e->src);
      if (is_true_predicate (c))
        continue;
      c = force_gimple_operand_gsi_1 (gsi, unshare_expr (c),
                                      is_gimple_condexpr, NULL_TREE,
                                      true, GSI_SAME_STMT);
      if (cond != NULL_TREE)
        {
          /* Must build OR expression.  */
          cond = fold_or_predicates (EXPR_LOCATION (c), c, cond);
          cond = force_gimple_operand_gsi_1 (gsi, unshare_expr (cond),
                                             is_gimple_condexpr, NULL_TREE,
                                             true, GSI_SAME_STMT);
        }
      else
        cond = c;
    }
  gcc_assert (cond != NULL_TREE);
  return cond;
}

/* Replace a scalar PHI node with a COND_EXPR using COND as the condition.
   This routine can handle PHI nodes with more than two arguments.

   For example,
     S1: A = PHI <x1(1), x2(5)>
   is converted into
     S2: A = cond ? x1 : x2;

   The generated code is inserted at GSI, which points to the top of the
   basic block's statement list.
   If the PHI node has more than two arguments, a chain of conditional
   expressions is produced.  */
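
/* Illustrative sketch (not part of the original sources; the names x1, x2,
   x3 and pred_bbN are hypothetical): for a PHI with three distinct
   arguments

     A = PHI <x1(2), x2(3), x3(4)>

   the chain produced below is roughly

     _ifc_0 = pred_bb2 ? x1 : x2;
     _ifc_1 = pred_bb3 ? x2 : _ifc_0;
     A      = pred_bb4 ? x3 : _ifc_1;

   When some value occurs on several incoming edges, it is moved to the end
   of the chain and selected under the OR of the corresponding block
   predicates.  */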
static void
predicate_scalar_phi (gphi *phi, gimple_stmt_iterator *gsi)
{
  gimple new_stmt = NULL, reduc;
  tree rhs, res, arg0, arg1, op0, op1, scev;
  tree cond;
  unsigned int index0;
  unsigned int max, args_len;
  edge e;
  basic_block bb;
  unsigned int i;

  res = gimple_phi_result (phi);
  if (virtual_operand_p (res))
    return;

  if ((rhs = degenerate_phi_result (phi))
      || ((scev = analyze_scalar_evolution (gimple_bb (phi)->loop_father,
                                            res))
          && !chrec_contains_undetermined (scev)
          && scev != res
          && (rhs = gimple_phi_arg_def (phi, 0))))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Degenerate phi!\n");
          print_gimple_stmt (dump_file, phi, 0, TDF_SLIM);
        }
      new_stmt = gimple_build_assign (res, rhs);
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
      update_stmt (new_stmt);
      return;
    }

  bb = gimple_bb (phi);
  if (EDGE_COUNT (bb->preds) == 2)
    {
      /* Predicate an ordinary PHI node with 2 arguments.  */
      edge first_edge, second_edge;
      basic_block true_bb;
      first_edge = EDGE_PRED (bb, 0);
      second_edge = EDGE_PRED (bb, 1);
      cond = bb_predicate (first_edge->src);
      if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
        {
          edge tmp_edge = first_edge;
          first_edge = second_edge;
          second_edge = tmp_edge;
        }
      if (EDGE_COUNT (first_edge->src->succs) > 1)
        {
          cond = bb_predicate (second_edge->src);
          if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
            cond = TREE_OPERAND (cond, 0);
          else
            first_edge = second_edge;
        }
      else
        cond = bb_predicate (first_edge->src);

      /* Gimplify the condition to a valid cond-expr conditional operand.  */
      cond = force_gimple_operand_gsi_1 (gsi, unshare_expr (cond),
                                         is_gimple_condexpr, NULL_TREE,
                                         true, GSI_SAME_STMT);
      true_bb = first_edge->src;
      if (EDGE_PRED (bb, 1)->src == true_bb)
        {
          arg0 = gimple_phi_arg_def (phi, 1);
          arg1 = gimple_phi_arg_def (phi, 0);
        }
      else
        {
          arg0 = gimple_phi_arg_def (phi, 0);
          arg1 = gimple_phi_arg_def (phi, 1);
        }
      if (is_cond_scalar_reduction (phi, &reduc, arg0, arg1,
                                    &op0, &op1, false))
        /* Convert reduction stmt into vectorizable form.  */
        rhs = convert_scalar_cond_reduction (reduc, gsi, cond, op0, op1,
                                             true_bb != gimple_bb (reduc));
      else
        /* Build new RHS using selected condition and arguments.  */
        rhs = fold_build_cond_expr (TREE_TYPE (res), unshare_expr (cond),
                                    arg0, arg1);
      new_stmt = gimple_build_assign (res, rhs);
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
      update_stmt (new_stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "new phi replacement stmt\n");
          print_gimple_stmt (dump_file, new_stmt, 0, TDF_SLIM);
        }
      return;
    }

  /* Create a hashmap for the PHI node which maps each argument value to
     the vector of argument indexes having that value.  */
  bool swap = false;
  hash_map<tree, auto_vec<int>, phi_args_hash_traits> phi_arg_map;
  unsigned int num_args = gimple_phi_num_args (phi);
  int max_ind = -1;
  /* Vector of different PHI argument values.  */
  auto_vec<tree> args (num_args);

  /* Compute phi_arg_map.  */
  for (i = 0; i < num_args; i++)
    {
      tree arg;

      arg = gimple_phi_arg_def (phi, i);
      if (!phi_arg_map.get (arg))
        args.quick_push (arg);
      phi_arg_map.get_or_insert (arg).safe_push (i);
    }

  /* Determine the element with the maximal number of occurrences.  */
  max_ind = -1;
  max = 1;
  args_len = args.length ();
  for (i = 0; i < args_len; i++)
    {
      unsigned int len;
      if ((len = phi_arg_map.get (args[i])->length ()) > max)
        {
          max_ind = (int) i;
          max = len;
        }
    }

  /* Put the element with the maximal number of occurrences at the end
     of ARGS.  */
  if (max_ind != -1 && max_ind + 1 != (int) args_len)
    {
      tree tmp = args[args_len - 1];
      args[args_len - 1] = args[max_ind];
      args[max_ind] = tmp;
    }

  /* Handle one special case when the number of arguments with different
     values equals 2 and one argument has only a single occurrence.  Such a
     PHI can be handled as if it had only 2 arguments.  */
  if (args_len == 2 && phi_arg_map.get (args[0])->length () == 1)
    {
      vec<int> *indexes;
      indexes = phi_arg_map.get (args[0]);
      index0 = (*indexes)[0];
      arg0 = args[0];
      arg1 = args[1];
      e = gimple_phi_arg_edge (phi, index0);
      cond = bb_predicate (e->src);
      if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
        {
          swap = true;
          cond = TREE_OPERAND (cond, 0);
        }
      /* Gimplify the condition to a valid cond-expr conditional operand.  */
      cond = force_gimple_operand_gsi_1 (gsi, unshare_expr (cond),
                                         is_gimple_condexpr, NULL_TREE,
                                         true, GSI_SAME_STMT);
      if (!(is_cond_scalar_reduction (phi, &reduc, arg0, arg1,
                                      &op0, &op1, true)))
        rhs = fold_build_cond_expr (TREE_TYPE (res), unshare_expr (cond),
                                    swap ? arg1 : arg0,
                                    swap ? arg0 : arg1);
      else
        /* Convert reduction stmt into vectorizable form.  */
        rhs = convert_scalar_cond_reduction (reduc, gsi, cond, op0, op1,
                                             swap);
      new_stmt = gimple_build_assign (res, rhs);
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
      update_stmt (new_stmt);
    }
  else
    {
      /* Common case.  */
      vec<int> *indexes;
      tree type = TREE_TYPE (gimple_phi_result (phi));
      tree lhs;
      arg1 = args[1];
      for (i = 0; i < args_len; i++)
        {
          arg0 = args[i];
          indexes = phi_arg_map.get (args[i]);
          if (i != args_len - 1)
            lhs = make_temp_ssa_name (type, NULL, "_ifc_");
          else
            lhs = res;
          cond = gen_phi_arg_condition (phi, indexes, gsi);
          rhs = fold_build_cond_expr (type, unshare_expr (cond),
                                      arg0, arg1);
          new_stmt = gimple_build_assign (lhs, rhs);
          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
          update_stmt (new_stmt);
          arg1 = lhs;
        }
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "new extended phi replacement stmt\n");
      print_gimple_stmt (dump_file, new_stmt, 0, TDF_SLIM);
    }
}

/* Replaces in LOOP all the scalar phi nodes other than those in the
   LOOP->header block with conditional modify expressions.  */

static void
predicate_all_scalar_phis (struct loop *loop)
{
  basic_block bb;
  unsigned int orig_loop_num_nodes = loop->num_nodes;
  unsigned int i;

  for (i = 1; i < orig_loop_num_nodes; i++)
    {
      gphi *phi;
      gimple_stmt_iterator gsi;
      gphi_iterator phi_gsi;
      bb = ifc_bbs[i];

      if (bb == loop->header)
        continue;

      if (EDGE_COUNT (bb->preds) == 1)
        continue;

      phi_gsi = gsi_start_phis (bb);
      if (gsi_end_p (phi_gsi))
        continue;

      gsi = gsi_after_labels (bb);
      while (!gsi_end_p (phi_gsi))
        {
          phi = phi_gsi.phi ();
          predicate_scalar_phi (phi, &gsi);
          release_phi_node (phi);
          gsi_next (&phi_gsi);
        }

      set_phi_nodes (bb, NULL);
    }
}

/* Insert in each basic block of LOOP the statements produced by the
   gimplification of the predicates.  */

static void
insert_gimplified_predicates (loop_p loop, bool any_mask_load_store)
{
  unsigned int i;

  for (i = 0; i < loop->num_nodes; i++)
    {
      basic_block bb = ifc_bbs[i];
      gimple_seq stmts;

      if (!is_predicated (bb))
        gcc_assert (bb_predicate_gimplified_stmts (bb) == NULL);
      if (!is_predicated (bb))
        {
          /* Do not insert statements for a basic block that is not
             predicated.  Also make sure that the predicate of the
             basic block is set to true.  */
          reset_bb_predicate (bb);
          continue;
        }

      stmts = bb_predicate_gimplified_stmts (bb);
      if (stmts)
        {
          if (flag_tree_loop_if_convert_stores
              || any_mask_load_store)
            {
              /* Insert the predicate of the BB just after the label,
                 as the if-conversion of memory writes will use this
                 predicate.  */
              gimple_stmt_iterator gsi = gsi_after_labels (bb);
              gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
            }
          else
            {
              /* Insert the predicate of the BB at the end of the BB
                 as this would reduce the register pressure: the only
                 use of this predicate will be in successor BBs.  */
              gimple_stmt_iterator gsi = gsi_last_bb (bb);

              if (gsi_end_p (gsi)
                  || stmt_ends_bb_p (gsi_stmt (gsi)))
                gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
              else
                gsi_insert_seq_after (&gsi, stmts, GSI_SAME_STMT);
            }

          /* Once the sequence is code generated, set it to NULL.  */
          set_bb_predicate_gimplified_stmts (bb, NULL);
        }
    }
}

/* Helper function for predicate_mem_writes.  Returns the index of an
   existing mask created for the given SIZE, or -1 if there is none.  */

static int
mask_exists (int size, vec<int> vec)
{
  unsigned int ix;
  int v;

  FOR_EACH_VEC_ELT (vec, ix, v)
    if (v == size)
      return (int) ix;
  return -1;
}

/* Predicate each write to memory in LOOP.

   This function transforms control flow constructs containing memory
   writes of the form:

   | for (i = 0; i < N; i++)
   |   if (cond)
   |     A[i] = expr;

   into the following form that does not contain control flow:

   | for (i = 0; i < N; i++)
   |   A[i] = cond ? expr : A[i];

   The original CFG looks like this:

   | bb_0
   |   i = 0
   | end_bb_0
   |
   | bb_1
   |   if (i < N) goto bb_5 else goto bb_2
   | end_bb_1
   |
   | bb_2
   |   cond = some_computation;
   |   if (cond) goto bb_3 else goto bb_4
   | end_bb_2
   |
   | bb_3
   |   A[i] = expr;
   |   goto bb_4
   | end_bb_3
   |
   | bb_4
   |   goto bb_1
   | end_bb_4

   insert_gimplified_predicates inserts the computation of the COND
   expression at the beginning of the destination basic block:

   | bb_0
   |   i = 0
   | end_bb_0
   |
   | bb_1
   |   if (i < N) goto bb_5 else goto bb_2
   | end_bb_1
   |
   | bb_2
   |   cond = some_computation;
   |   if (cond) goto bb_3 else goto bb_4
   | end_bb_2
   |
   | bb_3
   |   cond = some_computation;
   |   A[i] = expr;
   |   goto bb_4
   | end_bb_3
   |
   | bb_4
   |   goto bb_1
   | end_bb_4

   predicate_mem_writes is then predicating the memory write as follows:

   | bb_0
   |   i = 0
   | end_bb_0
   |
   | bb_1
   |   if (i < N) goto bb_5 else goto bb_2
   | end_bb_1
   |
   | bb_2
   |   if (cond) goto bb_3 else goto bb_4
   | end_bb_2
   |
   | bb_3
   |   cond = some_computation;
   |   A[i] = cond ? expr : A[i];
   |   goto bb_4
   | end_bb_3
   |
   | bb_4
   |   goto bb_1
   | end_bb_4

   and finally combine_blocks removes the basic block boundaries making
   the loop vectorizable:

   | bb_0
   |   i = 0
   |   if (i < N) goto bb_5 else goto bb_1
   | end_bb_0
   |
   | bb_1
   |   cond = some_computation;
   |   A[i] = cond ? expr : A[i];
   |   if (i < N) goto bb_5 else goto bb_4
   | end_bb_1
   |
   | bb_4
   |   goto bb_1
   | end_bb_4
*/
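
/* Illustrative note (not part of the original sources): for statements
   flagged as masked load/store candidates (the GF_PLF_2 path below), the
   access is not rewritten into a read-modify-write but into an internal
   call, roughly

     _mask = cond ? -1 : 0;      (in an integer type as wide as the access)
     MASK_STORE (&A[i], align_ptr, _mask, expr);

   and a predicated load similarly becomes lhs = MASK_LOAD (...).  */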
static void
predicate_mem_writes (loop_p loop)
{
  unsigned int i, orig_loop_num_nodes = loop->num_nodes;
  auto_vec<int, 1> vect_sizes;
  auto_vec<tree, 1> vect_masks;

  for (i = 1; i < orig_loop_num_nodes; i++)
    {
      gimple_stmt_iterator gsi;
      basic_block bb = ifc_bbs[i];
      tree cond = bb_predicate (bb);
      bool swap;
      gimple stmt;
      int index;

      if (is_true_predicate (cond))
        continue;

      swap = false;
      if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
        {
          swap = true;
          cond = TREE_OPERAND (cond, 0);
        }

      vect_sizes.truncate (0);
      vect_masks.truncate (0);

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        if (!gimple_assign_single_p (stmt = gsi_stmt (gsi)))
          continue;
        else if (gimple_plf (stmt, GF_PLF_2))
          {
            tree lhs = gimple_assign_lhs (stmt);
            tree rhs = gimple_assign_rhs1 (stmt);
            tree ref, addr, ptr, masktype, mask_op0, mask_op1, mask;
            gimple new_stmt;
            int bitsize = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (lhs)));

            ref = TREE_CODE (lhs) == SSA_NAME ? rhs : lhs;
            mark_addressable (ref);
            addr = force_gimple_operand_gsi (&gsi, build_fold_addr_expr (ref),
                                             true, NULL_TREE, true,
                                             GSI_SAME_STMT);
            if (!vect_sizes.is_empty ()
                && (index = mask_exists (bitsize, vect_sizes)) != -1)
              /* Use created mask.  */
              mask = vect_masks[index];
            else
              {
                masktype = build_nonstandard_integer_type (bitsize, 1);
                mask_op0 = build_int_cst (masktype, swap ? 0 : -1);
                mask_op1 = build_int_cst (masktype, swap ? -1 : 0);
                cond = force_gimple_operand_gsi_1 (&gsi, unshare_expr (cond),
                                                   is_gimple_condexpr,
                                                   NULL_TREE,
                                                   true, GSI_SAME_STMT);
                mask = fold_build_cond_expr (masktype, unshare_expr (cond),
                                             mask_op0, mask_op1);
                mask = ifc_temp_var (masktype, mask, &gsi);
                /* Save mask and its size for further use.  */
                vect_sizes.safe_push (bitsize);
                vect_masks.safe_push (mask);
              }
            ptr = build_int_cst (reference_alias_ptr_type (ref), 0);
            /* Copy points-to info if possible.  */
            if (TREE_CODE (addr) == SSA_NAME && !SSA_NAME_PTR_INFO (addr))
              copy_ref_info (build2 (MEM_REF, TREE_TYPE (ref), addr, ptr),
                             ref);
            if (TREE_CODE (lhs) == SSA_NAME)
              {
                new_stmt
                  = gimple_build_call_internal (IFN_MASK_LOAD, 3, addr,
                                                ptr, mask);
                gimple_call_set_lhs (new_stmt, lhs);
              }
            else
              new_stmt
                = gimple_build_call_internal (IFN_MASK_STORE, 4, addr, ptr,
                                              mask, rhs);
            gsi_replace (&gsi, new_stmt, true);
          }
        else if (gimple_vdef (stmt))
          {
            tree lhs = gimple_assign_lhs (stmt);
            tree rhs = gimple_assign_rhs1 (stmt);
            tree type = TREE_TYPE (lhs);

            lhs = ifc_temp_var (type, unshare_expr (lhs), &gsi);
            rhs = ifc_temp_var (type, unshare_expr (rhs), &gsi);
            if (swap)
              {
                tree tem = lhs;
                lhs = rhs;
                rhs = tem;
              }
            cond = force_gimple_operand_gsi_1 (&gsi, unshare_expr (cond),
                                               is_gimple_condexpr, NULL_TREE,
                                               true, GSI_SAME_STMT);
            rhs = fold_build_cond_expr (type, unshare_expr (cond), rhs, lhs);
            gimple_assign_set_rhs1 (stmt, ifc_temp_var (type, rhs, &gsi));
            update_stmt (stmt);
          }
    }
}

/* Remove all GIMPLE_CONDs and GIMPLE_LABELs of all the basic blocks
   other than the exit and latch of the LOOP.  Also resets the
   GIMPLE_DEBUG information.  */

static void
remove_conditions_and_labels (loop_p loop)
{
  gimple_stmt_iterator gsi;
  unsigned int i;

  for (i = 0; i < loop->num_nodes; i++)
    {
      basic_block bb = ifc_bbs[i];

      if (bb_with_exit_edge_p (loop, bb)
          || bb == loop->latch)
        continue;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
        switch (gimple_code (gsi_stmt (gsi)))
          {
          case GIMPLE_COND:
          case GIMPLE_LABEL:
            gsi_remove (&gsi, true);
            break;

          case GIMPLE_DEBUG:
            /* ??? Should there be conditional GIMPLE_DEBUG_BINDs?  */
            if (gimple_debug_bind_p (gsi_stmt (gsi)))
              {
                gimple_debug_bind_reset_value (gsi_stmt (gsi));
                update_stmt (gsi_stmt (gsi));
              }
            gsi_next (&gsi);
            break;

          default:
            gsi_next (&gsi);
          }
    }
}

/* Combine all the basic blocks from LOOP into one or two super basic
   blocks.  Replace PHI nodes with conditional modify expressions.  */

static void
combine_blocks (struct loop *loop, bool any_mask_load_store)
{
  basic_block bb, exit_bb, merge_target_bb;
  unsigned int orig_loop_num_nodes = loop->num_nodes;
  unsigned int i;
  edge e;
  edge_iterator ei;

  predicate_bbs (loop);
  remove_conditions_and_labels (loop);
  insert_gimplified_predicates (loop, any_mask_load_store);
  predicate_all_scalar_phis (loop);

  if (flag_tree_loop_if_convert_stores || any_mask_load_store)
    predicate_mem_writes (loop);

  /* Merge basic blocks: first remove all the edges in the loop,
     except for those from the exit block.  */
  exit_bb = NULL;
  for (i = 0; i < orig_loop_num_nodes; i++)
    {
      bb = ifc_bbs[i];
      free_bb_predicate (bb);
      if (bb_with_exit_edge_p (loop, bb))
        {
          gcc_assert (exit_bb == NULL);
          exit_bb = bb;
        }
    }
  gcc_assert (exit_bb != loop->latch);

  for (i = 1; i < orig_loop_num_nodes; i++)
    {
      bb = ifc_bbs[i];

      for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei));)
        {
          if (e->src == exit_bb)
            ei_next (&ei);
          else
            remove_edge (e);
        }
    }

  if (exit_bb != NULL)
    {
      if (exit_bb != loop->header)
        {
          /* Connect this node to loop header.  */
          make_edge (loop->header, exit_bb, EDGE_FALLTHRU);
          set_immediate_dominator (CDI_DOMINATORS, exit_bb, loop->header);
        }

      /* Redirect non-exit edges to loop->latch.  */
      FOR_EACH_EDGE (e, ei, exit_bb->succs)
        {
          if (!loop_exit_edge_p (loop, e))
            redirect_edge_and_branch (e, loop->latch);
        }
      set_immediate_dominator (CDI_DOMINATORS, loop->latch, exit_bb);
    }
  else
    {
      /* If the loop does not have an exit, reconnect header and latch.  */
      make_edge (loop->header, loop->latch, EDGE_FALLTHRU);
      set_immediate_dominator (CDI_DOMINATORS, loop->latch, loop->header);
    }

  merge_target_bb = loop->header;
  for (i = 1; i < orig_loop_num_nodes; i++)
    {
      gimple_stmt_iterator gsi;
      gimple_stmt_iterator last;

      bb = ifc_bbs[i];

      if (bb == exit_bb || bb == loop->latch)
        continue;

      /* Make stmts member of loop->header.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        gimple_set_bb (gsi_stmt (gsi), merge_target_bb);

      /* Update stmt list.  */
      last = gsi_last_bb (merge_target_bb);
      gsi_insert_seq_after (&last, bb_seq (bb), GSI_NEW_STMT);
      set_bb_seq (bb, NULL);

      delete_basic_block (bb);
    }

  /* If possible, merge loop header to the block with the exit edge.
     This reduces the number of basic blocks to two, to please the
     vectorizer that handles only loops with two nodes.  */
  if (exit_bb
      && exit_bb != loop->header
      && can_merge_blocks_p (loop->header, exit_bb))
    merge_blocks (loop->header, exit_bb);

  free (ifc_bbs);
  ifc_bbs = NULL;
}

/* Version LOOP before if-converting it; the original loop will then be
   if-converted, the new copy of the loop will not, and the LOOP_VECTORIZED
   internal call will guard which loop to execute.  The vectorizer pass
   will fold this internal call into either true or false.  */
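
/* Illustrative sketch (not part of the original sources): after versioning
   the code has roughly the shape

     _v = LOOP_VECTORIZED (<original loop num>, <copy loop num>);
     if (_v)
       ... original LOOP (this copy gets if-converted) ...
     else
       ... new_loop, an unchanged copy with dont_vectorize set ...

   so that only one of the two copies survives once the call is folded.  */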
static bool
version_loop_for_if_conversion (struct loop *loop)
{
  basic_block cond_bb;
  tree cond = make_ssa_name (boolean_type_node);
  struct loop *new_loop;
  gimple g;
  gimple_stmt_iterator gsi;

  g = gimple_build_call_internal (IFN_LOOP_VECTORIZED, 2,
                                  build_int_cst (integer_type_node, loop->num),
                                  integer_zero_node);
  gimple_call_set_lhs (g, cond);

  initialize_original_copy_tables ();
  new_loop = loop_version (loop, cond, &cond_bb,
                           REG_BR_PROB_BASE, REG_BR_PROB_BASE,
                           REG_BR_PROB_BASE, true);
  free_original_copy_tables ();
  if (new_loop == NULL)
    return false;
  new_loop->dont_vectorize = true;
  new_loop->force_vectorize = false;
  gsi = gsi_last_bb (cond_bb);
  gimple_call_set_arg (g, 1, build_int_cst (integer_type_node, new_loop->num));
  gsi_insert_before (&gsi, g, GSI_SAME_STMT);
  update_ssa (TODO_update_ssa);
  return true;
}

/* Performs splitting of critical edges if aggressive_if_conv is true.
   Returns false if the loop will not be if-converted and true otherwise.  */

static bool
ifcvt_split_critical_edges (struct loop *loop)
{
  basic_block *body;
  basic_block bb;
  unsigned int num = loop->num_nodes;
  unsigned int i;
  gimple stmt;
  edge e;
  edge_iterator ei;

  if (num <= 2)
    return false;
  if (loop->inner)
    return false;
  if (!single_exit (loop))
    return false;

  body = get_loop_body (loop);
  for (i = 0; i < num; i++)
    {
      bb = body[i];
      if (bb == loop->latch
          || bb_with_exit_edge_p (loop, bb))
        continue;
      stmt = last_stmt (bb);
      /* Skip basic blocks not ending with a conditional branch.  */
      if (!(stmt && gimple_code (stmt) == GIMPLE_COND))
        continue;
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (EDGE_CRITICAL_P (e) && e->dest->loop_father == loop)
          split_edge (e);
    }
  free (body);
  return true;
}

/* Assumes that the lhs of DEF_STMT has multiple uses.
   Delete one use by (1) creating a copy of DEF_STMT with a unique lhs;
   (2) changing the original use of the lhs in one use statement to the
   newly created lhs.  */

static void
ifcvt_split_def_stmt (gimple def_stmt, gimple use_stmt)
{
  tree var;
  tree lhs;
  gimple copy_stmt;
  gimple_stmt_iterator gsi;
  use_operand_p use_p;
  imm_use_iterator imm_iter;

  var = gimple_assign_lhs (def_stmt);
  copy_stmt = gimple_copy (def_stmt);
  lhs = make_temp_ssa_name (TREE_TYPE (var), NULL, "_ifc_");
  gimple_assign_set_lhs (copy_stmt, lhs);
  SSA_NAME_DEF_STMT (lhs) = copy_stmt;

  /* Insert copy of DEF_STMT.  */
  gsi = gsi_for_stmt (def_stmt);
  gsi_insert_after (&gsi, copy_stmt, GSI_SAME_STMT);

  /* Change use of var to lhs in use_stmt.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Change use of var ");
      print_generic_expr (dump_file, var, TDF_SLIM);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, lhs, TDF_SLIM);
      fprintf (dump_file, "\n");
    }
  FOR_EACH_IMM_USE_FAST (use_p, imm_iter, var)
    {
      if (USE_STMT (use_p) != use_stmt)
        continue;
      SET_USE (use_p, lhs);
      break;
    }
}

/* Traverse the bool pattern recursively starting from VAR.
   Save its def and use statements to defuse_list if VAR does
   not have a single use.  */

static void
ifcvt_walk_pattern_tree (tree var, vec<gimple> *defuse_list,
                         gimple use_stmt)
{
  tree rhs1, rhs2;
  enum tree_code code;
  gimple def_stmt;

  def_stmt = SSA_NAME_DEF_STMT (var);
  if (gimple_code (def_stmt) != GIMPLE_ASSIGN)
    return;
  if (!has_single_use (var))
    {
      /* Put def and use stmts into defuse_list.  */
      defuse_list->safe_push (def_stmt);
      defuse_list->safe_push (use_stmt);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Multiple lhs uses in stmt\n");
          print_gimple_stmt (dump_file, def_stmt, 0, TDF_SLIM);
        }
    }
  rhs1 = gimple_assign_rhs1 (def_stmt);
  code = gimple_assign_rhs_code (def_stmt);
  switch (code)
    {
    case SSA_NAME:
      ifcvt_walk_pattern_tree (rhs1, defuse_list, def_stmt);
      break;
    CASE_CONVERT:
      if ((TYPE_PRECISION (TREE_TYPE (rhs1)) != 1
           || !TYPE_UNSIGNED (TREE_TYPE (rhs1)))
          && TREE_CODE (TREE_TYPE (rhs1)) != BOOLEAN_TYPE)
        break;
      ifcvt_walk_pattern_tree (rhs1, defuse_list, def_stmt);
      break;
    case BIT_NOT_EXPR:
      ifcvt_walk_pattern_tree (rhs1, defuse_list, def_stmt);
      break;
    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      ifcvt_walk_pattern_tree (rhs1, defuse_list, def_stmt);
      rhs2 = gimple_assign_rhs2 (def_stmt);
      ifcvt_walk_pattern_tree (rhs2, defuse_list, def_stmt);
      break;
    default:
      break;
    }
  return;
}

/* Returns true if STMT can be a root of the bool pattern applied
   by the vectorizer.  */

static bool
stmt_is_root_of_bool_pattern (gimple stmt)
{
  enum tree_code code;
  tree lhs, rhs;

  code = gimple_assign_rhs_code (stmt);
  if (CONVERT_EXPR_CODE_P (code))
    {
      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (TREE_TYPE (rhs)) != BOOLEAN_TYPE)
        return false;
      if (TREE_CODE (TREE_TYPE (lhs)) == BOOLEAN_TYPE)
        return false;
      return true;
    }
  else if (code == COND_EXPR)
    {
      rhs = gimple_assign_rhs1 (stmt);
      if (TREE_CODE (rhs) != SSA_NAME)
        return false;
      return true;
    }
  return false;
}

/* Traverse all statements in BB, which corresponds to the loop header,
   to find all statements which can start a bool pattern applied by the
   vectorizer, and convert multiple uses in them to conform to the pattern
   restrictions.  Such a case can occur if the same predicate is used both
   for phi node conversion and for a load/store mask.  */
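
/* Illustrative sketch (not part of the original sources; the names are
   hypothetical): if a predicate definition such as

     _p = a_1 < b_2;

   is used both in a COND_EXPR generated for a PHI and as a load/store
   mask, the definition is duplicated into a fresh _ifc_ name and the use
   found inside the walked bool pattern is redirected to the copy, so that
   every intermediate value of the pattern has a single use.  */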
static void
ifcvt_repair_bool_pattern (basic_block bb)
{
  tree rhs;
  gimple stmt;
  gimple_stmt_iterator gsi;
  vec<gimple> defuse_list = vNULL;
  vec<gimple> pattern_roots = vNULL;
  bool repeat = true;
  int niter = 0;
  unsigned int ix;

  /* Collect all root pattern statements.  */
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_ASSIGN)
        continue;
      if (!stmt_is_root_of_bool_pattern (stmt))
        continue;
      pattern_roots.safe_push (stmt);
    }

  if (pattern_roots.is_empty ())
    return;

  /* Split all statements with multiple uses iteratively since splitting
     may create new multiple uses.  */
  while (repeat)
    {
      repeat = false;
      niter++;
      FOR_EACH_VEC_ELT (pattern_roots, ix, stmt)
        {
          rhs = gimple_assign_rhs1 (stmt);
          ifcvt_walk_pattern_tree (rhs, &defuse_list, stmt);
          while (defuse_list.length () > 0)
            {
              repeat = true;
              gimple def_stmt, use_stmt;
              use_stmt = defuse_list.pop ();
              def_stmt = defuse_list.pop ();
              ifcvt_split_def_stmt (def_stmt, use_stmt);
            }
        }
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Repair bool pattern takes %d iterations. \n",
             niter);
}

/* Delete redundant statements produced by predication which prevent
   loop vectorization.  */
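
/* Illustrative note (not part of the original sources): typical victims
   are gimplified predicate computations that no longer have any use in
   the block after predication; PHIs, loads, stores, calls, conditions,
   debug statements and values used outside the block are treated as live
   roots and kept.  */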
static void
ifcvt_local_dce (basic_block bb)
{
  gimple stmt;
  gimple stmt1;
  gimple phi;
  gimple_stmt_iterator gsi;
  vec<gimple> worklist;
  enum gimple_code code;
  use_operand_p use_p;
  imm_use_iterator imm_iter;

  worklist.create (64);
  /* Consider all phis as live statements.  */
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      phi = gsi_stmt (gsi);
      gimple_set_plf (phi, GF_PLF_2, true);
      worklist.safe_push (phi);
    }
  /* Consider load/store statements, CALL and COND as live.  */
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_store_p (stmt)
          || gimple_assign_load_p (stmt)
          || is_gimple_debug (stmt))
        {
          gimple_set_plf (stmt, GF_PLF_2, true);
          worklist.safe_push (stmt);
          continue;
        }
      code = gimple_code (stmt);
      if (code == GIMPLE_COND || code == GIMPLE_CALL)
        {
          gimple_set_plf (stmt, GF_PLF_2, true);
          worklist.safe_push (stmt);
          continue;
        }
      gimple_set_plf (stmt, GF_PLF_2, false);

      if (code == GIMPLE_ASSIGN)
        {
          tree lhs = gimple_assign_lhs (stmt);
          FOR_EACH_IMM_USE_FAST (use_p, imm_iter, lhs)
            {
              stmt1 = USE_STMT (use_p);
              if (gimple_bb (stmt1) != bb)
                {
                  gimple_set_plf (stmt, GF_PLF_2, true);
                  worklist.safe_push (stmt);
                  break;
                }
            }
        }
    }
  /* Propagate liveness through arguments of live stmts.  */
  while (worklist.length () > 0)
    {
      ssa_op_iter iter;
      use_operand_p use_p;
      tree use;

      stmt = worklist.pop ();
      FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
        {
          use = USE_FROM_PTR (use_p);
          if (TREE_CODE (use) != SSA_NAME)
            continue;
          stmt1 = SSA_NAME_DEF_STMT (use);
          if (gimple_bb (stmt1) != bb
              || gimple_plf (stmt1, GF_PLF_2))
            continue;
          gimple_set_plf (stmt1, GF_PLF_2, true);
          worklist.safe_push (stmt1);
        }
    }
  /* Delete dead statements.  */
  gsi = gsi_start_bb (bb);
  while (!gsi_end_p (gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_plf (stmt, GF_PLF_2))
        {
          gsi_next (&gsi);
          continue;
        }
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Delete dead stmt in bb#%d\n", bb->index);
          print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
        }
      gsi_remove (&gsi, true);
      release_defs (stmt);
    }
}

/* If-convert LOOP when it is legal.  For the moment this pass has no
   profitability analysis.  Returns non-zero todo flags when something
   changed.  */

static unsigned int
tree_if_conversion (struct loop *loop)
{
  unsigned int todo = 0;
  ifc_bbs = NULL;
  bool any_mask_load_store = false;

  /* Set up aggressive if-conversion for loops marked with the simd
     pragma.  */
  aggressive_if_conv = loop->force_vectorize;
  /* Check whether the outer loop was marked with the simd pragma.  */
  if (!aggressive_if_conv)
    {
      struct loop *outer_loop = loop_outer (loop);
      if (outer_loop && outer_loop->force_vectorize)
        aggressive_if_conv = true;
    }

  if (aggressive_if_conv)
    if (!ifcvt_split_critical_edges (loop))
      goto cleanup;

  if (!if_convertible_loop_p (loop, &any_mask_load_store)
      || !dbg_cnt (if_conversion_tree))
    goto cleanup;

  if (any_mask_load_store
      && ((!flag_tree_loop_vectorize && !loop->force_vectorize)
          || loop->dont_vectorize))
    goto cleanup;

  if (any_mask_load_store && !version_loop_for_if_conversion (loop))
    goto cleanup;

  /* Now all statements are if-convertible.  Combine all the basic
     blocks into one huge basic block doing the if-conversion
     on-the-fly.  */
  combine_blocks (loop, any_mask_load_store);

  /* Delete dead predicate computations and repair the trees corresponding
     to bool patterns to delete multiple uses of predicates.  */
  if (aggressive_if_conv)
    {
      ifcvt_local_dce (loop->header);
      ifcvt_repair_bool_pattern (loop->header);
    }

  todo |= TODO_cleanup_cfg;
  if (flag_tree_loop_if_convert_stores || any_mask_load_store)
    {
      mark_virtual_operands_for_renaming (cfun);
      todo |= TODO_update_ssa_only_virtuals;
    }

 cleanup:
  if (ifc_bbs)
    {
      unsigned int i;

      for (i = 0; i < loop->num_nodes; i++)
        free_bb_predicate (ifc_bbs[i]);

      free (ifc_bbs);
      ifc_bbs = NULL;
    }
  free_dominance_info (CDI_POST_DOMINATORS);
  return todo;
}

/* Tree if-conversion pass management.  */

namespace {

const pass_data pass_data_if_conversion =
{
  GIMPLE_PASS, /* type */
  "ifcvt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_if_conversion : public gimple_opt_pass
{
public:
  pass_if_conversion (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_if_conversion, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *);

}; // class pass_if_conversion

bool
pass_if_conversion::gate (function *fun)
{
  return (((flag_tree_loop_vectorize || fun->has_force_vectorize_loops)
           && flag_tree_loop_if_convert != 0)
          || flag_tree_loop_if_convert == 1
          || flag_tree_loop_if_convert_stores == 1);
}

unsigned int
pass_if_conversion::execute (function *fun)
{
  struct loop *loop;
  unsigned todo = 0;

  if (number_of_loops (fun) <= 1)
    return 0;

  FOR_EACH_LOOP (loop, 0)
    if (flag_tree_loop_if_convert == 1
        || flag_tree_loop_if_convert_stores == 1
        || ((flag_tree_loop_vectorize || loop->force_vectorize)
            && !loop->dont_vectorize))
      todo |= tree_if_conversion (loop);

#ifdef ENABLE_CHECKING
  {
    basic_block bb;
    FOR_EACH_BB_FN (bb, fun)
      gcc_assert (!bb->aux);
  }
#endif

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_if_conversion (gcc::context *ctxt)
{
  return new pass_if_conversion (ctxt);
}