ddg.c

/* DDG - Data Dependence Graph implementation.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.
   Contributed by Ayal Zaks and Mustafa Hagog <zaks,mustafa@il.ibm.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "input.h"
#include "function.h"
#include "flags.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "except.h"
#include "recog.h"
#include "predict.h"
#include "basic-block.h"
#include "sched-int.h"
#include "target.h"
#include "cfgloop.h"
#include "sbitmap.h"
#include "symtab.h"
#include "statistics.h"
#include "double-int.h"
#include "real.h"
#include "fixed-value.h"
#include "alias.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "bitmap.h"
#include "df.h"
#include "ddg.h"
#include "rtl-iter.h"

#ifdef INSN_SCHEDULING

/* A flag indicating that a ddg edge belongs to an SCC or not.  */
enum edge_flag {NOT_IN_SCC = 0, IN_SCC};

/* Forward declarations.  */
static void add_backarc_to_ddg (ddg_ptr, ddg_edge_ptr);
static void add_backarc_to_scc (ddg_scc_ptr, ddg_edge_ptr);
static void add_scc_to_ddg (ddg_all_sccs_ptr, ddg_scc_ptr);
static void create_ddg_dep_from_intra_loop_link (ddg_ptr, ddg_node_ptr,
                                                 ddg_node_ptr, dep_t);
static void create_ddg_dep_no_link (ddg_ptr, ddg_node_ptr, ddg_node_ptr,
                                    dep_type, dep_data_type, int);
static ddg_edge_ptr create_ddg_edge (ddg_node_ptr, ddg_node_ptr, dep_type,
                                     dep_data_type, int, int);
static void add_edge_to_ddg (ddg_ptr g, ddg_edge_ptr);

/* Auxiliary variable for mem_read_insn_p/mem_write_insn_p.  */
static bool mem_ref_p;

/* Auxiliary function for mem_read_insn_p.  */
static void
mark_mem_use (rtx *x, void *)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, *x, NONCONST)
    if (MEM_P (*iter))
      {
        mem_ref_p = true;
        break;
      }
}

/* Returns nonzero if INSN reads from memory.  */
static bool
mem_read_insn_p (rtx_insn *insn)
{
  mem_ref_p = false;
  note_uses (&PATTERN (insn), mark_mem_use, NULL);
  return mem_ref_p;
}

/* Auxiliary function for mem_write_insn_p.  */
static void
mark_mem_store (rtx loc, const_rtx setter ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
{
  if (MEM_P (loc))
    mem_ref_p = true;
}

/* Returns nonzero if INSN writes to memory.  */
static bool
mem_write_insn_p (rtx_insn *insn)
{
  mem_ref_p = false;
  note_stores (PATTERN (insn), mark_mem_store, NULL);
  return mem_ref_p;
}

/* Returns nonzero if X has access to memory.  */
static bool
rtx_mem_access_p (rtx x)
{
  int i, j;
  const char *fmt;
  enum rtx_code code;

  if (x == 0)
    return false;

  if (MEM_P (x))
    return true;

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (rtx_mem_access_p (XEXP (x, i)))
            return true;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          {
            if (rtx_mem_access_p (XVECEXP (x, i, j)))
              return true;
          }
    }
  return false;
}

/* Returns nonzero if INSN reads from or writes to memory.  */
static bool
mem_access_insn_p (rtx_insn *insn)
{
  return rtx_mem_access_p (PATTERN (insn));
}

/* Return true if DEF_INSN contains an address being auto-inc or auto-dec
   which is used in USE_INSN.  Otherwise return false.  The result is
   used to decide whether to remove the edge between def_insn and
   use_insn when -fmodulo-sched-allow-regmoves is set.  This function
   doesn't need to consider the specific address register; no reg_moves
   will be allowed for any life range defined by def_insn and used
   by use_insn, if use_insn uses an address register auto-inc'ed by
   def_insn.  */
bool
autoinc_var_is_used_p (rtx_insn *def_insn, rtx_insn *use_insn)
{
  rtx note;

  for (note = REG_NOTES (def_insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_INC
        && reg_referenced_p (XEXP (note, 0), PATTERN (use_insn)))
      return true;

  return false;
}

/* Return true if one of the definitions in INSN has MODE_CC.  Otherwise
   return false.  */
static bool
def_has_ccmode_p (rtx_insn *insn)
{
  df_ref def;

  FOR_EACH_INSN_DEF (def, insn)
    {
      machine_mode mode = GET_MODE (DF_REF_REG (def));

      if (GET_MODE_CLASS (mode) == MODE_CC)
        return true;
    }

  return false;
}

/* Computes the dependence parameters (latency, distance etc.), creates
   a ddg_edge and adds it to the given DDG.  */
static void
create_ddg_dep_from_intra_loop_link (ddg_ptr g, ddg_node_ptr src_node,
                                     ddg_node_ptr dest_node, dep_t link)
{
  ddg_edge_ptr e;
  int latency, distance = 0;
  dep_type t = TRUE_DEP;
  dep_data_type dt = (mem_access_insn_p (src_node->insn)
                      && mem_access_insn_p (dest_node->insn) ? MEM_DEP
                      : REG_DEP);
  gcc_assert (src_node->cuid < dest_node->cuid);
  gcc_assert (link);

  /* Note: REG_DEP_ANTI applies to MEM ANTI_DEP as well!!  */
  if (DEP_TYPE (link) == REG_DEP_ANTI)
    t = ANTI_DEP;
  else if (DEP_TYPE (link) == REG_DEP_OUTPUT)
    t = OUTPUT_DEP;

  gcc_assert (!DEBUG_INSN_P (dest_node->insn) || t == ANTI_DEP);
  gcc_assert (!DEBUG_INSN_P (src_node->insn) || t == ANTI_DEP);

  /* We currently choose not to create certain anti-dep edges and
     compensate for that by generating reg-moves based on the life-range
     analysis.  The anti-deps that will be deleted are the ones which
     have true-dep edges in the opposite direction (in other words
     the kernel has only one def of the relevant register).
     If the address that is being auto-inc or auto-dec in DEST_NODE
     is used in SRC_NODE then do not remove the edge to make sure
     reg-moves will not be created for this address.
     TODO: support the removal of all anti-dep edges, i.e. including those
     whose register has multiple defs in the loop.  */
  if (flag_modulo_sched_allow_regmoves
      && (t == ANTI_DEP && dt == REG_DEP)
      && !def_has_ccmode_p (dest_node->insn)
      && !autoinc_var_is_used_p (dest_node->insn, src_node->insn))
    {
      rtx set;

      set = single_set (dest_node->insn);
      /* TODO: Handle registers for which REG_P is not true, i.e.
         subregs and special registers.  */
      if (set && REG_P (SET_DEST (set)))
        {
          int regno = REGNO (SET_DEST (set));
          df_ref first_def;
          struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb);

          first_def = df_bb_regno_first_def_find (g->bb, regno);
          gcc_assert (first_def);

          if (bitmap_bit_p (&bb_info->gen, DF_REF_ID (first_def)))
            return;
        }
    }

  latency = dep_cost (link);
  e = create_ddg_edge (src_node, dest_node, t, dt, latency, distance);
  add_edge_to_ddg (g, e);
}

/* The same as the above function, but it doesn't require a link parameter.  */
static void
create_ddg_dep_no_link (ddg_ptr g, ddg_node_ptr from, ddg_node_ptr to,
                        dep_type d_t, dep_data_type d_dt, int distance)
{
  ddg_edge_ptr e;
  int l;
  enum reg_note dep_kind;
  struct _dep _dep, *dep = &_dep;

  gcc_assert (!DEBUG_INSN_P (to->insn) || d_t == ANTI_DEP);
  gcc_assert (!DEBUG_INSN_P (from->insn) || d_t == ANTI_DEP);

  if (d_t == ANTI_DEP)
    dep_kind = REG_DEP_ANTI;
  else if (d_t == OUTPUT_DEP)
    dep_kind = REG_DEP_OUTPUT;
  else
    {
      gcc_assert (d_t == TRUE_DEP);
      dep_kind = REG_DEP_TRUE;
    }

  init_dep (dep, from->insn, to->insn, dep_kind);

  l = dep_cost (dep);

  e = create_ddg_edge (from, to, d_t, d_dt, l, distance);
  if (distance > 0)
    add_backarc_to_ddg (g, e);
  else
    add_edge_to_ddg (g, e);
}

/* Given a downwards exposed register def LAST_DEF (which is the last
   definition of that register in the bb), add inter-loop true dependences
   to all its uses in the next iteration, an output dependence to the
   first def of the same register (possibly itself) in the next iteration
   and anti-dependences from its uses in the current iteration to the
   first definition in the next iteration.  */
static void
add_cross_iteration_register_deps (ddg_ptr g, df_ref last_def)
{
  int regno = DF_REF_REGNO (last_def);
  struct df_link *r_use;
  int has_use_in_bb_p = false;
  rtx_insn *def_insn = DF_REF_INSN (last_def);
  ddg_node_ptr last_def_node = get_node_of_insn (g, def_insn);
  ddg_node_ptr use_node;
#ifdef ENABLE_CHECKING
  struct df_rd_bb_info *bb_info = DF_RD_BB_INFO (g->bb);
#endif
  df_ref first_def = df_bb_regno_first_def_find (g->bb, regno);

  gcc_assert (last_def_node);
  gcc_assert (first_def);

#ifdef ENABLE_CHECKING
  if (DF_REF_ID (last_def) != DF_REF_ID (first_def))
    gcc_assert (!bitmap_bit_p (&bb_info->gen,
                               DF_REF_ID (first_def)));
#endif

  /* Create inter-loop true dependences and anti dependences.  */
  for (r_use = DF_REF_CHAIN (last_def); r_use != NULL; r_use = r_use->next)
    {
      rtx_insn *use_insn = DF_REF_INSN (r_use->ref);

      if (BLOCK_FOR_INSN (use_insn) != g->bb)
        continue;

      /* ??? Do not handle uses with DF_REF_IN_NOTE notes.  */
      use_node = get_node_of_insn (g, use_insn);
      gcc_assert (use_node);
      has_use_in_bb_p = true;
      if (use_node->cuid <= last_def_node->cuid)
        {
          /* Add true deps from last_def to its uses in the next
             iteration.  Any such upwards exposed use appears before
             the last_def def.  */
          create_ddg_dep_no_link (g, last_def_node, use_node,
                                  DEBUG_INSN_P (use_insn) ? ANTI_DEP : TRUE_DEP,
                                  REG_DEP, 1);
        }
      else if (!DEBUG_INSN_P (use_insn))
        {
          /* Add anti deps from last_def's uses in the current iteration
             to the first def in the next iteration.  We do not add ANTI
             dep when there is an intra-loop TRUE dep in the opposite
             direction, but use regmoves to fix such disregarded ANTI
             deps when broken.  If the first_def reaches the USE then
             there is such a dep.  */
          ddg_node_ptr first_def_node = get_node_of_insn (g,
                                                          DF_REF_INSN (first_def));

          gcc_assert (first_def_node);

          /* Always create the edge if the use node is a branch in
             order to prevent the creation of reg-moves.
             If the address that is being auto-inc or auto-dec in LAST_DEF
             is used in USE_INSN then do not remove the edge to make sure
             reg-moves will not be created for that address.  */
          if (DF_REF_ID (last_def) != DF_REF_ID (first_def)
              || !flag_modulo_sched_allow_regmoves
              || JUMP_P (use_node->insn)
              || autoinc_var_is_used_p (DF_REF_INSN (last_def), use_insn)
              || def_has_ccmode_p (DF_REF_INSN (last_def)))
            create_ddg_dep_no_link (g, use_node, first_def_node, ANTI_DEP,
                                    REG_DEP, 1);
        }
    }

  /* Create an inter-loop output dependence between LAST_DEF (which is the
     last def in its block, being downwards exposed) and the first def in
     its block.  Avoid creating a self output dependence.  Avoid creating
     an output dependence if there is a dependence path between the two
     defs starting with a true dependence to a use which can be in the
     next iteration; followed by an anti dependence of that use to the
     first def (i.e. if there is a use between the two defs.)  */
  if (!has_use_in_bb_p)
    {
      ddg_node_ptr dest_node;

      if (DF_REF_ID (last_def) == DF_REF_ID (first_def))
        return;

      dest_node = get_node_of_insn (g, DF_REF_INSN (first_def));
      gcc_assert (dest_node);
      create_ddg_dep_no_link (g, last_def_node, dest_node,
                              OUTPUT_DEP, REG_DEP, 1);
    }
}

/* Build inter-loop dependencies, by looking at DF analysis backwards.  */
static void
build_inter_loop_deps (ddg_ptr g)
{
  unsigned rd_num;
  struct df_rd_bb_info *rd_bb_info;
  bitmap_iterator bi;

  rd_bb_info = DF_RD_BB_INFO (g->bb);

  /* Find inter-loop register output, true and anti deps.  */
  EXECUTE_IF_SET_IN_BITMAP (&rd_bb_info->gen, 0, rd_num, bi)
  {
    df_ref rd = DF_DEFS_GET (rd_num);

    add_cross_iteration_register_deps (g, rd);
  }
}

/* Return true if the two specified instructions have mem exprs with
   conflicting alias sets.  */
static bool
insns_may_alias_p (rtx_insn *insn1, rtx_insn *insn2)
{
  subrtx_iterator::array_type array1;
  subrtx_iterator::array_type array2;
  FOR_EACH_SUBRTX (iter1, array1, PATTERN (insn1), NONCONST)
    {
      const_rtx x1 = *iter1;
      if (MEM_P (x1))
        FOR_EACH_SUBRTX (iter2, array2, PATTERN (insn2), NONCONST)
          {
            const_rtx x2 = *iter2;
            if (MEM_P (x2) && may_alias_p (x2, x1))
              return true;
          }
    }
  return false;
}

/* Given two nodes, analyze their RTL insns and add intra-loop mem deps
   to ddg G.  */
static void
add_intra_loop_mem_dep (ddg_ptr g, ddg_node_ptr from, ddg_node_ptr to)
{
  if ((from->cuid == to->cuid)
      || !insns_may_alias_p (from->insn, to->insn))
    /* Do not create edge if memory references have disjoint alias sets
       or 'to' and 'from' are the same instruction.  */
    return;

  if (mem_write_insn_p (from->insn))
    {
      if (mem_read_insn_p (to->insn))
        create_ddg_dep_no_link (g, from, to,
                                DEBUG_INSN_P (to->insn)
                                ? ANTI_DEP : TRUE_DEP, MEM_DEP, 0);
      else
        create_ddg_dep_no_link (g, from, to,
                                DEBUG_INSN_P (to->insn)
                                ? ANTI_DEP : OUTPUT_DEP, MEM_DEP, 0);
    }
  else if (!mem_read_insn_p (to->insn))
    create_ddg_dep_no_link (g, from, to, ANTI_DEP, MEM_DEP, 0);
}

/* Given two nodes, analyze their RTL insns and add inter-loop mem deps
   to ddg G.  */
static void
add_inter_loop_mem_dep (ddg_ptr g, ddg_node_ptr from, ddg_node_ptr to)
{
  if (!insns_may_alias_p (from->insn, to->insn))
    /* Do not create edge if memory references have disjoint alias sets.  */
    return;

  if (mem_write_insn_p (from->insn))
    {
      if (mem_read_insn_p (to->insn))
        create_ddg_dep_no_link (g, from, to,
                                DEBUG_INSN_P (to->insn)
                                ? ANTI_DEP : TRUE_DEP, MEM_DEP, 1);
      else if (from->cuid != to->cuid)
        create_ddg_dep_no_link (g, from, to,
                                DEBUG_INSN_P (to->insn)
                                ? ANTI_DEP : OUTPUT_DEP, MEM_DEP, 1);
    }
  else
    {
      if (mem_read_insn_p (to->insn))
        return;
      else if (from->cuid != to->cuid)
        {
          create_ddg_dep_no_link (g, from, to, ANTI_DEP, MEM_DEP, 1);
          if (DEBUG_INSN_P (from->insn) || DEBUG_INSN_P (to->insn))
            create_ddg_dep_no_link (g, to, from, ANTI_DEP, MEM_DEP, 1);
          else
            create_ddg_dep_no_link (g, to, from, TRUE_DEP, MEM_DEP, 1);
        }
    }
}

/* Perform intra-block Data Dependency analysis and connect the nodes in
   the DDG.  We assume the loop has a single basic block.  */
static void
build_intra_loop_deps (ddg_ptr g)
{
  int i;
  /* Hold the dependency analysis state during dependency calculations.  */
  struct deps_desc tmp_deps;
  rtx_insn *head, *tail;

  /* Build the dependence information, using the sched_analyze function.  */
  init_deps_global ();
  init_deps (&tmp_deps, false);

  /* Do the intra-block data dependence analysis for the given block.  */
  get_ebb_head_tail (g->bb, g->bb, &head, &tail);
  sched_analyze (&tmp_deps, head, tail);

  /* Build intra-loop data dependencies using the scheduler dependency
     analysis.  */
  for (i = 0; i < g->num_nodes; i++)
    {
      ddg_node_ptr dest_node = &g->nodes[i];
      sd_iterator_def sd_it;
      dep_t dep;

      if (! INSN_P (dest_node->insn))
        continue;

      FOR_EACH_DEP (dest_node->insn, SD_LIST_BACK, sd_it, dep)
        {
          rtx_insn *src_insn = DEP_PRO (dep);
          ddg_node_ptr src_node;

          /* Don't add dependencies on debug insns to non-debug insns
             to avoid codegen differences between -g and -g0.  */
          if (DEBUG_INSN_P (src_insn) && !DEBUG_INSN_P (dest_node->insn))
            continue;

          src_node = get_node_of_insn (g, src_insn);

          if (!src_node)
            continue;

          create_ddg_dep_from_intra_loop_link (g, src_node, dest_node, dep);
        }

      /* If this insn modifies memory, add an edge to all insns that access
         memory.  */
      if (mem_access_insn_p (dest_node->insn))
        {
          int j;

          for (j = 0; j <= i; j++)
            {
              ddg_node_ptr j_node = &g->nodes[j];
              if (DEBUG_INSN_P (j_node->insn))
                continue;
              if (mem_access_insn_p (j_node->insn))
                {
                  /* Don't bother calculating inter-loop dep if an intra-loop dep
                     already exists.  */
                  if (! bitmap_bit_p (dest_node->successors, j))
                    add_inter_loop_mem_dep (g, dest_node, j_node);
                  /* If -fmodulo-sched-allow-regmoves
                     is set certain anti-dep edges are not created.
                     It might be that these anti-dep edges are on the
                     path from one memory instruction to another such that
                     removing these edges could cause a violation of the
                     memory dependencies.  Thus we add intra edges between
                     every two memory instructions in this case.  */
                  if (flag_modulo_sched_allow_regmoves
                      && !bitmap_bit_p (dest_node->predecessors, j))
                    add_intra_loop_mem_dep (g, j_node, dest_node);
                }
            }
        }
    }

  /* Free the INSN_LISTs.  */
  finish_deps_global ();
  free_deps (&tmp_deps);

  /* Free dependencies.  */
  sched_free_deps (head, tail, false);
}

/* Given a basic block, create its DDG and return a pointer to a variable
   of ddg type that represents it.
   Initialize the ddg structure fields to the appropriate values.  */
ddg_ptr
create_ddg (basic_block bb, int closing_branch_deps)
{
  ddg_ptr g;
  rtx_insn *insn, *first_note;
  int i;
  int num_nodes = 0;

  g = (ddg_ptr) xcalloc (1, sizeof (struct ddg));

  g->bb = bb;
  g->closing_branch_deps = closing_branch_deps;

  /* Count the number of insns in the BB.  */
  for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
       insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn) || GET_CODE (PATTERN (insn)) == USE)
        continue;

      if (DEBUG_INSN_P (insn))
        g->num_debug++;
      else
        {
          if (mem_read_insn_p (insn))
            g->num_loads++;
          if (mem_write_insn_p (insn))
            g->num_stores++;
        }
      num_nodes++;
    }

  /* There is nothing to do for this BB.  */
  if ((num_nodes - g->num_debug) <= 1)
    {
      free (g);
      return NULL;
    }

  /* Allocate the nodes array, and initialize the nodes.  */
  g->num_nodes = num_nodes;
  g->nodes = (ddg_node_ptr) xcalloc (num_nodes, sizeof (struct ddg_node));
  g->closing_branch = NULL;
  i = 0;
  first_note = NULL;
  for (insn = BB_HEAD (bb); insn != NEXT_INSN (BB_END (bb));
       insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
        {
          if (! first_note && NOTE_P (insn)
              && NOTE_KIND (insn) != NOTE_INSN_BASIC_BLOCK)
            first_note = insn;
          continue;
        }
      if (JUMP_P (insn))
        {
          gcc_assert (!g->closing_branch);
          g->closing_branch = &g->nodes[i];
        }
      else if (GET_CODE (PATTERN (insn)) == USE)
        {
          if (! first_note)
            first_note = insn;
          continue;
        }

      g->nodes[i].cuid = i;
      g->nodes[i].successors = sbitmap_alloc (num_nodes);
      bitmap_clear (g->nodes[i].successors);
      g->nodes[i].predecessors = sbitmap_alloc (num_nodes);
      bitmap_clear (g->nodes[i].predecessors);
      g->nodes[i].first_note = (first_note ? first_note : insn);
      g->nodes[i++].insn = insn;
      first_note = NULL;
    }

  /* We must have found a branch in DDG.  */
  gcc_assert (g->closing_branch);

  /* Build the data dependency graph.  */
  build_intra_loop_deps (g);
  build_inter_loop_deps (g);
  return g;
}

/* Free all the memory allocated for the DDG.  */
void
free_ddg (ddg_ptr g)
{
  int i;

  if (!g)
    return;

  for (i = 0; i < g->num_nodes; i++)
    {
      ddg_edge_ptr e = g->nodes[i].out;

      while (e)
        {
          ddg_edge_ptr next = e->next_out;

          free (e);
          e = next;
        }
      sbitmap_free (g->nodes[i].successors);
      sbitmap_free (g->nodes[i].predecessors);
    }
  if (g->num_backarcs > 0)
    free (g->backarcs);
  free (g->nodes);
  free (g);
}

void
print_ddg_edge (FILE *file, ddg_edge_ptr e)
{
  char dep_c;

  switch (e->type)
    {
    case OUTPUT_DEP :
      dep_c = 'O';
      break;
    case ANTI_DEP :
      dep_c = 'A';
      break;
    default:
      dep_c = 'T';
    }

  fprintf (file, " [%d -(%c,%d,%d)-> %d] ", INSN_UID (e->src->insn),
           dep_c, e->latency, e->distance, INSN_UID (e->dest->insn));
}

/* Print the DDG nodes with their in/out edges to the dump file.  */
void
print_ddg (FILE *file, ddg_ptr g)
{
  int i;

  for (i = 0; i < g->num_nodes; i++)
    {
      ddg_edge_ptr e;

      fprintf (file, "Node num: %d\n", g->nodes[i].cuid);
      print_rtl_single (file, g->nodes[i].insn);
      fprintf (file, "OUT ARCS: ");
      for (e = g->nodes[i].out; e; e = e->next_out)
        print_ddg_edge (file, e);

      fprintf (file, "\nIN ARCS: ");
      for (e = g->nodes[i].in; e; e = e->next_in)
        print_ddg_edge (file, e);

      fprintf (file, "\n");
    }
}

/* Print the given DDG in VCG format.  */
DEBUG_FUNCTION void
vcg_print_ddg (FILE *file, ddg_ptr g)
{
  int src_cuid;

  fprintf (file, "graph: {\n");
  for (src_cuid = 0; src_cuid < g->num_nodes; src_cuid++)
    {
      ddg_edge_ptr e;
      int src_uid = INSN_UID (g->nodes[src_cuid].insn);

      fprintf (file, "node: {title: \"%d_%d\" info1: \"", src_cuid, src_uid);
      print_rtl_single (file, g->nodes[src_cuid].insn);
      fprintf (file, "\"}\n");

      for (e = g->nodes[src_cuid].out; e; e = e->next_out)
        {
          int dst_uid = INSN_UID (e->dest->insn);
          int dst_cuid = e->dest->cuid;

          /* Give the backarcs a different color.  */
          if (e->distance > 0)
            fprintf (file, "backedge: {color: red ");
          else
            fprintf (file, "edge: { ");
          fprintf (file, "sourcename: \"%d_%d\" ", src_cuid, src_uid);
          fprintf (file, "targetname: \"%d_%d\" ", dst_cuid, dst_uid);
          fprintf (file, "label: \"%d_%d\"}\n", e->latency, e->distance);
        }
    }
  fprintf (file, "}\n");
}

/* Dump the sccs in SCCS.  */
void
print_sccs (FILE *file, ddg_all_sccs_ptr sccs, ddg_ptr g)
{
  unsigned int u = 0;
  sbitmap_iterator sbi;
  int i;

  if (!file)
    return;

  fprintf (file, "\n;; Number of SCC nodes - %d\n", sccs->num_sccs);
  for (i = 0; i < sccs->num_sccs; i++)
    {
      fprintf (file, "SCC number: %d\n", i);
      EXECUTE_IF_SET_IN_BITMAP (sccs->sccs[i]->nodes, 0, u, sbi)
        {
          fprintf (file, "insn num %d\n", u);
          print_rtl_single (file, g->nodes[u].insn);
        }
    }
  fprintf (file, "\n");
}

/* Create an edge and initialize it with given values.  */
static ddg_edge_ptr
create_ddg_edge (ddg_node_ptr src, ddg_node_ptr dest,
                 dep_type t, dep_data_type dt, int l, int d)
{
  ddg_edge_ptr e = (ddg_edge_ptr) xmalloc (sizeof (struct ddg_edge));

  e->src = src;
  e->dest = dest;
  e->type = t;
  e->data_type = dt;
  e->latency = l;
  e->distance = d;
  e->next_in = e->next_out = NULL;
  e->aux.info = 0;
  return e;
}

/* Add the given edge to the in/out linked lists of the DDG nodes.  */
static void
add_edge_to_ddg (ddg_ptr g ATTRIBUTE_UNUSED, ddg_edge_ptr e)
{
  ddg_node_ptr src = e->src;
  ddg_node_ptr dest = e->dest;

  /* Should have allocated the sbitmaps.  */
  gcc_assert (src->successors && dest->predecessors);

  bitmap_set_bit (src->successors, dest->cuid);
  bitmap_set_bit (dest->predecessors, src->cuid);
  e->next_in = dest->in;
  dest->in = e;
  e->next_out = src->out;
  src->out = e;
}

/* Algorithm for computing the recurrence_length of an scc.  We assume for
   now that cycles in the data dependence graph contain a single backarc.
   This simplifies the algorithm, and can be generalized later.  */
static void
set_recurrence_length (ddg_scc_ptr scc, ddg_ptr g)
{
  int j;
  int result = -1;

  for (j = 0; j < scc->num_backarcs; j++)
    {
      ddg_edge_ptr backarc = scc->backarcs[j];
      int length;
      int distance = backarc->distance;
      ddg_node_ptr src = backarc->dest;
      ddg_node_ptr dest = backarc->src;

      length = longest_simple_path (g, src->cuid, dest->cuid, scc->nodes);
      if (length < 0)
        {
          /* fprintf (stderr, "Backarc not on simple cycle in SCC.\n"); */
          continue;
        }
      length += backarc->latency;
      result = MAX (result, (length / distance));
    }
  scc->recurrence_length = result;
}

/* Create a new SCC given the set of its nodes.  Compute its recurrence_length
   and mark edges that belong to this scc as IN_SCC.  */
static ddg_scc_ptr
create_scc (ddg_ptr g, sbitmap nodes)
{
  ddg_scc_ptr scc;
  unsigned int u = 0;
  sbitmap_iterator sbi;

  scc = (ddg_scc_ptr) xmalloc (sizeof (struct ddg_scc));
  scc->backarcs = NULL;
  scc->num_backarcs = 0;
  scc->nodes = sbitmap_alloc (g->num_nodes);
  bitmap_copy (scc->nodes, nodes);

  /* Mark the backarcs that belong to this SCC.  */
  EXECUTE_IF_SET_IN_BITMAP (nodes, 0, u, sbi)
    {
      ddg_edge_ptr e;
      ddg_node_ptr n = &g->nodes[u];

      for (e = n->out; e; e = e->next_out)
        if (bitmap_bit_p (nodes, e->dest->cuid))
          {
            e->aux.count = IN_SCC;
            if (e->distance > 0)
              add_backarc_to_scc (scc, e);
          }
    }

  set_recurrence_length (scc, g);
  return scc;
}

/* Cleans the memory allocation of a given SCC.  */
static void
free_scc (ddg_scc_ptr scc)
{
  if (!scc)
    return;

  sbitmap_free (scc->nodes);
  if (scc->num_backarcs > 0)
    free (scc->backarcs);
  free (scc);
}

/* Add a given edge known to be a backarc to the given DDG.  */
static void
add_backarc_to_ddg (ddg_ptr g, ddg_edge_ptr e)
{
  int size = (g->num_backarcs + 1) * sizeof (ddg_edge_ptr);

  add_edge_to_ddg (g, e);
  g->backarcs = (ddg_edge_ptr *) xrealloc (g->backarcs, size);
  g->backarcs[g->num_backarcs++] = e;
}

/* Add backarc to an SCC.  */
static void
add_backarc_to_scc (ddg_scc_ptr scc, ddg_edge_ptr e)
{
  int size = (scc->num_backarcs + 1) * sizeof (ddg_edge_ptr);

  scc->backarcs = (ddg_edge_ptr *) xrealloc (scc->backarcs, size);
  scc->backarcs[scc->num_backarcs++] = e;
}

/* Add the given SCC to the DDG.  */
static void
add_scc_to_ddg (ddg_all_sccs_ptr g, ddg_scc_ptr scc)
{
  int size = (g->num_sccs + 1) * sizeof (ddg_scc_ptr);

  g->sccs = (ddg_scc_ptr *) xrealloc (g->sccs, size);
  g->sccs[g->num_sccs++] = scc;
}

/* Given the instruction INSN return the node that represents it.  */
ddg_node_ptr
get_node_of_insn (ddg_ptr g, rtx_insn *insn)
{
  int i;

  for (i = 0; i < g->num_nodes; i++)
    if (insn == g->nodes[i].insn)
      return &g->nodes[i];
  return NULL;
}

/* Given a set OPS of nodes in the DDG, find the set of their successors
   which are not in OPS, and set their bits in SUCC.  Bits corresponding to
   OPS are cleared from SUCC.  Leaves the other bits in SUCC unchanged.  */
void
find_successors (sbitmap succ, ddg_ptr g, sbitmap ops)
{
  unsigned int i = 0;
  sbitmap_iterator sbi;

  EXECUTE_IF_SET_IN_BITMAP (ops, 0, i, sbi)
    {
      const sbitmap node_succ = NODE_SUCCESSORS (&g->nodes[i]);
      bitmap_ior (succ, succ, node_succ);
    };

  /* We want those that are not in ops.  */
  bitmap_and_compl (succ, succ, ops);
}

/* Given a set OPS of nodes in the DDG, find the set of their predecessors
   which are not in OPS, and set their bits in PREDS.  Bits corresponding to
   OPS are cleared from PREDS.  Leaves the other bits in PREDS unchanged.  */
void
find_predecessors (sbitmap preds, ddg_ptr g, sbitmap ops)
{
  unsigned int i = 0;
  sbitmap_iterator sbi;

  EXECUTE_IF_SET_IN_BITMAP (ops, 0, i, sbi)
    {
      const sbitmap node_preds = NODE_PREDECESSORS (&g->nodes[i]);
      bitmap_ior (preds, preds, node_preds);
    };

  /* We want those that are not in ops.  */
  bitmap_and_compl (preds, preds, ops);
}

/* Compare function to be passed to qsort to order the SCCs in descending
   recMII order.  */
static int
compare_sccs (const void *s1, const void *s2)
{
  const int rec_l1 = (*(const ddg_scc_ptr *)s1)->recurrence_length;
  const int rec_l2 = (*(const ddg_scc_ptr *)s2)->recurrence_length;
  return ((rec_l2 > rec_l1) - (rec_l2 < rec_l1));
}

/* Order the SCCs in descending recMII order using compare_sccs.  */
static void
order_sccs (ddg_all_sccs_ptr g)
{
  qsort (g->sccs, g->num_sccs, sizeof (ddg_scc_ptr),
         (int (*) (const void *, const void *)) compare_sccs);
}

#ifdef ENABLE_CHECKING
/* Check that every node in SCCS belongs to exactly one strongly connected
   component and that no element of SCCS is empty.  */
static void
check_sccs (ddg_all_sccs_ptr sccs, int num_nodes)
{
  int i = 0;
  sbitmap tmp = sbitmap_alloc (num_nodes);

  bitmap_clear (tmp);
  for (i = 0; i < sccs->num_sccs; i++)
    {
      gcc_assert (!bitmap_empty_p (sccs->sccs[i]->nodes));
      /* Verify that every node in sccs is in exactly one strongly
         connected component.  */
      gcc_assert (!bitmap_intersect_p (tmp, sccs->sccs[i]->nodes));
      bitmap_ior (tmp, tmp, sccs->sccs[i]->nodes);
    }
  sbitmap_free (tmp);
}
#endif

/* Perform the Strongly Connected Components decomposing algorithm on the
   DDG and return DDG_ALL_SCCS structure that contains them.  */
ddg_all_sccs_ptr
create_ddg_all_sccs (ddg_ptr g)
{
  int i;
  int num_nodes = g->num_nodes;
  sbitmap from = sbitmap_alloc (num_nodes);
  sbitmap to = sbitmap_alloc (num_nodes);
  sbitmap scc_nodes = sbitmap_alloc (num_nodes);
  ddg_all_sccs_ptr sccs = (ddg_all_sccs_ptr)
                          xmalloc (sizeof (struct ddg_all_sccs));

  sccs->ddg = g;
  sccs->sccs = NULL;
  sccs->num_sccs = 0;

  for (i = 0; i < g->num_backarcs; i++)
    {
      ddg_scc_ptr scc;
      ddg_edge_ptr backarc = g->backarcs[i];
      ddg_node_ptr src = backarc->src;
      ddg_node_ptr dest = backarc->dest;

      /* If the backarc already belongs to an SCC, continue.  */
      if (backarc->aux.count == IN_SCC)
        continue;

      bitmap_clear (scc_nodes);
      bitmap_clear (from);
      bitmap_clear (to);
      bitmap_set_bit (from, dest->cuid);
      bitmap_set_bit (to, src->cuid);

      if (find_nodes_on_paths (scc_nodes, g, from, to))
        {
          scc = create_scc (g, scc_nodes);
          add_scc_to_ddg (sccs, scc);
        }
    }
  order_sccs (sccs);
  sbitmap_free (from);
  sbitmap_free (to);
  sbitmap_free (scc_nodes);

#ifdef ENABLE_CHECKING
  check_sccs (sccs, num_nodes);
#endif

  return sccs;
}

/* Frees the memory allocated for all SCCs of the DDG, but keeps the DDG.  */
void
free_ddg_all_sccs (ddg_all_sccs_ptr all_sccs)
{
  int i;

  if (!all_sccs)
    return;

  for (i = 0; i < all_sccs->num_sccs; i++)
    free_scc (all_sccs->sccs[i]);

  free (all_sccs->sccs);
  free (all_sccs);
}

/* Given FROM - a bitmap of source nodes - and TO - a bitmap of destination
   nodes - find all nodes that lie on paths from FROM to TO (not excluding
   nodes from FROM and TO).  Return nonzero if nodes exist.  */
int
find_nodes_on_paths (sbitmap result, ddg_ptr g, sbitmap from, sbitmap to)
{
  int answer;
  int change;
  unsigned int u = 0;
  int num_nodes = g->num_nodes;
  sbitmap_iterator sbi;

  sbitmap workset = sbitmap_alloc (num_nodes);
  sbitmap reachable_from = sbitmap_alloc (num_nodes);
  sbitmap reach_to = sbitmap_alloc (num_nodes);
  sbitmap tmp = sbitmap_alloc (num_nodes);

  bitmap_copy (reachable_from, from);
  bitmap_copy (tmp, from);

  change = 1;
  while (change)
    {
      change = 0;
      bitmap_copy (workset, tmp);
      bitmap_clear (tmp);
      EXECUTE_IF_SET_IN_BITMAP (workset, 0, u, sbi)
        {
          ddg_edge_ptr e;
          ddg_node_ptr u_node = &g->nodes[u];

          for (e = u_node->out; e != (ddg_edge_ptr) 0; e = e->next_out)
            {
              ddg_node_ptr v_node = e->dest;
              int v = v_node->cuid;

              if (!bitmap_bit_p (reachable_from, v))
                {
                  bitmap_set_bit (reachable_from, v);
                  bitmap_set_bit (tmp, v);
                  change = 1;
                }
            }
        }
    }

  bitmap_copy (reach_to, to);
  bitmap_copy (tmp, to);

  change = 1;
  while (change)
    {
      change = 0;
      bitmap_copy (workset, tmp);
      bitmap_clear (tmp);
      EXECUTE_IF_SET_IN_BITMAP (workset, 0, u, sbi)
        {
          ddg_edge_ptr e;
          ddg_node_ptr u_node = &g->nodes[u];

          for (e = u_node->in; e != (ddg_edge_ptr) 0; e = e->next_in)
            {
              ddg_node_ptr v_node = e->src;
              int v = v_node->cuid;

              if (!bitmap_bit_p (reach_to, v))
                {
                  bitmap_set_bit (reach_to, v);
                  bitmap_set_bit (tmp, v);
                  change = 1;
                }
            }
        }
    }

  answer = bitmap_and (result, reachable_from, reach_to);
  sbitmap_free (workset);
  sbitmap_free (reachable_from);
  sbitmap_free (reach_to);
  sbitmap_free (tmp);
  return answer;
}

/* Updates the counts of U_NODE's successors (that belong to NODES) to be
   at least as large as the count of U_NODE plus the latency between them.
   Sets a bit in TMP for each successor whose count was changed (increased).
   Returns nonzero if any count was changed.  */
static int
update_dist_to_successors (ddg_node_ptr u_node, sbitmap nodes, sbitmap tmp)
{
  ddg_edge_ptr e;
  int result = 0;

  for (e = u_node->out; e; e = e->next_out)
    {
      ddg_node_ptr v_node = e->dest;
      int v = v_node->cuid;

      if (bitmap_bit_p (nodes, v)
          && (e->distance == 0)
          && (v_node->aux.count < u_node->aux.count + e->latency))
        {
          v_node->aux.count = u_node->aux.count + e->latency;
          bitmap_set_bit (tmp, v);
          result = 1;
        }
    }
  return result;
}

/* Find the length of a longest path from SRC to DEST in G,
   going only through NODES, and disregarding backarcs.  */
int
longest_simple_path (struct ddg * g, int src, int dest, sbitmap nodes)
{
  int i;
  unsigned int u = 0;
  int change = 1;
  int result;
  int num_nodes = g->num_nodes;
  sbitmap workset = sbitmap_alloc (num_nodes);
  sbitmap tmp = sbitmap_alloc (num_nodes);

  /* Data will hold the distance of the longest path found so far from
     src to each node.  Initialize to -1 = less than minimum.  */
  for (i = 0; i < g->num_nodes; i++)
    g->nodes[i].aux.count = -1;
  g->nodes[src].aux.count = 0;

  bitmap_clear (tmp);
  bitmap_set_bit (tmp, src);

  while (change)
    {
      sbitmap_iterator sbi;

      change = 0;
      bitmap_copy (workset, tmp);
      bitmap_clear (tmp);
      EXECUTE_IF_SET_IN_BITMAP (workset, 0, u, sbi)
        {
          ddg_node_ptr u_node = &g->nodes[u];

          change |= update_dist_to_successors (u_node, nodes, tmp);
        }
    }
  result = g->nodes[dest].aux.count;
  sbitmap_free (workset);
  sbitmap_free (tmp);
  return result;
}

#endif /* INSN_SCHEDULING */