aboutsummaryrefslogtreecommitdiff
path: root/gcc/cfganal.c
diff options
context:
space:
mode:
authordberlin <dberlin@138bc75d-0d04-0410-961f-82ee72b054a4>2007-06-11 18:02:15 +0000
committerdberlin <dberlin@138bc75d-0d04-0410-961f-82ee72b054a4>2007-06-11 18:02:15 +0000
commit3072d30e7983a3ca5ad030f1f98a5c39bcc2c07b (patch)
treefdb9e9f8a0700a2713dc690fed1a2cf20dae8392 /gcc/cfganal.c
parent8ceb1bfd33bc40bf0cbe1fab8903c2c31efd10ee (diff)
Merge dataflow branch into mainline
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@125624 138bc75d-0d04-0410-961f-82ee72b054a4
Diffstat (limited to 'gcc/cfganal.c')
-rw-r--r--gcc/cfganal.c230
1 files changed, 223 insertions, 7 deletions
diff --git a/gcc/cfganal.c b/gcc/cfganal.c
index 467c399c84c..18cef5e98f2 100644
--- a/gcc/cfganal.c
+++ b/gcc/cfganal.c
@@ -1,6 +1,6 @@
/* Control flow graph analysis code for GNU compiler.
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2003, 2004, 2005 Free Software Foundation, Inc.
+ 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
This file is part of GCC.
@@ -645,16 +645,20 @@ connect_infinite_loops_to_exit (void)
return;
}
-/* Compute reverse top sort order.
- This is computing a post order numbering of the graph. */
+/* Compute reverse top sort order. This is computing a post order
+ numbering of the graph. If INCLUDE_ENTRY_EXIT is true, then
+ ENTRY_BLOCK and EXIT_BLOCK are included. If DELETE_UNREACHABLE is
+ true, unreachable blocks are deleted. */
int
-post_order_compute (int *post_order, bool include_entry_exit)
+post_order_compute (int *post_order, bool include_entry_exit,
+ bool delete_unreachable)
{
edge_iterator *stack;
int sp;
int post_order_num = 0;
sbitmap visited;
+ int count;
if (include_entry_exit)
post_order[post_order_num++] = EXIT_BLOCK;
@@ -699,7 +703,7 @@ post_order_compute (int *post_order, bool include_entry_exit)
else
{
if (ei_one_before_end_p (ei) && src != ENTRY_BLOCK_PTR)
- post_order[post_order_num++] = src->index;
+ post_order[post_order_num++] = src->index;
if (!ei_one_before_end_p (ei))
ei_next (&stack[sp - 1]);
@@ -709,7 +713,220 @@ post_order_compute (int *post_order, bool include_entry_exit)
}
if (include_entry_exit)
- post_order[post_order_num++] = ENTRY_BLOCK;
+ {
+ post_order[post_order_num++] = ENTRY_BLOCK;
+ count = post_order_num;
+ }
+ else
+ count = post_order_num + 2;
+
+ /* Delete the unreachable blocks if some were found and we are
+ supposed to do it. */
+ if (delete_unreachable && (count != n_basic_blocks))
+ {
+ basic_block b;
+ basic_block next_bb;
+ for (b = ENTRY_BLOCK_PTR->next_bb; b != EXIT_BLOCK_PTR; b = next_bb)
+ {
+ next_bb = b->next_bb;
+
+ if (!(TEST_BIT (visited, b->index)))
+ delete_basic_block (b);
+ }
+
+ tidy_fallthru_edges ();
+ }
+
+ free (stack);
+ sbitmap_free (visited);
+ return post_order_num;
+}
+
+
+/* Helper routine for inverted_post_order_compute.
+ BB has to belong to a region of CFG
+ unreachable by inverted traversal from the exit.
+ i.e. there's no control flow path from ENTRY to EXIT
+ that contains this BB.
+ This can happen in two cases - if there's an infinite loop
+ or if there's a block that has no successor
+ (call to a function with no return).
+ Some RTL passes deal with this condition by
+ calling connect_infinite_loops_to_exit () and/or
+ add_noreturn_fake_exit_edges ().
+ However, those methods involve modifying the CFG itself
+ which may not be desirable.
+ Hence, we deal with the infinite loop/no return cases
+ by identifying a unique basic block that can reach all blocks
+ in such a region by inverted traversal.
+ This function returns a basic block that guarantees
+ that all blocks in the region are reachable
+ by starting an inverted traversal from the returned block. */
+
+static basic_block
+dfs_find_deadend (basic_block bb)
+{
+ sbitmap visited = sbitmap_alloc (last_basic_block);
+ sbitmap_zero (visited);
+
+ for (;;)
+ {
+ SET_BIT (visited, bb->index);
+ if (EDGE_COUNT (bb->succs) == 0
+ || TEST_BIT (visited, EDGE_SUCC (bb, 0)->dest->index))
+ {
+ sbitmap_free (visited);
+ return bb;
+ }
+
+ bb = EDGE_SUCC (bb, 0)->dest;
+ }
+
+ gcc_unreachable ();
+}
+
+
+/* Compute the reverse top sort order of the inverted CFG
+ i.e. starting from the exit block and following the edges backward
+ (from successors to predecessors).
+ This ordering can be used for forward dataflow problems among others.
+
+ This function assumes that all blocks in the CFG are reachable
+ from the ENTRY (but not necessarily from EXIT).
+
+ If there's an infinite loop,
+ a simple inverted traversal starting from the blocks
+ with no successors can't visit all blocks.
+ To solve this problem, we first do inverted traversal
+ starting from the blocks with no successor.
+ And if there's any block left that's not visited by the regular
+ inverted traversal from EXIT,
+ those blocks are in such problematic region.
+ Among those, we find one block that has
+ any visited predecessor (which is an entry into such a region),
+ and start looking for a "dead end" from that block
+ and do another inverted traversal from that block. */
+
+int
+inverted_post_order_compute (int *post_order)
+{
+ basic_block bb;
+ edge_iterator *stack;
+ int sp;
+ int post_order_num = 0;
+ sbitmap visited;
+
+ /* Allocate stack for back-tracking up CFG. */
+ stack = XNEWVEC (edge_iterator, n_basic_blocks + 1);
+ sp = 0;
+
+ /* Allocate bitmap to track nodes that have been visited. */
+ visited = sbitmap_alloc (last_basic_block);
+
+ /* None of the nodes in the CFG have been visited yet. */
+ sbitmap_zero (visited);
+
+ /* Put all blocks that have no successor into the initial work list. */
+ FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
+ if (EDGE_COUNT (bb->succs) == 0)
+ {
+ /* Push the initial edge on to the stack. */
+ if (EDGE_COUNT (bb->preds) > 0)
+ {
+ stack[sp++] = ei_start (bb->preds);
+ SET_BIT (visited, bb->index);
+ }
+ }
+
+ do
+ {
+ bool has_unvisited_bb = false;
+
+ /* The inverted traversal loop. */
+ while (sp)
+ {
+ edge_iterator ei;
+ basic_block pred;
+
+ /* Look at the edge on the top of the stack. */
+ ei = stack[sp - 1];
+ bb = ei_edge (ei)->dest;
+ pred = ei_edge (ei)->src;
+
+ /* Check if the predecessor has been visited yet. */
+ if (! TEST_BIT (visited, pred->index))
+ {
+ /* Mark that we have visited the destination. */
+ SET_BIT (visited, pred->index);
+
+ if (EDGE_COUNT (pred->preds) > 0)
+ /* Since the predecessor node has been visited for the first
+ time, check its predecessors. */
+ stack[sp++] = ei_start (pred->preds);
+ else
+ post_order[post_order_num++] = pred->index;
+ }
+ else
+ {
+ if (bb != EXIT_BLOCK_PTR && ei_one_before_end_p (ei))
+ post_order[post_order_num++] = bb->index;
+
+ if (!ei_one_before_end_p (ei))
+ ei_next (&stack[sp - 1]);
+ else
+ sp--;
+ }
+ }
+
+ /* Detect any infinite loop and activate the kludge.
+ Note that this doesn't check EXIT_BLOCK itself
+ since EXIT_BLOCK is always added after the outer do-while loop. */
+ FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
+ if (!TEST_BIT (visited, bb->index))
+ {
+ has_unvisited_bb = true;
+
+ if (EDGE_COUNT (bb->preds) > 0)
+ {
+ edge_iterator ei;
+ edge e;
+ basic_block visited_pred = NULL;
+
+ /* Find an already visited predecessor. */
+ FOR_EACH_EDGE (e, ei, bb->preds)
+ {
+ if (TEST_BIT (visited, e->src->index))
+ visited_pred = e->src;
+ }
+
+ if (visited_pred)
+ {
+ basic_block be = dfs_find_deadend (bb);
+ gcc_assert (be != NULL);
+ SET_BIT (visited, be->index);
+ stack[sp++] = ei_start (be->preds);
+ break;
+ }
+ }
+ }
+
+ if (has_unvisited_bb && sp == 0)
+ {
+ /* No blocks are reachable from EXIT at all.
+ Find a dead-end from the ENTRY, and restart the iteration. */
+ basic_block be = dfs_find_deadend (ENTRY_BLOCK_PTR);
+ gcc_assert (be != NULL);
+ SET_BIT (visited, be->index);
+ stack[sp++] = ei_start (be->preds);
+ }
+
+ /* The only case in which the while test below succeeds again
+ is when there's an infinite loop. */
+ }
+ while (sp);
+
+ /* EXIT_BLOCK is always included. */
+ post_order[post_order_num++] = EXIT_BLOCK;
free (stack);
sbitmap_free (visited);
@@ -1076,4 +1293,3 @@ compute_dominance_frontiers (bitmap *frontiers)
timevar_pop (TV_DOM_FRONTIERS);
}
-