author     Richard Biener <rguenther@suse.de>  2024-03-27 16:19:01 +0100
committer  Richard Biener <rguenther@suse.de>  2024-03-28 09:16:34 +0100
commit     0bad303944a1d2311c07d59912b4dfa7bff988c8 (patch)
tree       7d845eded4fb3f363971f3586467b6fdf449d48d
parent     f6d7ff4796709c0639317bfd8fa58a2957a1e299 (diff)
middle-end/114480 - IDF compute is slow
The testcase in this PR shows very slow IDF compute:

  tree SSA rewrite                   :  76.99 ( 31%)

  24.78%        243663  cc1plus  cc1plus  [.] compute_idf

which can be mitigated to some extent by refactoring the bitmap
operations to simpler variants.  With the patch below this becomes

  tree SSA rewrite                   :  15.23 (  8%)

when not optimizing and in addition to that

  tree SSA incremental               : 181.52 ( 30%)

to

  tree SSA incremental               :  24.09 (  6%)

when optimizing.

	PR middle-end/114480
	* cfganal.cc (compute_idf): Use simpler bitmap iteration,
	touch work_set only when phi_insertion_points changed.
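
For illustration only, here is a minimal standalone sketch of the iterated-dominance-frontier worklist that the patch touches.  It uses std::vector<bool> instead of GCC's bitmap type, and compute_idf_sketch is a hypothetical name, not GCC code; it merely mirrors the patch's key idea of pushing a block onto the work set only when its bit in the phi-insertion set actually changed.

/* Illustrative sketch, not GCC's implementation.  */
#include <cstddef>
#include <vector>

std::vector<bool>
compute_idf_sketch (const std::vector<std::vector<std::size_t>> &dfs,
		    const std::vector<bool> &def_blocks)
{
  std::size_t n = dfs.size ();
  std::vector<bool> phi_insertion_points (n, false);
  std::vector<std::size_t> work_set;

  /* Seed the work set with every block containing a definition.  */
  for (std::size_t b = 0; b < n; ++b)
    if (def_blocks[b])
      work_set.push_back (b);

  while (!work_set.empty ())
    {
      /* Plain LIFO order here; GCC pops the lowest-numbered block.  */
      std::size_t bb = work_set.back ();
      work_set.pop_back ();

      /* Walk the (typically small) dominance frontier of BB and, as in
	 the patch, touch the work set only when the phi-insertion bit
	 was newly set.  */
      for (std::size_t d : dfs[bb])
	if (!phi_insertion_points[d])
	  {
	    phi_insertion_points[d] = true;
	    work_set.push_back (d);
	  }
    }
  return phi_insertion_points;
}
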
-rw-r--r--  gcc/cfganal.cc  15
1 file changed, 8 insertions, 7 deletions
diff --git a/gcc/cfganal.cc b/gcc/cfganal.cc
index 432775decf1..35c59f3f5d2 100644
--- a/gcc/cfganal.cc
+++ b/gcc/cfganal.cc
@@ -1701,8 +1701,7 @@ compute_idf (bitmap def_blocks, bitmap_head *dfs)
 	 on earlier blocks first is better.
 	 ??? Basic blocks are by no means guaranteed to be ordered in
 	 optimal order for this iteration. */
-      bb_index = bitmap_first_set_bit (work_set);
-      bitmap_clear_bit (work_set, bb_index);
+      bb_index = bitmap_clear_first_set_bit (work_set);
 
       /* Since the registration of NEW -> OLD name mappings is done
 	 separately from the call to update_ssa, when updating the SSA
@@ -1712,12 +1711,14 @@ compute_idf (bitmap def_blocks, bitmap_head *dfs)
       gcc_checking_assert (bb_index
 			   < (unsigned) last_basic_block_for_fn (cfun));
 
-      EXECUTE_IF_AND_COMPL_IN_BITMAP (&dfs[bb_index], phi_insertion_points,
-				      0, i, bi)
-	{
+      /* The population counts of the dominance frontiers is low
+	 compared to that of phi_insertion_points which approaches
+	 the IDF and of work_set which is at most that of the IDF
+	 as well. That makes iterating over the DFS bitmap preferential
+	 to whole bitmap operations involving also phi_insertion_points. */
+      EXECUTE_IF_SET_IN_BITMAP (&dfs[bb_index], 0, i, bi)
+	if (bitmap_set_bit (phi_insertion_points, i))
 	  bitmap_set_bit (work_set, i);
-	  bitmap_set_bit (phi_insertion_points, i);
-	}
     }
   return phi_insertion_points;
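
As an aside on the first hunk: the bitmap_first_set_bit / bitmap_clear_bit pair is folded into a single bitmap_clear_first_set_bit call.  The helper below is only a rough single-word analogue of that pop-lowest-set-bit operation (a hypothetical sketch, not GCC's bitmap implementation, which operates on its own bitmap representation):

#include <cstdint>

/* Return the index of the lowest set bit in *WORD and clear it.
   *WORD must be nonzero.  Illustrative only.  */
static inline unsigned
clear_first_set_bit (std::uint64_t *word)
{
  unsigned idx = __builtin_ctzll (*word);  /* index of lowest set bit */
  *word &= *word - 1;                      /* clear exactly that bit */
  return idx;
}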