diff options
Diffstat (limited to 'gcc/flow.c')
-rw-r--r-- | gcc/flow.c | 179 |
1 files changed, 148 insertions, 31 deletions
diff --git a/gcc/flow.c b/gcc/flow.c index f356a6ee396..d3850cc9618 100644 --- a/gcc/flow.c +++ b/gcc/flow.c @@ -165,6 +165,11 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA #endif #endif +/* This is the maximum number of times we process any given block if the + latest loop depth count is smaller than this number. Only used for the + failure strategy to avoid infinite loops in calculate_global_regs_live. */ +#define MAX_LIVENESS_ROUNDS 20 + /* Nonzero if the second flow pass has completed. */ int flow2_completed; @@ -729,21 +734,10 @@ update_life_info_in_dirty_blocks (enum update_life_extent extent, int prop_flags sbitmap_zero (update_life_blocks); FOR_EACH_BB (bb) { - if (extent == UPDATE_LIFE_LOCAL) + if (bb->flags & BB_DIRTY) { - if (bb->flags & BB_DIRTY) - { - SET_BIT (update_life_blocks, bb->index); - n++; - } - } - else - { - /* ??? Bootstrap with -march=pentium4 fails to terminate - with only a partial life update. */ SET_BIT (update_life_blocks, bb->index); - if (bb->flags & BB_DIRTY) - n++; + n++; } } @@ -823,21 +817,35 @@ delete_noop_moves (void) void delete_dead_jumptables (void) { - rtx insn, next; - for (insn = get_insns (); insn; insn = next) + basic_block bb; + + /* A dead jump table does not belong to any basic block. Scan insns + between two adjacent basic blocks. 
*/ + FOR_EACH_BB (bb) { - next = NEXT_INSN (insn); - if (LABEL_P (insn) - && LABEL_NUSES (insn) == LABEL_PRESERVE_P (insn) - && JUMP_P (next) - && (GET_CODE (PATTERN (next)) == ADDR_VEC - || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC)) + rtx insn, next; + + for (insn = NEXT_INSN (BB_END (bb)); + insn && !NOTE_INSN_BASIC_BLOCK_P (insn); + insn = next) { - if (dump_file) - fprintf (dump_file, "Dead jumptable %i removed\n", INSN_UID (insn)); - delete_insn (NEXT_INSN (insn)); - delete_insn (insn); - next = NEXT_INSN (next); + next = NEXT_INSN (insn); + if (LABEL_P (insn) + && LABEL_NUSES (insn) == LABEL_PRESERVE_P (insn) + && JUMP_P (next) + && (GET_CODE (PATTERN (next)) == ADDR_VEC + || GET_CODE (PATTERN (next)) == ADDR_DIFF_VEC)) + { + rtx label = insn, jump = next; + + if (dump_file) + fprintf (dump_file, "Dead jumptable %i removed\n", + INSN_UID (insn)); + + next = NEXT_INSN (next); + delete_insn (jump); + delete_insn (label); + } } } } @@ -1010,6 +1018,9 @@ calculate_global_regs_live (sbitmap blocks_in, sbitmap blocks_out, int flags) { basic_block *queue, *qhead, *qtail, *qend, bb; regset tmp, new_live_at_end, invalidated_by_call; + regset registers_made_dead; + bool failure_strategy_required = false; + int *block_accesses; /* The registers that are modified within this in block. */ regset *local_sets; @@ -1030,6 +1041,7 @@ calculate_global_regs_live (sbitmap blocks_in, sbitmap blocks_out, int flags) tmp = ALLOC_REG_SET (®_obstack); new_live_at_end = ALLOC_REG_SET (®_obstack); invalidated_by_call = ALLOC_REG_SET (®_obstack); + registers_made_dead = ALLOC_REG_SET (®_obstack); /* Inconveniently, this is only readily available in hard reg set form. 
*/ for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i) @@ -1070,6 +1082,8 @@ calculate_global_regs_live (sbitmap blocks_in, sbitmap blocks_out, int flags) } } + block_accesses = xcalloc (last_basic_block, sizeof (int)); + /* We clean aux when we remove the initially-enqueued bbs, but we don't enqueue ENTRY and EXIT initially, so clean them upfront and unconditionally. */ @@ -1095,7 +1109,41 @@ calculate_global_regs_live (sbitmap blocks_in, sbitmap blocks_out, int flags) from GLOBAL_LIVE_AT_START. In the former case, the register could go away only if it disappeared from GLOBAL_LIVE_AT_START for one of the successor blocks. By induction, that cannot - occur. */ + occur. + + ??? This reasoning doesn't work if we start from non-empty initial + GLOBAL_LIVE_AT_START sets. And there are actually two problems: + 1) Updating may not terminate (endless oscillation). + 2) Even if it does (and it usually does), the resulting information + may be inaccurate. Consider for example the following case: + + a = ...; + while (...) {...} -- 'a' not mentioned at all + ... = a; + + If the use of 'a' is deleted between two calculations of liveness + information and the initial sets are not cleared, the information + about a's liveness will get stuck inside the loop and the set will + appear not to be dead. + + We do not attempt to solve 2) -- the information is conservatively + correct (i.e. we never claim that something live is dead) and the + amount of optimization opportunities missed due to this problem is + not significant. + + 1) is more serious. In order to fix it, we monitor the number of times + each block is processed. Once one of the blocks has been processed more + times than the maximum number of rounds, we use the following strategy: + When a register disappears from one of the sets, we add it to a MAKE_DEAD + set, remove all registers in this set from all GLOBAL_LIVE_AT_* sets and + add the blocks with changed sets into the queue. 
Thus we are guaranteed + to terminate (the worst case corresponds to all registers in MADE_DEAD, + in which case the original reasoning above is valid), but in general we + only fix up a few offending registers. + + The maximum number of rounds for computing liveness is the largest of + MAX_LIVENESS_ROUNDS and the latest loop depth count for this function. */ + while (qhead != qtail) { int rescan, changed; @@ -1108,6 +1156,17 @@ calculate_global_regs_live (sbitmap blocks_in, sbitmap blocks_out, int flags) qhead = queue; bb->aux = NULL; + /* Should we start using the failure strategy? */ + if (bb != ENTRY_BLOCK_PTR) + { + int max_liveness_rounds = + MAX (MAX_LIVENESS_ROUNDS, cfun->max_loop_depth); + + block_accesses[bb->index]++; + if (block_accesses[bb->index] > max_liveness_rounds) + failure_strategy_required = true; + } + /* Begin by propagating live_at_start from the successor blocks. */ CLEAR_REG_SET (new_live_at_end); @@ -1263,6 +1322,62 @@ calculate_global_regs_live (sbitmap blocks_in, sbitmap blocks_out, int flags) if (REG_SET_EQUAL_P (bb->global_live_at_start, new_live_at_end)) continue; + if (failure_strategy_required) + { + /* Get the list of registers that were removed from the + bb->global_live_at_start set. */ + bitmap_and_compl (tmp, bb->global_live_at_start, + new_live_at_end); + if (!bitmap_empty_p (tmp)) + { + bool pbb_changed; + basic_block pbb; + + /* It should not happen that one of the registers we have + removed last time disappears again before any other + register does. */ + pbb_changed = bitmap_ior_into (registers_made_dead, tmp); + gcc_assert (pbb_changed); + + /* Now remove the registers from all sets. */ + FOR_EACH_BB (pbb) + { + pbb_changed = false; + + pbb_changed + |= bitmap_and_compl_into (pbb->global_live_at_start, + registers_made_dead); + pbb_changed + |= bitmap_and_compl_into (pbb->global_live_at_end, + registers_made_dead); + if (!pbb_changed) + continue; + + /* Note the (possible) change. 
*/ + if (blocks_out) + SET_BIT (blocks_out, pbb->index); + + /* Makes sure to really rescan the block. */ + if (local_sets[pbb->index - (INVALID_BLOCK + 1)]) + { + FREE_REG_SET (local_sets[pbb->index - (INVALID_BLOCK + 1)]); + FREE_REG_SET (cond_local_sets[pbb->index - (INVALID_BLOCK + 1)]); + local_sets[pbb->index - (INVALID_BLOCK + 1)] = 0; + } + + /* Add it to the queue. */ + if (pbb->aux == NULL) + { + *qtail++ = pbb; + if (qtail == qend) + qtail = queue; + pbb->aux = pbb; + } + } + continue; + } + } /* end of failure_strategy_required */ + COPY_REG_SET (bb->global_live_at_start, new_live_at_end); } @@ -1284,6 +1399,7 @@ calculate_global_regs_live (sbitmap blocks_in, sbitmap blocks_out, int flags) FREE_REG_SET (tmp); FREE_REG_SET (new_live_at_end); FREE_REG_SET (invalidated_by_call); + FREE_REG_SET (registers_made_dead); if (blocks_out) { @@ -1303,6 +1419,7 @@ calculate_global_regs_live (sbitmap blocks_in, sbitmap blocks_out, int flags) } } + free (block_accesses); free (queue); free (cond_local_sets); free (local_sets); @@ -1826,12 +1943,12 @@ init_propagate_block_info (basic_block bb, regset live, regset local_set, else pbi->reg_next_use = NULL; - pbi->new_set = BITMAP_XMALLOC (); + pbi->new_set = BITMAP_ALLOC (NULL); #ifdef HAVE_conditional_execution pbi->reg_cond_dead = splay_tree_new (splay_tree_compare_ints, NULL, free_reg_cond_life_info); - pbi->reg_cond_reg = BITMAP_XMALLOC (); + pbi->reg_cond_reg = BITMAP_ALLOC (NULL); /* If this block ends in a conditional branch, for each register live from one side of the branch and not the other, record the @@ -1973,11 +2090,11 @@ free_propagate_block_info (struct propagate_block_info *pbi) { free_EXPR_LIST_list (&pbi->mem_set_list); - BITMAP_XFREE (pbi->new_set); + BITMAP_FREE (pbi->new_set); #ifdef HAVE_conditional_execution splay_tree_delete (pbi->reg_cond_dead); - BITMAP_XFREE (pbi->reg_cond_reg); + BITMAP_FREE (pbi->reg_cond_reg); #endif if (pbi->flags & PROP_REG_INFO) |