about summary refs log tree commit diff
path: root/gcc/df-problems.c
diff options
context:
space:
mode:
authorBernd Schmidt <bernds@codesourcery.com>2010-12-14 00:23:40 +0000
committerBernd Schmidt <bernds@codesourcery.com>2010-12-14 00:23:40 +0000
commit32902f334535ed4e2155890269e2af6d4d3a6c15 (patch)
tree7224059fe740ae1327ffaffd7f83fc7d51dee038 /gcc/df-problems.c
parenta34555fce1dfaefb23f379c8a906f29b5b0eb7cd (diff)
gcc/
gcc/
	PR rtl-optimization/44374
	Reapply patch with fixes.
	* basic-block.h (enum bb_flags): Add BB_MODIFIED.
	* df-core.c (df_set_bb_dirty): Set it.
	* ifcvt.c (find_memory): Remove function.
	(dead_or_predicable): Use can_move_insns_across.
	* df.h (can_move_insns_across): Declare function.
	* cfgcleanup.c (block_was_dirty): New static variable.
	(flow_find_head_matching_sequence): Test for epilogue notes.
	(try_crossjump_bb, try_forward_edges): Test BB_MODIFIED flag
	rather than df_get_bb_dirty.
	(try_head_merge_bb): New static function.
	(try_optimize_cfg): Call it.  Call df_analyze if block_was_dirty
	is set.
	* df-problems.c: Include "target.h"
	(df_simulate_find_uses): New static function.
	(MEMREF_NORMAL, MEMREF_VOLATILE): New macros.
	(find_memory, find_memory_store): New static functions.
	(can_move_insns_across): New function.
	* Makefile.in (df-problems.o): Update dependencies.

gcc/testsuite/
	PR rtl-optimization/44374
	Reapply patch with fixes.
	* gcc.target/arm/headmerge-1.c: New test.
	* gcc.target/arm/headmerge-2.c: New test.
	* gcc.target/i386/headmerge-1.c: New test.
	* gcc.target/i386/headmerge-2.c: New test.

git-svn-id: https://gcc.gnu.org/svn/gcc/trunk@167779 138bc75d-0d04-0410-961f-82ee72b054a4
Diffstat (limited to 'gcc/df-problems.c')
-rw-r--r--  gcc/df-problems.c  318
1 file changed, 318 insertions(+), 0 deletions(-)
diff --git a/gcc/df-problems.c b/gcc/df-problems.c
index 559af918462..d9fbc85882c 100644
--- a/gcc/df-problems.c
+++ b/gcc/df-problems.c
@@ -39,6 +39,7 @@ along with GCC; see the file COPYING3. If not see
#include "basic-block.h"
#include "sbitmap.h"
#include "bitmap.h"
+#include "target.h"
#include "timevar.h"
#include "df.h"
#include "except.h"
@@ -3541,6 +3542,27 @@ df_simulate_find_defs (rtx insn, bitmap defs)
}
}
+/* Record in USES the set of registers used by INSN.  Partial and
+   conditional defs are counted as uses too, because the register's
+   previous value remains (partially) live across such a def and must
+   therefore be treated as read by the insn.  */
+
+static void
+df_simulate_find_uses (rtx insn, bitmap uses)
+{
+ df_ref *rec;
+ unsigned int uid = INSN_UID (insn);
+
+ for (rec = DF_INSN_UID_DEFS (uid); *rec; rec++)
+ {
+ df_ref def = *rec;
+ if (DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))
+ bitmap_set_bit (uses, DF_REF_REGNO (def));
+ }
+ for (rec = DF_INSN_UID_USES (uid); *rec; rec++)
+ {
+ df_ref use = *rec;
+ bitmap_set_bit (uses, DF_REF_REGNO (use));
+ }
+}
+
/* Find the set of real DEFs, which are not clobbers, for INSN. */
void
@@ -3768,7 +3790,303 @@ df_simulate_one_insn_forwards (basic_block bb, rtx insn, bitmap live)
}
df_simulate_fixup_sets (bb, live);
}
+
+/* Used by the next two functions to encode information about the
+   memory references we found: MEMREF_NORMAL for an ordinary memory
+   access, MEMREF_VOLATILE for a volatile one.  */
+#define MEMREF_NORMAL 1
+#define MEMREF_VOLATILE 2
+
+/* A subroutine of can_move_insns_across, called through for_each_rtx.
+   Return MEMREF_VOLATILE for a volatile MEM or volatile ASM_OPERANDS,
+   MEMREF_NORMAL for any other MEM that is not read-only, and zero
+   (continue traversal) otherwise.  Read-only MEMs are ignored since
+   they cannot conflict with any store.  */
+
+static int
+find_memory (rtx *px, void *data ATTRIBUTE_UNUSED)
+{
+ rtx x = *px;
+
+ if (GET_CODE (x) == ASM_OPERANDS && MEM_VOLATILE_P (x))
+ return MEMREF_VOLATILE;
+
+ if (!MEM_P (x))
+ return 0;
+ if (MEM_VOLATILE_P (x))
+ return MEMREF_VOLATILE;
+ if (MEM_READONLY_P (x))
+ return 0;
+
+ return MEMREF_NORMAL;
+}
+
+/* A subroutine of can_move_insns_across, called through note_stores
+   with X the destination of a store.  DATA points to an integer in
+   which we set the bit for MEMREF_NORMAL or MEMREF_VOLATILE if we
+   find a MEM of either kind.  (DATA is in fact used despite the
+   ATTRIBUTE_UNUSED marker, which is merely vestigial here.)  */
+
+static void
+find_memory_stores (rtx x, const_rtx pat ATTRIBUTE_UNUSED,
+ void *data ATTRIBUTE_UNUSED)
+{
+ int *pflags = (int *)data;
+ if (GET_CODE (x) == SUBREG)
+ x = XEXP (x, 0);
+ /* Treat stores to SP as stores to memory; this will prevent problems
+ when there are references to the stack frame.  */
+ if (x == stack_pointer_rtx)
+ *pflags |= MEMREF_VOLATILE;
+ if (!MEM_P (x))
+ return;
+ *pflags |= MEM_VOLATILE_P (x) ? MEMREF_VOLATILE : MEMREF_NORMAL;
+}
+
+/* Scan BB backwards, using df_simulate functions to keep track of
+   register lifetimes, stopping before insn POINT; POINT itself is not
+   simulated.  On return, LIVE holds the set of registers live
+   immediately after POINT.  */
+
+void
+simulate_backwards_to_point (basic_block bb, regset live, rtx point)
+{
+ rtx insn;
+ /* Start from the block's live-out set and walk backwards.  */
+ bitmap_copy (live, df_get_live_out (bb));
+ df_simulate_initialize_backwards (bb, live);
+
+ /* Scan and update life information until we reach the point we're
+ interested in.  */
+ for (insn = BB_END (bb); insn != point; insn = PREV_INSN (insn))
+ df_simulate_one_insn_backwards (bb, insn, live);
+}
+
+/* Return true if it is safe to move a group of insns, described by
+   the range FROM to TO, backwards across another group of insns,
+   described by ACROSS_FROM to ACROSS_TO.  It is assumed that there
+   are no insns between ACROSS_TO and FROM, but they may be in
+   different basic blocks; MERGE_BB is the block from which the
+   insns will be moved.  The caller must pass in a regset MERGE_LIVE
+   which specifies the registers live after TO.
+
+   This function may be called in one of two cases: either we try to
+   move identical instructions from all successor blocks into their
+   predecessor, or we try to move from only one successor block.  If
+   OTHER_BRANCH_LIVE is nonnull, it indicates that we're dealing with
+   the second case.  It should contain a set of registers live at the
+   end of ACROSS_TO which must not be clobbered by moving the insns.
+   In that case, we're also more careful about moving memory references
+   and trapping insns.
+
+   We return false if it is not safe to move the entire group, but it
+   may still be possible to move a subgroup.  PMOVE_UPTO, if nonnull,
+   is set to point at the last moveable insn in such a case.  */
+bool
+can_move_insns_across (rtx from, rtx to, rtx across_from, rtx across_to,
+ basic_block merge_bb, regset merge_live,
+ regset other_branch_live, rtx *pmove_upto)
+{
+ rtx insn, next, max_to;
+ bitmap merge_set, merge_use, local_merge_live;
+ bitmap test_set, test_use;
+ unsigned i, fail = 0;
+ bitmap_iterator bi;
+ int memrefs_in_across = 0;
+ int mem_sets_in_across = 0;
+ bool trapping_insns_in_across = false;
+
+ if (pmove_upto != NULL)
+ *pmove_upto = NULL_RTX;
+
+ /* Find real bounds, ignoring debug insns.  */
+ while (!NONDEBUG_INSN_P (from) && from != to)
+ from = NEXT_INSN (from);
+ while (!NONDEBUG_INSN_P (to) && from != to)
+ to = PREV_INSN (to);
+
+ /* First pass over the ACROSS range: record what kinds of memory
+ references and stores it contains, and whether it can trap.  */
+ for (insn = across_to; ; insn = next)
+ {
+ if (NONDEBUG_INSN_P (insn))
+ {
+ memrefs_in_across |= for_each_rtx (&PATTERN (insn), find_memory,
+ NULL);
+ note_stores (PATTERN (insn), find_memory_stores,
+ &mem_sets_in_across);
+ /* This is used just to find sets of the stack pointer.  */
+ memrefs_in_across |= mem_sets_in_across;
+ trapping_insns_in_across |= may_trap_p (PATTERN (insn));
+ }
+ next = PREV_INSN (insn);
+ if (insn == across_from)
+ break;
+ }
+
+ /* Collect:
+ MERGE_SET = set of registers set in MERGE_BB
+ MERGE_USE = set of registers used in MERGE_BB and live at its top
+ MERGE_LIVE = set of registers live at the point inside the MERGE
+ range that we've reached during scanning
+ TEST_SET = set of registers set between ACROSS_FROM and ACROSS_TO.
+ TEST_USE = set of registers used between ACROSS_FROM and ACROSS_TO,
+ and live before ACROSS_FROM.  */
+
+ merge_set = BITMAP_ALLOC (&reg_obstack);
+ merge_use = BITMAP_ALLOC (&reg_obstack);
+ local_merge_live = BITMAP_ALLOC (&reg_obstack);
+ test_set = BITMAP_ALLOC (&reg_obstack);
+ test_use = BITMAP_ALLOC (&reg_obstack);
+
+ /* Compute the set of registers set and used in the ACROSS range.  */
+ if (other_branch_live != NULL)
+ bitmap_copy (test_use, other_branch_live);
+ df_simulate_initialize_backwards (merge_bb, test_use);
+ for (insn = across_to; ; insn = next)
+ {
+ if (NONDEBUG_INSN_P (insn))
+ {
+ df_simulate_find_defs (insn, test_set);
+ df_simulate_defs (insn, test_use);
+ df_simulate_uses (insn, test_use);
+ }
+ next = PREV_INSN (insn);
+ if (insn == across_from)
+ break;
+ }
+
+ /* Compute an upper bound for the amount of insns moved, by finding
+ the first insn in MERGE that sets a register in TEST_USE, or uses
+ a register in TEST_SET.  We also check for calls, trapping operations,
+ and memory references.  */
+ max_to = NULL_RTX;
+ for (insn = from; ; insn = next)
+ {
+ if (CALL_P (insn))
+ break;
+ if (NOTE_P (insn) && NOTE_KIND (insn) == NOTE_INSN_EPILOGUE_BEG)
+ break;
+ if (NONDEBUG_INSN_P (insn))
+ {
+ if (may_trap_p (PATTERN (insn))
+ && (trapping_insns_in_across || other_branch_live != NULL))
+ break;
+
+ /* We cannot move memory stores past each other, or move memory
+ reads past stores, at least not without tracking them and
+ calling true_dependence on every pair.
+
+ If there is no other branch and no memory references or
+ sets in the ACROSS range, we can move memory references
+ freely, even volatile ones.
+
+ Otherwise, the rules are as follows: volatile memory
+ references and stores can't be moved at all, and any type
+ of memory reference can't be moved if there are volatile
+ accesses or stores in the ACROSS range.  That leaves
+ normal reads, which can be moved, as the trapping case is
+ dealt with elsewhere.  */
+ if (other_branch_live != NULL || memrefs_in_across != 0)
+ {
+ int mem_ref_flags = 0;
+ int mem_set_flags = 0;
+ note_stores (PATTERN (insn), find_memory_stores, &mem_set_flags);
+ mem_ref_flags = for_each_rtx (&PATTERN (insn), find_memory,
+ NULL);
+ /* Catch sets of the stack pointer.  */
+ mem_ref_flags |= mem_set_flags;
+
+ if ((mem_ref_flags | mem_set_flags) & MEMREF_VOLATILE)
+ break;
+ if ((memrefs_in_across & MEMREF_VOLATILE) && mem_ref_flags != 0)
+ break;
+ if (mem_set_flags != 0
+ || (mem_sets_in_across != 0 && mem_ref_flags != 0))
+ break;
+ }
+ df_simulate_find_uses (insn, merge_use);
+ /* We're only interested in uses which use a value live at
+ the top, not one previously set in this block.  */
+ bitmap_and_compl_into (merge_use, merge_set);
+ df_simulate_find_defs (insn, merge_set);
+ if (bitmap_intersect_p (merge_set, test_use)
+ || bitmap_intersect_p (merge_use, test_set))
+ break;
+ max_to = insn;
+ }
+ next = NEXT_INSN (insn);
+ if (insn == to)
+ break;
+ }
+ if (max_to != to)
+ fail = 1;
+
+ /* Nothing movable at all, or a partial move when the caller cannot
+ accept one: give up.  */
+ if (max_to == NULL_RTX || (fail && pmove_upto == NULL))
+ goto out;
+
+ /* Now, lower this upper bound by also taking into account that
+ a range of insns moved across ACROSS must not leave a register
+ live at the end that will be clobbered in ACROSS.  We need to
+ find a point where TEST_SET & LIVE == 0.
+
+ Insns in the MERGE range that set registers which are also set
+ in the ACROSS range may still be moved as long as we also move
+ later insns which use the results of the set, and make the
+ register dead again.  This is verified by the condition stated
+ above.  We only need to test it for registers that are set in
+ the moved region.
+
+ MERGE_LIVE is provided by the caller and holds live registers after
+ TO.  */
+ bitmap_copy (local_merge_live, merge_live);
+ for (insn = to; insn != max_to; insn = PREV_INSN (insn))
+ df_simulate_one_insn_backwards (merge_bb, insn, local_merge_live);
+
+ /* We're not interested in registers that aren't set in the moved
+ region at all.  */
+ bitmap_and_into (local_merge_live, merge_set);
+ for (;;)
+ {
+ if (NONDEBUG_INSN_P (insn))
+ {
+ if (!bitmap_intersect_p (test_set, local_merge_live))
+ {
+ /* Found a safe endpoint for the move.  */
+ max_to = insn;
+ break;
+ }
+
+ df_simulate_one_insn_backwards (merge_bb, insn,
+ local_merge_live);
+ }
+ if (insn == from)
+ {
+ fail = 1;
+ goto out;
+ }
+ insn = PREV_INSN (insn);
+ }
+
+ if (max_to != to)
+ fail = 1;
+
+ if (pmove_upto)
+ *pmove_upto = max_to;
+
+ /* For small register class machines, don't lengthen lifetimes of
+ hard registers before reload.  */
+ if (! reload_completed
+ && targetm.small_register_classes_for_mode_p (VOIDmode))
+ {
+ EXECUTE_IF_SET_IN_BITMAP (merge_set, 0, i, bi)
+ {
+ if (i < FIRST_PSEUDO_REGISTER
+ && ! fixed_regs[i]
+ && ! global_regs[i])
+ fail = 1;
+ }
+ }
+
+ out:
+ BITMAP_FREE (merge_set);
+ BITMAP_FREE (merge_use);
+ BITMAP_FREE (local_merge_live);
+ BITMAP_FREE (test_set);
+ BITMAP_FREE (test_use);
+
+ return !fail;
+}
/*----------------------------------------------------------------------------