author     andreao <andreao@138bc75d-0d04-0410-961f-82ee72b054a4>  2009-04-20 16:41:05 +0000
committer  andreao <andreao@138bc75d-0d04-0410-961f-82ee72b054a4>  2009-04-20 16:41:05 +0000
commit     10b5c208be3d3e81d708deade45660884186840b (patch)
tree       061626868454c71c7c93be4e174822f8df2ffbb1
parent     3a5d771aad30cd87f79bebb9cefdafbf3411cc40 (diff)
merge from trunk revisions 127002-132392, 11/N
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/branches/st/cli-be@146442 138bc75d-0d04-0410-961f-82ee72b054a4
-rw-r--r--  gcc/df-core.c                           204
-rw-r--r--  gcc/df-problems.c                      1493
-rw-r--r--  gcc/df-scan.c                           107
-rw-r--r--  gcc/testsuite/gcc.dg/vect/vect-106.c      1
4 files changed, 328 insertions, 1477 deletions
diff --git a/gcc/df-core.c b/gcc/df-core.c
index 16e55b752ff..5404000ef39 100644
--- a/gcc/df-core.c
+++ b/gcc/df-core.c
@@ -71,7 +71,7 @@ USAGE:
Here is an example of using the dataflow routines.
- df_[ru,rd,urec,ri,chain]_add_problem (flags);
+ df_[chain,live,note,rd]_add_problem (flags);
df_set_blocks (blocks);
@@ -79,9 +79,9 @@ Here is an example of using the dataflow routines.
df_dump (stderr);
- df_finish_pass ();
+ df_finish_pass (false);
-DF_[ru,rd,urec,ri,chain]_ADD_PROBLEM adds a problem, defined by an
+DF_[chain,live,note,rd]_ADD_PROBLEM adds a problem, defined by an
instance to struct df_problem, to the set of problems solved in this
instance of df. All calls to add a problem for a given instance of df
must occur before the first call to DF_ANALYZE.
@@ -144,7 +144,7 @@ There are four ways of doing the incremental scanning:
For most modern rtl passes, this is certainly the easiest way to
manage rescanning the insns. This technique also has the advantage
that the scanning information is always correct and can be relied
- apon even after changes have been made to the instructions. This
+ upon even after changes have been made to the instructions. This
technique is contra indicated in several cases:
a) If def-use chains OR use-def chains (but not both) are built,
@@ -399,6 +399,7 @@ are write-only operations.
#include "timevar.h"
#include "df.h"
#include "tree-pass.h"
+#include "params.h"
static void *df_get_bb_info (struct dataflow *, unsigned int);
static void df_set_bb_info (struct dataflow *, unsigned int, void *);
@@ -628,12 +629,12 @@ df_remove_problem (struct dataflow *dflow)
}
-/* Remove all of the problems that are not permanent. Scanning, lr,
- ur and live are permanent, the rest are removable. Also clear all
- of the changeable_flags. */
+/* Remove all of the problems that are not permanent. Scanning, LR
+ and (at -O2 or higher) LIVE are permanent, the rest are removable.
+ Also clear all of the changeable_flags. */
void
-df_finish_pass (void)
+df_finish_pass (bool verify ATTRIBUTE_UNUSED)
{
int i;
int removed = 0;
@@ -694,6 +695,11 @@ df_finish_pass (void)
df_set_clean_cfg ();
#endif
#endif
+
+#ifdef ENABLE_CHECKING
+ if (verify)
+ df->changeable_flags |= DF_VERIFY_SCHEDULED;
+#endif
}
@@ -926,6 +932,105 @@ df_worklist_propagate_backward (struct dataflow *dataflow,
}
+
+/* This will free "pending". */
+static void
+df_worklist_dataflow_overeager (struct dataflow *dataflow,
+ bitmap pending,
+ sbitmap considered,
+ int *blocks_in_postorder,
+ unsigned *bbindex_to_postorder)
+{
+ enum df_flow_dir dir = dataflow->problem->dir;
+ int count = 0;
+
+ while (!bitmap_empty_p (pending))
+ {
+ unsigned bb_index;
+ int index;
+ count++;
+
+ index = bitmap_first_set_bit (pending);
+ bitmap_clear_bit (pending, index);
+
+ bb_index = blocks_in_postorder[index];
+
+ if (dir == DF_FORWARD)
+ df_worklist_propagate_forward (dataflow, bb_index,
+ bbindex_to_postorder,
+ pending, considered);
+ else
+ df_worklist_propagate_backward (dataflow, bb_index,
+ bbindex_to_postorder,
+ pending, considered);
+ }
+
+ BITMAP_FREE (pending);
+
+ /* Dump statistics. */
+ if (dump_file)
+ fprintf (dump_file, "df_worklist_dataflow_overeager:"
+ "n_basic_blocks %d n_edges %d"
+ " count %d (%5.2g)\n",
+ n_basic_blocks, n_edges,
+ count, count / (float)n_basic_blocks);
+}
+
+static void
+df_worklist_dataflow_doublequeue (struct dataflow *dataflow,
+ bitmap pending,
+ sbitmap considered,
+ int *blocks_in_postorder,
+ unsigned *bbindex_to_postorder)
+{
+ enum df_flow_dir dir = dataflow->problem->dir;
+ int dcount = 0;
+ bitmap worklist = BITMAP_ALLOC (&df_bitmap_obstack);
+
+ /* Double-queueing. Worklist is for the current iteration,
+ and pending is for the next. */
+ while (!bitmap_empty_p (pending))
+ {
+ /* Swap pending and worklist. */
+ bitmap temp = worklist;
+ worklist = pending;
+ pending = temp;
+
+ do
+ {
+ int index;
+ unsigned bb_index;
+ dcount++;
+
+ index = bitmap_first_set_bit (worklist);
+ bitmap_clear_bit (worklist, index);
+
+ bb_index = blocks_in_postorder[index];
+
+ if (dir == DF_FORWARD)
+ df_worklist_propagate_forward (dataflow, bb_index,
+ bbindex_to_postorder,
+ pending, considered);
+ else
+ df_worklist_propagate_backward (dataflow, bb_index,
+ bbindex_to_postorder,
+ pending, considered);
+ }
+ while (!bitmap_empty_p (worklist));
+ }
+
+ BITMAP_FREE (worklist);
+ BITMAP_FREE (pending);
+
+ /* Dump statistics. */
+ if (dump_file)
+ fprintf (dump_file, "df_worklist_dataflow_doublequeue:"
+ "n_basic_blocks %d n_edges %d"
+ " count %d (%5.2g)\n",
+ n_basic_blocks, n_edges,
+ dcount, dcount / (float)n_basic_blocks);
+}
+
/* Worklist-based dataflow solver. It uses sbitmap as a worklist,
with "n"-th bit representing the n-th block in the reverse-postorder order.
This is so-called over-eager algorithm where it propagates
@@ -937,7 +1042,14 @@ df_worklist_propagate_backward (struct dataflow *dataflow,
iterative algorithm by some margin overall.
Note that this is slightly different from the traditional textbook worklist solver,
in that the worklist is effectively sorted by the reverse postorder.
- For CFGs with no nested loops, this is optimal. */
+ For CFGs with no nested loops, this is optimal.
+
+   While the overeager algorithm works well for typical inputs, it can
+   degenerate into excessive iterations on CFGs with deeply nested or
+   unstructured loops.  To cap the iteration count in such cases, we
+   switch to double-queueing when the original algorithm appears to be
+   heading in that direction.
+ */
void
df_worklist_dataflow (struct dataflow *dataflow,
@@ -978,29 +1090,31 @@ df_worklist_dataflow (struct dataflow *dataflow,
bitmap_set_bit (pending, i);
}
+ /* Initialize the problem. */
if (dataflow->problem->init_fun)
dataflow->problem->init_fun (blocks_to_consider);
- while (!bitmap_empty_p (pending))
+ /* Solve it. Determine the solving algorithm
+ based on a simple heuristic. */
+ if (n_edges > PARAM_VALUE (PARAM_DF_DOUBLE_QUEUE_THRESHOLD_FACTOR)
+ * n_basic_blocks)
{
- unsigned bb_index;
-
- index = bitmap_first_set_bit (pending);
- bitmap_clear_bit (pending, index);
-
- bb_index = blocks_in_postorder[index];
-
- if (dir == DF_FORWARD)
- df_worklist_propagate_forward (dataflow, bb_index,
- bbindex_to_postorder,
- pending, considered);
- else
- df_worklist_propagate_backward (dataflow, bb_index,
- bbindex_to_postorder,
- pending, considered);
+ /* High average connectivity, meaning a dense graph
+ that is more likely to contain deeply nested loops
+ or unstructured loops. */
+ df_worklist_dataflow_doublequeue (dataflow, pending, considered,
+ blocks_in_postorder,
+ bbindex_to_postorder);
+ }
+ else
+ {
+ /* Most inputs fall into this case
+ with a relatively flat or structured CFG. */
+ df_worklist_dataflow_overeager (dataflow, pending, considered,
+ blocks_in_postorder,
+ bbindex_to_postorder);
}
- BITMAP_FREE (pending);
sbitmap_free (considered);
free (bbindex_to_postorder);
}
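
The double-queueing loop added above is easiest to see in isolation. The following is a minimal, standalone sketch of the same sweep pattern on a toy forward problem: worklist holds the blocks of the current sweep, pending collects the blocks whose inputs changed and must be revisited in the next sweep, and the two are swapped between sweeps. The toy CFG, the max-plus transfer function, and every name in it are made up for illustration; the real solver above operates on GCC bitmaps indexed by reverse postorder.

/* Standalone sketch (hypothetical names, toy CFG): double-queueing for a
   tiny forward dataflow problem.  "worklist" is drained for the current
   sweep; blocks whose output changed push their successors onto "pending",
   which becomes the worklist of the next sweep.  */
#include <stdio.h>
#include <string.h>

#define N_BLOCKS 4

/* Adjacency matrices for a small diamond-like CFG:
   0 -> 1, 1 -> 2, 1 -> 3, 2 -> 3.  */
static const int preds[N_BLOCKS][N_BLOCKS] = {
  {0,0,0,0}, {1,0,0,0}, {0,1,0,0}, {0,1,1,0}
};
static const int succs[N_BLOCKS][N_BLOCKS] = {
  {0,1,0,0}, {0,0,1,1}, {0,0,0,1}, {0,0,0,0}
};

int
main (void)
{
  /* Toy transfer function: in[b] = max of out[p] over preds of b,
     out[b] = in[b] + gen[b].  */
  int in[N_BLOCKS] = {0}, out[N_BLOCKS] = {0};
  int gen[N_BLOCKS] = {1, 2, 3, 4};
  int pending[N_BLOCKS], worklist[N_BLOCKS];
  int i, j, have_pending = N_BLOCKS;

  for (i = 0; i < N_BLOCKS; i++)
    pending[i] = 1;

  while (have_pending)
    {
      /* Swap pending and worklist: finish the current sweep completely
         before touching anything queued for the next one.  */
      memcpy (worklist, pending, sizeof (worklist));
      memset (pending, 0, sizeof (pending));
      have_pending = 0;

      for (i = 0; i < N_BLOCKS; i++)
        {
          int new_in = 0, new_out;
          if (!worklist[i])
            continue;
          worklist[i] = 0;

          for (j = 0; j < N_BLOCKS; j++)
            if (preds[i][j] && out[j] > new_in)
              new_in = out[j];
          new_out = new_in + gen[i];

          if (new_out != out[i])
            {
              in[i] = new_in;
              out[i] = new_out;
              /* Changed block: successors go on the *next* sweep's queue.  */
              for (j = 0; j < N_BLOCKS; j++)
                if (succs[i][j] && !pending[j])
                  {
                    pending[j] = 1;
                    have_pending++;
                  }
            }
        }
    }

  for (i = 0; i < N_BLOCKS; i++)
    printf ("block %d: in=%d out=%d\n", i, in[i], out[i]);
  return 0;
}

The over-eager variant kept above differs only in that a changed block's successors go straight back onto the single queue, which can cause blocks inside deep loop nests to be revisited many times before the outer blocks settle; capping that effect is what the heuristic in df_worklist_dataflow selects for.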
@@ -1100,9 +1214,10 @@ df_analyze (void)
if (dump_file)
fprintf (dump_file, "df_analyze called\n");
-#ifdef ENABLE_DF_CHECKING
- df_verify ();
-#endif
+#ifndef ENABLE_DF_CHECKING
+ if (df->changeable_flags & DF_VERIFY_SCHEDULED)
+#endif
+ df_verify ();
for (i = 0; i < df->n_blocks; i++)
bitmap_set_bit (current_all_blocks, df->postorder[i]);
@@ -1509,9 +1624,11 @@ void
df_verify (void)
{
df_scan_verify ();
+#ifdef ENABLE_DF_CHECKING
df_lr_verify_transfer_functions ();
if (df_live)
df_live_verify_transfer_functions ();
+#endif
}
#ifdef DF_DEBUG_CFG
@@ -1753,6 +1870,7 @@ df_print_regset (FILE *file, bitmap r)
/* Dump dataflow info. */
+
void
df_dump (FILE *file)
{
@@ -1770,6 +1888,34 @@ df_dump (FILE *file)
}
+/* Dump dataflow info for df->blocks_to_analyze. */
+
+void
+df_dump_region (FILE *file)
+{
+ if (df->blocks_to_analyze)
+ {
+ bitmap_iterator bi;
+ unsigned int bb_index;
+
+ fprintf (file, "\n\nstarting region dump\n");
+ df_dump_start (file);
+
+ EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
+ {
+ basic_block bb = BASIC_BLOCK (bb_index);
+
+ df_print_bb_index (bb, file);
+ df_dump_top (bb, file);
+ df_dump_bottom (bb, file);
+ }
+ fprintf (file, "\n");
+ }
+ else
+ df_dump (file);
+}
+
+
/* Dump the introductory information for each problem defined. */
void
diff --git a/gcc/df-problems.c b/gcc/df-problems.c
index fa349633556..46aa9e03f4c 100644
--- a/gcc/df-problems.c
+++ b/gcc/df-problems.c
@@ -71,9 +71,7 @@ df_get_live_out (basic_block bb)
{
gcc_assert (df_lr);
- if (df_urec)
- return DF_RA_LIVE_OUT (bb);
- else if (df_live)
+ if (df_live)
return DF_LIVE_OUT (bb);
else
return DF_LR_OUT (bb);
@@ -89,31 +87,12 @@ df_get_live_in (basic_block bb)
{
gcc_assert (df_lr);
- if (df_urec)
- return DF_RA_LIVE_IN (bb);
- else if (df_live)
+ if (df_live)
return DF_LIVE_IN (bb);
else
return DF_LR_IN (bb);
}
-/* Get the live at top set for BB no matter what problem happens to be
- defined. This function is used by the register allocators who
- choose different dataflow problems depending on the optimization
- level. */
-
-bitmap
-df_get_live_top (basic_block bb)
-{
- gcc_assert (df_lr);
-
- if (df_urec)
- return DF_RA_LIVE_TOP (bb);
- else
- return DF_LR_TOP (bb);
-}
-
-
/*----------------------------------------------------------------------------
Utility functions.
----------------------------------------------------------------------------*/
@@ -202,24 +181,24 @@ df_unset_seen (void)
/*----------------------------------------------------------------------------
- REACHING USES
+ REACHING DEFINITIONS
- Find the locations in the function where each use site for a pseudo
- can reach backwards. In and out bitvectors are built for each basic
+ Find the locations in the function where each definition site for a
+ pseudo reaches. In and out bitvectors are built for each basic
block. The id field in the ref is used to index into these sets.
See df.h for details.
-
-----------------------------------------------------------------------------*/
+ ----------------------------------------------------------------------------*/
/* This problem plays a large number of games for the sake of
efficiency.
1) The order of the bits in the bitvectors. After the scanning
- phase, all of the uses are sorted. All of the uses for the reg 0
- are first, followed by all uses for reg 1 and so on.
+ phase, all of the defs are sorted. All of the defs for the reg 0
+ are first, followed by all defs for reg 1 and so on.
- 2) There are two kill sets, one if the number of uses is less or
- equal to DF_SPARSE_THRESHOLD and another if it is greater.
+ 2) There are two kill sets, one if the number of defs is less or
+ equal to DF_SPARSE_THRESHOLD and another if the number of defs is
+ greater.
<= : Data is built directly in the kill set.
@@ -235,517 +214,6 @@ df_unset_seen (void)
/* Private data used to compute the solution for this problem. These
data structures are not accessible outside of this module. */
-struct df_ru_problem_data
-{
- /* The set of defs to regs invalidated by call. */
- bitmap sparse_invalidated_by_call;
- /* The set of defs to regs invalidated by call for ru. */
- bitmap dense_invalidated_by_call;
- /* An obstack for the bitmaps we need for this problem. */
- bitmap_obstack ru_bitmaps;
-};
-
-/* Set basic block info. */
-
-static void
-df_ru_set_bb_info (unsigned int index, struct df_ru_bb_info *bb_info)
-{
- gcc_assert (df_ru);
- gcc_assert (index < df_ru->block_info_size);
- df_ru->block_info[index] = bb_info;
-}
-
-
-/* Free basic block info. */
-
-static void
-df_ru_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
- void *vbb_info)
-{
- struct df_ru_bb_info *bb_info = (struct df_ru_bb_info *) vbb_info;
- if (bb_info)
- {
- BITMAP_FREE (bb_info->kill);
- BITMAP_FREE (bb_info->sparse_kill);
- BITMAP_FREE (bb_info->gen);
- BITMAP_FREE (bb_info->in);
- BITMAP_FREE (bb_info->out);
- pool_free (df_ru->block_pool, bb_info);
- }
-}
-
-
-/* Allocate or reset bitmaps for DF_RU blocks. The solution bits are
- not touched unless the block is new. */
-
-static void
-df_ru_alloc (bitmap all_blocks)
-{
- unsigned int bb_index;
- bitmap_iterator bi;
- struct df_ru_problem_data *problem_data;
-
- if (!df_ru->block_pool)
- df_ru->block_pool = create_alloc_pool ("df_ru_block pool",
- sizeof (struct df_ru_bb_info), 50);
-
- if (df_ru->problem_data)
- {
- problem_data = (struct df_ru_problem_data *) df_ru->problem_data;
- bitmap_clear (problem_data->sparse_invalidated_by_call);
- bitmap_clear (problem_data->dense_invalidated_by_call);
- }
- else
- {
- problem_data = XNEW (struct df_ru_problem_data);
- df_ru->problem_data = problem_data;
-
- bitmap_obstack_initialize (&problem_data->ru_bitmaps);
- problem_data->sparse_invalidated_by_call
- = BITMAP_ALLOC (&problem_data->ru_bitmaps);
- problem_data->dense_invalidated_by_call
- = BITMAP_ALLOC (&problem_data->ru_bitmaps);
- }
-
- df_grow_bb_info (df_ru);
-
- /* Because of the clustering of all def sites for the same pseudo,
- we have to process all of the blocks before doing the
- analysis. */
-
- EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
- {
- struct df_ru_bb_info *bb_info = df_ru_get_bb_info (bb_index);
- if (bb_info)
- {
- bitmap_clear (bb_info->kill);
- bitmap_clear (bb_info->sparse_kill);
- bitmap_clear (bb_info->gen);
- }
- else
- {
- bb_info = (struct df_ru_bb_info *) pool_alloc (df_ru->block_pool);
- df_ru_set_bb_info (bb_index, bb_info);
- bb_info->kill = BITMAP_ALLOC (&problem_data->ru_bitmaps);
- bb_info->sparse_kill = BITMAP_ALLOC (&problem_data->ru_bitmaps);
- bb_info->gen = BITMAP_ALLOC (&problem_data->ru_bitmaps);
- bb_info->in = BITMAP_ALLOC (&problem_data->ru_bitmaps);
- bb_info->out = BITMAP_ALLOC (&problem_data->ru_bitmaps);
- }
- }
- df_ru->optional_p = true;
-}
-
-
-/* Process a list of DEFs for df_ru_bb_local_compute. */
-
-static void
-df_ru_bb_local_compute_process_def (struct df_ru_bb_info *bb_info,
- struct df_ref **def_rec,
- enum df_ref_flags top_flag)
-{
- while (*def_rec)
- {
- struct df_ref *def = *def_rec;
- if ((top_flag == (DF_REF_FLAGS (def) & DF_REF_AT_TOP))
- /* If the def is to only part of the reg, it is as if it did
- not happen, since some of the bits may get thru. */
- && (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))))
- {
- unsigned int regno = DF_REF_REGNO (def);
- unsigned int begin = DF_USES_BEGIN (regno);
- unsigned int n_uses = DF_USES_COUNT (regno);
-
- if (!bitmap_bit_p (seen_in_block, regno))
- {
- /* The first def for regno in the insn, causes the kill
- info to be generated. Do not modify the gen set
- because the only values in it are the uses from here
- to the top of the block and this def does not effect
- them. */
- if (!bitmap_bit_p (seen_in_insn, regno))
- {
- if (n_uses > DF_SPARSE_THRESHOLD)
- bitmap_set_bit (bb_info->sparse_kill, regno);
- else
- bitmap_set_range (bb_info->kill, begin, n_uses);
- }
- bitmap_set_bit (seen_in_insn, regno);
- }
- }
- def_rec++;
- }
-}
-
-
-/* Process a list of USEs for df_ru_bb_local_compute. */
-
-static void
-df_ru_bb_local_compute_process_use (struct df_ru_bb_info *bb_info,
- struct df_ref **use_rec,
- enum df_ref_flags top_flag)
-{
- while (*use_rec)
- {
- struct df_ref *use = *use_rec;
- if (top_flag == (DF_REF_FLAGS (use) & DF_REF_AT_TOP))
- {
- /* Add use to set of gens in this BB unless we have seen a
- def in a previous instruction. */
- unsigned int regno = DF_REF_REGNO (use);
- if (!bitmap_bit_p (seen_in_block, regno))
- bitmap_set_bit (bb_info->gen, DF_REF_ID (use));
- }
- use_rec++;
- }
-}
-
-/* Compute local reaching use (upward exposed use) info for basic
- block BB. USE_INFO->REGS[R] caches the set of uses for register R. */
-static void
-df_ru_bb_local_compute (unsigned int bb_index)
-{
- basic_block bb = BASIC_BLOCK (bb_index);
- struct df_ru_bb_info *bb_info = df_ru_get_bb_info (bb_index);
- rtx insn;
-
- /* Set when a def for regno is seen. */
- bitmap_clear (seen_in_block);
- bitmap_clear (seen_in_insn);
-
-#ifdef EH_USES
- /* Variables defined in the prolog that are used by the exception
- handler. */
- df_ru_bb_local_compute_process_use (bb_info,
- df_get_artificial_uses (bb_index),
- DF_REF_AT_TOP);
-#endif
- df_ru_bb_local_compute_process_def (bb_info,
- df_get_artificial_defs (bb_index),
- DF_REF_AT_TOP);
-
- FOR_BB_INSNS (bb, insn)
- {
- unsigned int uid = INSN_UID (insn);
- if (!INSN_P (insn))
- continue;
-
- df_ru_bb_local_compute_process_use (bb_info,
- DF_INSN_UID_USES (uid), 0);
-
- if (df->changeable_flags & DF_EQ_NOTES)
- df_ru_bb_local_compute_process_use (bb_info,
- DF_INSN_UID_EQ_USES (uid), 0);
-
- df_ru_bb_local_compute_process_def (bb_info,
- DF_INSN_UID_DEFS (uid), 0);
-
- bitmap_ior_into (seen_in_block, seen_in_insn);
- bitmap_clear (seen_in_insn);
- }
-
- /* Process the hardware registers that are always live. */
- df_ru_bb_local_compute_process_use (bb_info,
- df_get_artificial_uses (bb_index), 0);
-
- df_ru_bb_local_compute_process_def (bb_info,
- df_get_artificial_defs (bb_index), 0);
-}
-
-
-/* Compute local reaching use (upward exposed use) info for each basic
- block within BLOCKS. */
-static void
-df_ru_local_compute (bitmap all_blocks)
-{
- unsigned int bb_index;
- bitmap_iterator bi;
- unsigned int regno;
- struct df_ru_problem_data *problem_data
- = (struct df_ru_problem_data *) df_ru->problem_data;
- bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
- bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
-
- df_set_seen ();
-
- df_maybe_reorganize_use_refs (df->changeable_flags & DF_EQ_NOTES ?
- DF_REF_ORDER_BY_REG_WITH_NOTES : DF_REF_ORDER_BY_REG);
-
- EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
- {
- df_ru_bb_local_compute (bb_index);
- }
-
- /* Set up the knockout bit vectors to be applied across EH_EDGES. */
- EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, regno, bi)
- {
- if (DF_USES_COUNT (regno) > DF_SPARSE_THRESHOLD)
- bitmap_set_bit (sparse_invalidated, regno);
- else
- bitmap_set_range (dense_invalidated,
- DF_USES_BEGIN (regno),
- DF_USES_COUNT (regno));
- }
-
- df_unset_seen ();
-}
-
-
-/* Initialize the solution bit vectors for problem. */
-
-static void
-df_ru_init_solution (bitmap all_blocks)
-{
- unsigned int bb_index;
- bitmap_iterator bi;
-
- EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
- {
- struct df_ru_bb_info *bb_info = df_ru_get_bb_info (bb_index);
- bitmap_copy (bb_info->in, bb_info->gen);
- bitmap_clear (bb_info->out);
- }
-}
-
-
-/* Out of target gets or of in of source. */
-
-static void
-df_ru_confluence_n (edge e)
-{
- bitmap op1 = df_ru_get_bb_info (e->src->index)->out;
- bitmap op2 = df_ru_get_bb_info (e->dest->index)->in;
-
- if (e->flags & EDGE_EH)
- {
- struct df_ru_problem_data *problem_data
- = (struct df_ru_problem_data *) df_ru->problem_data;
- bitmap sparse_invalidated = problem_data->sparse_invalidated_by_call;
- bitmap dense_invalidated = problem_data->dense_invalidated_by_call;
- bitmap_iterator bi;
- unsigned int regno;
- bitmap tmp = BITMAP_ALLOC (&df_bitmap_obstack);
-
- bitmap_copy (tmp, op2);
- bitmap_and_compl_into (tmp, dense_invalidated);
-
- EXECUTE_IF_SET_IN_BITMAP (sparse_invalidated, 0, regno, bi)
- {
- bitmap_clear_range (tmp,
- DF_USES_BEGIN (regno),
- DF_USES_COUNT (regno));
- }
- bitmap_ior_into (op1, tmp);
- BITMAP_FREE (tmp);
- }
- else
- bitmap_ior_into (op1, op2);
-}
-
-
-/* Transfer function. */
-
-static bool
-df_ru_transfer_function (int bb_index)
-{
- struct df_ru_bb_info *bb_info = df_ru_get_bb_info (bb_index);
- unsigned int regno;
- bitmap_iterator bi;
- bitmap in = bb_info->in;
- bitmap out = bb_info->out;
- bitmap gen = bb_info->gen;
- bitmap kill = bb_info->kill;
- bitmap sparse_kill = bb_info->sparse_kill;
-
- if (bitmap_empty_p (sparse_kill))
- return bitmap_ior_and_compl (in, gen, out, kill);
- else
- {
- struct df_ru_problem_data *problem_data;
- bitmap tmp;
- bool changed = false;
-
- /* Note that TMP is _not_ a temporary bitmap if we end up replacing
- IN with TMP. Therefore, allocate TMP in the RU bitmaps obstack. */
- problem_data = (struct df_ru_problem_data *) df_ru->problem_data;
- tmp = BITMAP_ALLOC (&problem_data->ru_bitmaps);
-
- bitmap_copy (tmp, out);
- EXECUTE_IF_SET_IN_BITMAP (sparse_kill, 0, regno, bi)
- {
- bitmap_clear_range (tmp,
- DF_USES_BEGIN (regno),
- DF_USES_COUNT (regno));
- }
- bitmap_and_compl_into (tmp, kill);
- bitmap_ior_into (tmp, gen);
- changed = !bitmap_equal_p (tmp, in);
- if (changed)
- {
- BITMAP_FREE (in);
- bb_info->in = tmp;
- }
- else
- BITMAP_FREE (tmp);
- return changed;
- }
-}
-
-
-/* Free all storage associated with the problem. */
-
-static void
-df_ru_free (void)
-{
- unsigned int i;
- struct df_ru_problem_data *problem_data
- = (struct df_ru_problem_data *) df_ru->problem_data;
-
- if (problem_data)
- {
- for (i = 0; i < df_ru->block_info_size; i++)
- {
- struct df_ru_bb_info *bb_info = df_ru_get_bb_info (i);
- if (bb_info)
- {
- BITMAP_FREE (bb_info->kill);
- BITMAP_FREE (bb_info->sparse_kill);
- BITMAP_FREE (bb_info->gen);
- BITMAP_FREE (bb_info->in);
- BITMAP_FREE (bb_info->out);
- }
- }
-
- free_alloc_pool (df_ru->block_pool);
- BITMAP_FREE (problem_data->sparse_invalidated_by_call);
- BITMAP_FREE (problem_data->dense_invalidated_by_call);
- bitmap_obstack_release (&problem_data->ru_bitmaps);
-
- df_ru->block_info_size = 0;
- free (df_ru->block_info);
- free (df_ru->problem_data);
- }
- free (df_ru);
-}
-
-
-/* Debugging info. */
-
-static void
-df_ru_start_dump (FILE *file)
-{
- struct df_ru_problem_data *problem_data
- = (struct df_ru_problem_data *) df_ru->problem_data;
- unsigned int m = DF_REG_SIZE(df);
- unsigned int regno;
-
- if (!df_ru->block_info)
- return;
-
- fprintf (file, ";; Reaching uses:\n");
-
- fprintf (file, ";; sparse invalidated \t");
- dump_bitmap (file, problem_data->sparse_invalidated_by_call);
- fprintf (file, " dense invalidated \t");
- dump_bitmap (file, problem_data->dense_invalidated_by_call);
-
- for (regno = 0; regno < m; regno++)
- if (DF_USES_COUNT (regno))
- fprintf (file, "%d[%d,%d] ", regno,
- DF_USES_BEGIN (regno),
- DF_USES_COUNT (regno));
- fprintf (file, "\n");
-}
-
-
-/* Debugging info at top of bb. */
-
-static void
-df_ru_top_dump (basic_block bb, FILE *file)
-{
- struct df_ru_bb_info *bb_info = df_ru_get_bb_info (bb->index);
- if (!bb_info || !bb_info->in)
- return;
-
- fprintf (file, ";; ru in \t(%d)\n", (int) bitmap_count_bits (bb_info->in));
- dump_bitmap (file, bb_info->in);
- fprintf (file, ";; ru gen \t(%d)\n", (int) bitmap_count_bits (bb_info->gen));
- dump_bitmap (file, bb_info->gen);
- fprintf (file, ";; ru kill\t(%d)\n", (int) bitmap_count_bits (bb_info->kill));
- dump_bitmap (file, bb_info->kill);
-}
-
-
-/* Debugging info at bottom of bb. */
-
-static void
-df_ru_bottom_dump (basic_block bb, FILE *file)
-{
- struct df_ru_bb_info *bb_info = df_ru_get_bb_info (bb->index);
- if (!bb_info || !bb_info->out)
- return;
-
- fprintf (file, ";; ru out \t(%d)\n", (int) bitmap_count_bits (bb_info->out));
- dump_bitmap (file, bb_info->out);
-}
-
-
-/* All of the information associated with every instance of the problem. */
-
-static struct df_problem problem_RU =
-{
- DF_RU, /* Problem id. */
- DF_BACKWARD, /* Direction. */
- df_ru_alloc, /* Allocate the problem specific data. */
- NULL, /* Reset global information. */
- df_ru_free_bb_info, /* Free basic block info. */
- df_ru_local_compute, /* Local compute function. */
- df_ru_init_solution, /* Init the solution specific data. */
- df_worklist_dataflow, /* Worklist solver. */
- NULL, /* Confluence operator 0. */
- df_ru_confluence_n, /* Confluence operator n. */
- df_ru_transfer_function, /* Transfer function. */
- NULL, /* Finalize function. */
- df_ru_free, /* Free all of the problem information. */
- df_ru_free, /* Remove this problem from the stack of dataflow problems. */
- df_ru_start_dump, /* Debugging. */
- df_ru_top_dump, /* Debugging start block. */
- df_ru_bottom_dump, /* Debugging end block. */
- NULL, /* Incremental solution verify start. */
- NULL, /* Incremental solution verify end. */
- NULL, /* Dependent problem. */
- TV_DF_RU, /* Timing variable. */
- true /* Reset blocks on dropping out of blocks_to_analyze. */
-};
-
-
-
-/* Create a new DATAFLOW instance and add it to an existing instance
- of DF. The returned structure is what is used to get at the
- solution. */
-
-void
-df_ru_add_problem (void)
-{
- df_add_problem (&problem_RU);
-}
-
-
-/*----------------------------------------------------------------------------
- REACHING DEFINITIONS
-
- Find the locations in the function where each definition site for a
- pseudo reaches. In and out bitvectors are built for each basic
- block. The id field in the ref is used to index into these sets.
- See df.h for details.
- ----------------------------------------------------------------------------*/
-
-/* See the comment at the top of the Reaching Uses problem for how the
- uses are represented in the kill sets. The same games are played
- here for the defs. */
-
-/* Private data used to compute the solution for this problem. These
- data structures are not accessible outside of this module. */
struct df_rd_problem_data
{
/* The set of defs to regs invalidated by call. */
@@ -1014,6 +482,9 @@ df_rd_confluence_n (edge e)
bitmap op1 = df_rd_get_bb_info (e->dest->index)->in;
bitmap op2 = df_rd_get_bb_info (e->src->index)->out;
+ if (e->flags & EDGE_FAKE)
+ return;
+
if (e->flags & EDGE_EH)
{
struct df_rd_problem_data *problem_data
@@ -1270,14 +741,6 @@ df_lr_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
{
BITMAP_FREE (bb_info->use);
BITMAP_FREE (bb_info->def);
- if (bb_info->in == bb_info->top)
- bb_info->top = NULL;
- else
- {
- BITMAP_FREE (bb_info->top);
- BITMAP_FREE (bb_info->ause);
- BITMAP_FREE (bb_info->adef);
- }
BITMAP_FREE (bb_info->in);
BITMAP_FREE (bb_info->out);
pool_free (df_lr->block_pool, bb_info);
@@ -1307,11 +770,6 @@ df_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
bitmap_clear (bb_info->def);
bitmap_clear (bb_info->use);
- if (bb_info->adef)
- {
- bitmap_clear (bb_info->adef);
- bitmap_clear (bb_info->ause);
- }
}
else
{
@@ -1321,9 +779,6 @@ df_lr_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
bb_info->def = BITMAP_ALLOC (NULL);
bb_info->in = BITMAP_ALLOC (NULL);
bb_info->out = BITMAP_ALLOC (NULL);
- bb_info->top = bb_info->in;
- bb_info->adef = NULL;
- bb_info->ause = NULL;
}
}
@@ -1345,7 +800,6 @@ df_lr_reset (bitmap all_blocks)
gcc_assert (bb_info);
bitmap_clear (bb_info->in);
bitmap_clear (bb_info->out);
- bitmap_clear (bb_info->top);
}
}
@@ -1389,53 +843,16 @@ df_lr_bb_local_compute (unsigned int bb_index)
if (!INSN_P (insn))
continue;
- if (CALL_P (insn))
+ for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
+ struct df_ref *def = *def_rec;
+ /* If the def is to only part of the reg, it does
+ not kill the other defs that reach here. */
+ if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
{
- struct df_ref *def = *def_rec;
unsigned int dregno = DF_REF_REGNO (def);
-
- if (DF_REF_FLAGS (def) & DF_REF_MUST_CLOBBER)
- {
- if (dregno >= FIRST_PSEUDO_REGISTER
- || !(SIBLING_CALL_P (insn)
- && bitmap_bit_p (df->exit_block_uses, dregno)
- && !refers_to_regno_p (dregno, dregno+1,
- current_function_return_rtx,
- (rtx *)0)))
- {
- /* If the def is to only part of the reg, it does
- not kill the other defs that reach here. */
- if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- {
- bitmap_set_bit (bb_info->def, dregno);
- bitmap_clear_bit (bb_info->use, dregno);
- }
- }
- }
- else
- /* This is the return value. */
- if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- {
- bitmap_set_bit (bb_info->def, dregno);
- bitmap_clear_bit (bb_info->use, dregno);
- }
- }
- }
- else
- {
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- struct df_ref *def = *def_rec;
- /* If the def is to only part of the reg, it does
- not kill the other defs that reach here. */
- if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- {
- unsigned int dregno = DF_REF_REGNO (def);
- bitmap_set_bit (bb_info->def, dregno);
- bitmap_clear_bit (bb_info->use, dregno);
- }
+ bitmap_set_bit (bb_info->def, dregno);
+ bitmap_clear_bit (bb_info->use, dregno);
}
}
@@ -1446,23 +863,18 @@ df_lr_bb_local_compute (unsigned int bb_index)
bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
}
}
- /* Process the registers set in an exception handler. */
+
+ /* Process the registers set in an exception handler or the hard
+ frame pointer if this block is the target of a non local
+ goto. */
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
- if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP)
- && (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL))))
+ if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
{
unsigned int dregno = DF_REF_REGNO (def);
- if (bb_info->adef == NULL)
- {
- gcc_assert (bb_info->ause == NULL);
- gcc_assert (bb_info->top == bb_info->in);
- bb_info->adef = BITMAP_ALLOC (NULL);
- bb_info->ause = BITMAP_ALLOC (NULL);
- bb_info->top = BITMAP_ALLOC (NULL);
- }
- bitmap_set_bit (bb_info->adef, dregno);
+ bitmap_set_bit (bb_info->def, dregno);
+ bitmap_clear_bit (bb_info->use, dregno);
}
}
@@ -1473,17 +885,7 @@ df_lr_bb_local_compute (unsigned int bb_index)
struct df_ref *use = *use_rec;
/* Add use to set of uses in this BB. */
if (DF_REF_FLAGS (use) & DF_REF_AT_TOP)
- {
- if (bb_info->adef == NULL)
- {
- gcc_assert (bb_info->ause == NULL);
- gcc_assert (bb_info->top == bb_info->in);
- bb_info->adef = BITMAP_ALLOC (NULL);
- bb_info->ause = BITMAP_ALLOC (NULL);
- bb_info->top = BITMAP_ALLOC (NULL);
- }
- bitmap_set_bit (bb_info->ause, DF_REF_REGNO (use));
- }
+ bitmap_set_bit (bb_info->use, DF_REF_REGNO (use));
}
#endif
@@ -1608,26 +1010,15 @@ df_lr_transfer_function (int bb_index)
bitmap out = bb_info->out;
bitmap use = bb_info->use;
bitmap def = bb_info->def;
- bitmap top = bb_info->top;
- bitmap ause = bb_info->ause;
- bitmap adef = bb_info->adef;
- bool changed;
- changed = bitmap_ior_and_compl (top, use, out, def);
- if (in != top)
- {
- gcc_assert (ause && adef);
- changed |= bitmap_ior_and_compl (in, ause, top, adef);
- }
-
- return changed;
+ return bitmap_ior_and_compl (in, use, out, def);
}
/* Run the fast dce as a side effect of building LR. */
static void
-df_lr_local_finalize (bitmap all_blocks ATTRIBUTE_UNUSED)
+df_lr_finalize (bitmap all_blocks ATTRIBUTE_UNUSED)
{
if (df->changeable_flags & DF_LR_RUN_DCE)
{
@@ -1664,14 +1055,6 @@ df_lr_free (void)
{
BITMAP_FREE (bb_info->use);
BITMAP_FREE (bb_info->def);
- if (bb_info->in == bb_info->top)
- bb_info->top = NULL;
- else
- {
- BITMAP_FREE (bb_info->top);
- BITMAP_FREE (bb_info->ause);
- BITMAP_FREE (bb_info->adef);
- }
BITMAP_FREE (bb_info->in);
BITMAP_FREE (bb_info->out);
}
@@ -1781,7 +1164,7 @@ df_lr_verify_solution_end (void)
if (df_lr->solutions_dirty)
/* Do not check if the solution is still dirty. See the comment
- in df_lr_local_finalize for details. */
+ in df_lr_finalize for details. */
df_lr->solutions_dirty = false;
else
FOR_ALL_BB (bb)
@@ -1824,7 +1207,7 @@ static struct df_problem problem_LR =
df_lr_confluence_0, /* Confluence operator 0. */
df_lr_confluence_n, /* Confluence operator n. */
df_lr_transfer_function, /* Transfer function. */
- df_lr_local_finalize, /* Finalize function. */
+ df_lr_finalize, /* Finalize function. */
df_lr_free, /* Free all of the problem information. */
NULL, /* Remove this problem from the stack of dataflow problems. */
NULL, /* Debugging. */
@@ -1864,7 +1247,6 @@ df_lr_verify_transfer_functions (void)
bitmap saved_adef;
bitmap saved_ause;
bitmap all_blocks;
- bool need_as;
if (!df)
return;
@@ -1893,33 +1275,9 @@ df_lr_verify_transfer_functions (void)
bitmap_clear (bb_info->def);
bitmap_clear (bb_info->use);
- if (bb_info->adef)
- {
- need_as = true;
- bitmap_copy (saved_adef, bb_info->adef);
- bitmap_copy (saved_ause, bb_info->ause);
- bitmap_clear (bb_info->adef);
- bitmap_clear (bb_info->ause);
- }
- else
- need_as = false;
-
df_lr_bb_local_compute (bb->index);
gcc_assert (bitmap_equal_p (saved_def, bb_info->def));
gcc_assert (bitmap_equal_p (saved_use, bb_info->use));
-
- if (need_as)
- {
- gcc_assert (bb_info->adef);
- gcc_assert (bb_info->ause);
- gcc_assert (bitmap_equal_p (saved_adef, bb_info->adef));
- gcc_assert (bitmap_equal_p (saved_ause, bb_info->ause));
- }
- else
- {
- gcc_assert (!bb_info->adef);
- gcc_assert (!bb_info->ause);
- }
}
}
else
@@ -1949,16 +1307,22 @@ df_lr_verify_transfer_functions (void)
/*----------------------------------------------------------------------------
- COMBINED LIVE REGISTERS AND UNINITIALIZED REGISTERS.
-
- First find the set of uses for registers that are reachable from
- the entry block without passing thru a definition. In and out
- bitvectors are built for each basic block. The regnum is used to
- index into these sets. See df.h for details.
-
- Then the in and out sets here are the anded results of the in and
- out sets from the lr and ur
- problems.
+ LIVE AND MUST-INITIALIZED REGISTERS.
+
+ This problem first computes the IN and OUT bitvectors for the
+   must-initialized registers problem, which is a forward problem.
+ It gives the set of registers for which we MUST have an available
+ definition on any path from the entry block to the entry/exit of
+ a basic block. Sets generate a definition, while clobbers kill
+ a definition.
+
+ In and out bitvectors are built for each basic block and are indexed by
+ regnum (see df.h for details).  The in and out bitvectors in struct
+ df_live_bb_info actually refer to the must-initialized problem.
+
+ Then, the in and out sets for the LIVE problem itself are computed.
+ These are the logical AND of the IN and OUT sets from the LR problem
+ and the must-initialized problem.
----------------------------------------------------------------------------*/
/* Private data used to verify the solution for this problem. */
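
For reference, the transfer function that the rewritten must-initialized problem uses further down in this patch boils down to out = (gen & lr_out) | ((in & lr_in) & ~kill), i.e. the GEN and IN sets are trimmed against the LR solution before the usual ior-and-compl step. Below is a tiny standalone restatement over plain unsigned ints used as toy bitmaps; the names and example values are hypothetical, and the real df_live_transfer_function operates on GCC bitmaps with a scratch set so that change detection stays accurate.

/* Toy restatement of the must-initialized transfer function: bit k of
   each word stands for register k.  */
#include <stdio.h>

static unsigned
live_transfer (unsigned in, unsigned gen, unsigned kill,
               unsigned lr_in, unsigned lr_out)
{
  unsigned trimmed_gen = gen & lr_out;  /* only track regs still live out */
  unsigned trimmed_in = in & lr_in;     /* ...and live in */
  return trimmed_gen | (trimmed_in & ~kill);
}

int
main (void)
{
  unsigned out = live_transfer (/* in */ 0x5, /* gen */ 0x2, /* kill */ 0x1,
                                /* lr_in */ 0x7, /* lr_out */ 0x6);
  printf ("out = %#x\n", out);  /* 0x6: reg 1 is generated, reg 2 passes through */
  return 0;
}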
@@ -1968,6 +1332,10 @@ struct df_live_problem_data
bitmap *out;
};
+/* Scratch var used by transfer functions. This is used to implement
+ an optimization to reduce the amount of space used to compute the
+ combined lr and live analysis. */
+static bitmap df_live_scratch;
/* Set basic block info. */
@@ -2011,6 +1379,8 @@ df_live_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
if (!df_live->block_pool)
df_live->block_pool = create_alloc_pool ("df_live_block pool",
sizeof (struct df_live_bb_info), 100);
+ if (!df_live_scratch)
+ df_live_scratch = BITMAP_ALLOC (NULL);
df_grow_bb_info (df_live);
@@ -2046,7 +1416,7 @@ df_live_reset (bitmap all_blocks)
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
- struct df_lr_bb_info *bb_info = df_lr_get_bb_info (bb_index);
+ struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
gcc_assert (bb_info);
bitmap_clear (bb_info->in);
bitmap_clear (bb_info->out);
@@ -2065,13 +1435,6 @@ df_live_bb_local_compute (unsigned int bb_index)
struct df_ref **def_rec;
int luid = 0;
- for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
- {
- struct df_ref *def = *def_rec;
- if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
- bitmap_set_bit (bb_info->gen, DF_REF_REGNO (def));
- }
-
FOR_BB_INSNS (bb, insn)
{
unsigned int uid = INSN_UID (insn);
@@ -2112,8 +1475,7 @@ df_live_bb_local_compute (unsigned int bb_index)
for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
{
struct df_ref *def = *def_rec;
- if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
- bitmap_set_bit (bb_info->gen, DF_REF_REGNO (def));
+ bitmap_set_bit (bb_info->gen, DF_REF_REGNO (def));
}
}
@@ -2149,13 +1511,16 @@ df_live_init (bitmap all_blocks)
EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
{
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
+ struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
- bitmap_copy (bb_info->out, bb_info->gen);
+ /* No register may reach a location where it is not used. Thus
+ we trim the rr result to the places where it is used. */
+ bitmap_and (bb_info->out, bb_info->gen, bb_lr_info->out);
bitmap_clear (bb_info->in);
}
}
-/* Confluence function that ignores fake edges. */
+/* Forward confluence function that ignores fake edges. */
static void
df_live_confluence_n (edge e)
@@ -2170,25 +1535,35 @@ df_live_confluence_n (edge e)
}
-/* Transfer function. */
+/* Transfer function for the forwards must-initialized problem. */
static bool
df_live_transfer_function (int bb_index)
{
struct df_live_bb_info *bb_info = df_live_get_bb_info (bb_index);
+ struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
bitmap in = bb_info->in;
bitmap out = bb_info->out;
bitmap gen = bb_info->gen;
bitmap kill = bb_info->kill;
- return bitmap_ior_and_compl (out, gen, in, kill);
+ /* We need to use a scratch set here so that the value returned from
+ this function invocation properly reflects if the sets changed in
+ a significant way; i.e. not just because the lr set was anded
+ in. */
+ bitmap_and (df_live_scratch, gen, bb_lr_info->out);
+ /* No register may reach a location where it is not used. Thus
+ we trim the rr result to the places where it is used. */
+ bitmap_and_into (in, bb_lr_info->in);
+
+ return bitmap_ior_and_compl (out, df_live_scratch, in, kill);
}
-/* And the LR and UR info to produce the LIVE info. */
+/* And the LR info with the must-initialized registers, to produce the LIVE info. */
static void
-df_live_local_finalize (bitmap all_blocks)
+df_live_finalize (bitmap all_blocks)
{
if (df_live->solutions_dirty)
@@ -2200,7 +1575,7 @@ df_live_local_finalize (bitmap all_blocks)
{
struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
struct df_live_bb_info *bb_live_info = df_live_get_bb_info (bb_index);
-
+
/* No register may reach a location where it is not used. Thus
we trim the rr result to the places where it is used. */
bitmap_and_into (bb_live_info->in, bb_lr_info->in);
@@ -2236,6 +1611,9 @@ df_live_free (void)
free_alloc_pool (df_live->block_pool);
df_live->block_info_size = 0;
free (df_live->block_info);
+
+ if (df_live_scratch)
+ BITMAP_FREE (df_live_scratch);
}
BITMAP_FREE (df_live->out_of_date_transfer_functions);
free (df_live);
@@ -2376,7 +1754,7 @@ static struct df_problem problem_LIVE =
NULL, /* Confluence operator 0. */
df_live_confluence_n, /* Confluence operator n. */
df_live_transfer_function, /* Transfer function. */
- df_live_local_finalize, /* Finalize function. */
+ df_live_finalize, /* Finalize function. */
df_live_free, /* Free all of the problem information. */
df_live_free, /* Remove this problem from the stack of dataflow problems. */
NULL, /* Debugging. */
@@ -2480,615 +1858,6 @@ df_live_verify_transfer_functions (void)
BITMAP_FREE (saved_kill);
BITMAP_FREE (all_blocks);
}
-
-
-
-/*----------------------------------------------------------------------------
- UNINITIALIZED REGISTERS WITH EARLYCLOBBER
-
- Find the set of uses for registers that are reachable from the entry
- block without passing thru a definition. In and out bitvectors are built
- for each basic block. The regnum is used to index into these sets.
- See df.h for details.
-
- This is a variant of the UR problem above that has a lot of special
- features just for the register allocation phase. This problem
- should go away if someone would fix the interference graph.
-
- ----------------------------------------------------------------------------*/
-
-/* Private data used to compute the solution for this problem. These
- data structures are not accessible outside of this module. */
-struct df_urec_problem_data
-{
- bool earlyclobbers_found; /* True if any instruction contains an
- earlyclobber. */
-#ifdef STACK_REGS
- bitmap stack_regs; /* Registers that may be allocated to a STACK_REGS. */
-#endif
-};
-
-
-/* Set basic block info. */
-
-static void
-df_urec_set_bb_info (unsigned int index,
- struct df_urec_bb_info *bb_info)
-{
- gcc_assert (df_urec);
- gcc_assert (index < df_urec->block_info_size);
- df_urec->block_info[index] = bb_info;
-}
-
-
-/* Free basic block info. */
-
-static void
-df_urec_free_bb_info (basic_block bb ATTRIBUTE_UNUSED,
- void *vbb_info)
-{
- struct df_urec_bb_info *bb_info = (struct df_urec_bb_info *) vbb_info;
- if (bb_info)
- {
- BITMAP_FREE (bb_info->gen);
- BITMAP_FREE (bb_info->kill);
- BITMAP_FREE (bb_info->in);
- BITMAP_FREE (bb_info->out);
- BITMAP_FREE (bb_info->earlyclobber);
- pool_free (df_urec->block_pool, bb_info);
- }
-}
-
-
-/* Allocate or reset bitmaps for DF_UREC blocks. The solution bits are
- not touched unless the block is new. */
-
-static void
-df_urec_alloc (bitmap all_blocks)
-
-{
- unsigned int bb_index;
- bitmap_iterator bi;
- struct df_urec_problem_data *problem_data
- = (struct df_urec_problem_data *) df_urec->problem_data;
-
- if (!df_urec->block_pool)
- df_urec->block_pool = create_alloc_pool ("df_urec_block pool",
- sizeof (struct df_urec_bb_info), 50);
-
- if (!df_urec->problem_data)
- {
- problem_data = XNEW (struct df_urec_problem_data);
- df_urec->problem_data = problem_data;
- }
- problem_data->earlyclobbers_found = false;
-
- df_grow_bb_info (df_urec);
-
- EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
- {
- struct df_urec_bb_info *bb_info = df_urec_get_bb_info (bb_index);
- if (bb_info)
- {
- bitmap_clear (bb_info->kill);
- bitmap_clear (bb_info->gen);
- bitmap_clear (bb_info->earlyclobber);
- }
- else
- {
- bb_info = (struct df_urec_bb_info *) pool_alloc (df_urec->block_pool);
- df_urec_set_bb_info (bb_index, bb_info);
- bb_info->kill = BITMAP_ALLOC (NULL);
- bb_info->gen = BITMAP_ALLOC (NULL);
- bb_info->in = BITMAP_ALLOC (NULL);
- bb_info->out = BITMAP_ALLOC (NULL);
- bb_info->top = BITMAP_ALLOC (NULL);
- bb_info->earlyclobber = BITMAP_ALLOC (NULL);
- }
- }
- df_urec->optional_p = true;
-}
-
-
-/* The function modifies local info for register REG being changed in
- SETTER. DATA is used to pass the current basic block info. */
-
-static void
-df_urec_mark_reg_change (rtx reg, rtx setter, void *data)
-{
- int regno;
- int endregno;
- int i;
- struct df_urec_bb_info *bb_info = (struct df_urec_bb_info*) data;
-
- if (GET_CODE (reg) == SUBREG)
- reg = SUBREG_REG (reg);
-
- if (!REG_P (reg))
- return;
-
- regno = REGNO (reg);
- if (regno < FIRST_PSEUDO_REGISTER)
- {
- endregno = END_HARD_REGNO (reg);
- for (i = regno; i < endregno; i++)
- {
- bitmap_set_bit (bb_info->kill, i);
-
- if (GET_CODE (setter) != CLOBBER)
- bitmap_set_bit (bb_info->gen, i);
- else
- bitmap_clear_bit (bb_info->gen, i);
- }
- }
- else
- {
- bitmap_set_bit (bb_info->kill, regno);
-
- if (GET_CODE (setter) != CLOBBER)
- bitmap_set_bit (bb_info->gen, regno);
- else
- bitmap_clear_bit (bb_info->gen, regno);
- }
-}
-/* Classes of registers which could be early clobbered in the current
- insn. */
-
-static VEC(int,heap) *earlyclobber_regclass;
-
-/* This function finds and stores register classes that could be early
- clobbered in INSN. If any earlyclobber classes are found, the function
- returns TRUE, in all other cases it returns FALSE. */
-
-static bool
-df_urec_check_earlyclobber (rtx insn)
-{
- int opno;
- bool found = false;
-
- extract_insn (insn);
-
- VEC_truncate (int, earlyclobber_regclass, 0);
- for (opno = 0; opno < recog_data.n_operands; opno++)
- {
- char c;
- bool amp_p;
- int i;
- enum reg_class class;
- const char *p = recog_data.constraints[opno];
-
- class = NO_REGS;
- amp_p = false;
- for (;;)
- {
- c = *p;
- switch (c)
- {
- case '=': case '+': case '?':
- case '#': case '!':
- case '*': case '%':
- case 'm': case '<': case '>': case 'V': case 'o':
- case 'E': case 'F': case 'G': case 'H':
- case 's': case 'i': case 'n':
- case 'I': case 'J': case 'K': case 'L':
- case 'M': case 'N': case 'O': case 'P':
- case 'X':
- case '0': case '1': case '2': case '3': case '4':
- case '5': case '6': case '7': case '8': case '9':
- /* These don't say anything we care about. */
- break;
-
- case '&':
- amp_p = true;
- break;
- case '\0':
- case ',':
- if (amp_p && class != NO_REGS)
- {
- int rc;
-
- found = true;
- for (i = 0;
- VEC_iterate (int, earlyclobber_regclass, i, rc);
- i++)
- {
- if (rc == (int) class)
- goto found_rc;
- }
-
- /* We use VEC_quick_push here because
- earlyclobber_regclass holds no more than
- N_REG_CLASSES elements. */
- VEC_quick_push (int, earlyclobber_regclass, (int) class);
- found_rc:
- ;
- }
-
- amp_p = false;
- class = NO_REGS;
- break;
-
- case 'r':
- class = GENERAL_REGS;
- break;
-
- default:
- class = REG_CLASS_FROM_CONSTRAINT (c, p);
- break;
- }
- if (c == '\0')
- break;
- p += CONSTRAINT_LEN (c, p);
- }
- }
-
- return found;
-}
-
-/* The function checks that pseudo-register *X has a class
- intersecting with the class of pseudo-register could be early
- clobbered in the same insn.
-
- This function is a no-op if earlyclobber_regclass is empty.
-
- Reload can assign the same hard register to uninitialized
- pseudo-register and early clobbered pseudo-register in an insn if
- the pseudo-register is used first time in given BB and not lived at
- the BB start. To prevent this we don't change life information for
- such pseudo-registers. */
-
-static int
-df_urec_mark_reg_use_for_earlyclobber (rtx *x, void *data)
-{
- enum reg_class pref_class, alt_class;
- int i, regno;
- struct df_urec_bb_info *bb_info = (struct df_urec_bb_info*) data;
-
- if (REG_P (*x) && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
- {
- int rc;
-
- regno = REGNO (*x);
- if (bitmap_bit_p (bb_info->kill, regno)
- || bitmap_bit_p (bb_info->gen, regno))
- return 0;
- pref_class = reg_preferred_class (regno);
- alt_class = reg_alternate_class (regno);
- for (i = 0; VEC_iterate (int, earlyclobber_regclass, i, rc); i++)
- {
- if (reg_classes_intersect_p (rc, pref_class)
- || (rc != NO_REGS
- && reg_classes_intersect_p (rc, alt_class)))
- {
- bitmap_set_bit (bb_info->earlyclobber, regno);
- break;
- }
- }
- }
- return 0;
-}
-
-/* The function processes all pseudo-registers in *X with the aid of
- previous function. */
-
-static void
-df_urec_mark_reg_use_for_earlyclobber_1 (rtx *x, void *data)
-{
- for_each_rtx (x, df_urec_mark_reg_use_for_earlyclobber, data);
-}
-
-
-/* Compute local uninitialized register info for basic block BB. */
-
-static void
-df_urec_bb_local_compute (unsigned int bb_index)
-{
- basic_block bb = BASIC_BLOCK (bb_index);
- struct df_urec_bb_info *bb_info = df_urec_get_bb_info (bb_index);
- rtx insn;
- struct df_ref **def_rec;
-
- for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
- {
- struct df_ref *def = *def_rec;
- if (DF_REF_FLAGS (def) & DF_REF_AT_TOP)
- {
- unsigned int regno = DF_REF_REGNO (def);
- bitmap_set_bit (bb_info->gen, regno);
- }
- }
-
- FOR_BB_INSNS (bb, insn)
- {
- if (INSN_P (insn))
- {
- note_stores (PATTERN (insn), df_urec_mark_reg_change, bb_info);
- if (df_urec_check_earlyclobber (insn))
- {
- struct df_urec_problem_data *problem_data
- = (struct df_urec_problem_data *) df_urec->problem_data;
- problem_data->earlyclobbers_found = true;
- note_uses (&PATTERN (insn),
- df_urec_mark_reg_use_for_earlyclobber_1, bb_info);
- }
- }
- }
-
- for (def_rec = df_get_artificial_defs (bb_index); *def_rec; def_rec++)
- {
- struct df_ref *def = *def_rec;
- if ((DF_REF_FLAGS (def) & DF_REF_AT_TOP) == 0)
- {
- unsigned int regno = DF_REF_REGNO (def);
- bitmap_set_bit (bb_info->gen, regno);
- }
- }
-}
-
-
-/* Compute local uninitialized register info. */
-
-static void
-df_urec_local_compute (bitmap all_blocks)
-{
- unsigned int bb_index;
- bitmap_iterator bi;
-#ifdef STACK_REGS
- int i;
- HARD_REG_SET stack_hard_regs, used;
- struct df_urec_problem_data *problem_data
- = (struct df_urec_problem_data *) df_urec->problem_data;
-
- /* Any register that MAY be allocated to a register stack (like the
- 387) is treated poorly. Each such register is marked as being
- live everywhere. This keeps the register allocator and the
- subsequent passes from doing anything useful with these values.
-
- FIXME: This seems like an incredibly poor idea. */
-
- CLEAR_HARD_REG_SET (stack_hard_regs);
- for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
- SET_HARD_REG_BIT (stack_hard_regs, i);
- problem_data->stack_regs = BITMAP_ALLOC (NULL);
- for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
- {
- COPY_HARD_REG_SET (used, reg_class_contents[reg_preferred_class (i)]);
- IOR_HARD_REG_SET (used, reg_class_contents[reg_alternate_class (i)]);
- AND_HARD_REG_SET (used, stack_hard_regs);
- if (!hard_reg_set_empty_p (used))
- bitmap_set_bit (problem_data->stack_regs, i);
- }
-#endif
-
- /* We know that earlyclobber_regclass holds no more than
- N_REG_CLASSES elements. See df_urec_check_earlyclobber. */
- earlyclobber_regclass = VEC_alloc (int, heap, N_REG_CLASSES);
-
- EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
- {
- df_urec_bb_local_compute (bb_index);
- }
-
- VEC_free (int, heap, earlyclobber_regclass);
-}
-
-
-/* Initialize the solution vectors. */
-
-static void
-df_urec_init (bitmap all_blocks)
-{
- unsigned int bb_index;
- bitmap_iterator bi;
-
- EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
- {
- struct df_urec_bb_info *bb_info = df_urec_get_bb_info (bb_index);
-
- bitmap_copy (bb_info->out, bb_info->gen);
- bitmap_clear (bb_info->in);
- }
-}
-
-
-/* Or in the stack regs, hard regs and early clobber regs into the
- urec_in sets of all of the blocks. */
-
-
-static void
-df_urec_local_finalize (bitmap all_blocks)
-{
- bitmap tmp = BITMAP_ALLOC (NULL);
- bitmap_iterator bi;
- unsigned int bb_index;
- struct df_urec_problem_data *problem_data
- = (struct df_urec_problem_data *) df_urec->problem_data;
-
- EXECUTE_IF_SET_IN_BITMAP (all_blocks, 0, bb_index, bi)
- {
- struct df_urec_bb_info *bb_info = df_urec_get_bb_info (bb_index);
- struct df_lr_bb_info *bb_lr_info = df_lr_get_bb_info (bb_index);
-
- if (bb_index != ENTRY_BLOCK && bb_index != EXIT_BLOCK)
- {
- if (problem_data->earlyclobbers_found)
- bitmap_ior_into (bb_info->in, bb_info->earlyclobber);
-
-#ifdef STACK_REGS
- /* We can not use the same stack register for uninitialized
- pseudo-register and another living pseudo-register
- because if the uninitialized pseudo-register dies,
- subsequent pass reg-stack will be confused (it will
- believe that the other register dies). */
- bitmap_ior_into (bb_info->in, problem_data->stack_regs);
- bitmap_ior_into (bb_info->out, problem_data->stack_regs);
-#endif
- }
-
- /* No register may reach a location where it is not used. Thus
- we trim the rr result to the places where it is used. */
- bitmap_and_into (bb_info->in, bb_lr_info->in);
- bitmap_and_into (bb_info->out, bb_lr_info->out);
- bitmap_copy (bb_info->top, bb_info->in);
- if (bb_lr_info->adef)
- bitmap_ior_into (bb_info->top, bb_lr_info->adef);
- bitmap_and_into (bb_info->top, bb_lr_info->top);
-#if 0
- /* Hard registers may still stick in the ur_out set, but not
- be in the ur_in set, if their only mention was in a call
- in this block. This is because a call kills in the lr
- problem but does not kill in the rr problem. To clean
- this up, we execute the transfer function on the lr_in
- set and then use that to knock bits out of ur_out. */
- bitmap_ior_and_compl (tmp, bb_info->gen, bb_lr_info->in,
- bb_info->kill);
- bitmap_and_into (bb_info->out, tmp);
-#endif
- }
-
-#ifdef STACK_REGS
- BITMAP_FREE (problem_data->stack_regs);
-#endif
- BITMAP_FREE (tmp);
-}
-
-
-/* Confluence function that ignores fake edges. */
-
-static void
-df_urec_confluence_n (edge e)
-{
- bitmap op1 = df_urec_get_bb_info (e->dest->index)->in;
- bitmap op2 = df_urec_get_bb_info (e->src->index)->out;
-
- if (e->flags & EDGE_FAKE)
- return;
-
- bitmap_ior_into (op1, op2);
-}
-
-
-/* Transfer function. */
-
-static bool
-df_urec_transfer_function (int bb_index)
-{
- struct df_urec_bb_info *bb_info = df_urec_get_bb_info (bb_index);
- bitmap in = bb_info->in;
- bitmap out = bb_info->out;
- bitmap gen = bb_info->gen;
- bitmap kill = bb_info->kill;
-
- return bitmap_ior_and_compl (out, gen, in, kill);
-}
-
-
-/* Free all storage associated with the problem. */
-
-static void
-df_urec_free (void)
-{
- if (df_urec->block_info)
- {
- unsigned int i;
-
- for (i = 0; i < df_urec->block_info_size; i++)
- {
- struct df_urec_bb_info *bb_info = df_urec_get_bb_info (i);
- if (bb_info)
- {
- BITMAP_FREE (bb_info->gen);
- BITMAP_FREE (bb_info->kill);
- BITMAP_FREE (bb_info->in);
- BITMAP_FREE (bb_info->out);
- BITMAP_FREE (bb_info->earlyclobber);
- BITMAP_FREE (bb_info->top);
- }
- }
-
- free_alloc_pool (df_urec->block_pool);
-
- df_urec->block_info_size = 0;
- free (df_urec->block_info);
- free (df_urec->problem_data);
- }
- free (df_urec);
-}
-
-
-/* Debugging info at top of bb. */
-
-static void
-df_urec_top_dump (basic_block bb, FILE *file)
-{
- struct df_urec_bb_info *bb_info = df_urec_get_bb_info (bb->index);
- if (!bb_info || !bb_info->in)
- return;
-
- fprintf (file, ";; urec in \t");
- df_print_regset (file, bb_info->in);
- fprintf (file, ";; urec gen \t");
- df_print_regset (file, bb_info->gen);
- fprintf (file, ";; urec kill\t");
- df_print_regset (file, bb_info->kill);
- fprintf (file, ";; urec ec\t");
- df_print_regset (file, bb_info->earlyclobber);
-}
-
-
-/* Debugging info at bottom of bb. */
-
-static void
-df_urec_bottom_dump (basic_block bb, FILE *file)
-{
- struct df_urec_bb_info *bb_info = df_urec_get_bb_info (bb->index);
- if (!bb_info || !bb_info->out)
- return;
- fprintf (file, ";; urec out \t");
- df_print_regset (file, bb_info->out);
-}
-
-
-/* All of the information associated with every instance of the problem. */
-
-static struct df_problem problem_UREC =
-{
- DF_UREC, /* Problem id. */
- DF_FORWARD, /* Direction. */
- df_urec_alloc, /* Allocate the problem specific data. */
- NULL, /* Reset global information. */
- df_urec_free_bb_info, /* Free basic block info. */
- df_urec_local_compute, /* Local compute function. */
- df_urec_init, /* Init the solution specific data. */
- df_worklist_dataflow, /* Worklist solver. */
- NULL, /* Confluence operator 0. */
- df_urec_confluence_n, /* Confluence operator n. */
- df_urec_transfer_function, /* Transfer function. */
- df_urec_local_finalize, /* Finalize function. */
- df_urec_free, /* Free all of the problem information. */
- df_urec_free, /* Remove this problem from the stack of dataflow problems. */
- NULL, /* Debugging. */
- df_urec_top_dump, /* Debugging start block. */
- df_urec_bottom_dump, /* Debugging end block. */
- NULL, /* Incremental solution verify start. */
- NULL, /* Incremental solution verify end. */
- &problem_LR, /* Dependent problem. */
- TV_DF_UREC, /* Timing variable. */
- false /* Reset blocks on dropping out of blocks_to_analyze. */
-};
-
-
-/* Create a new DATAFLOW instance and add it to an existing instance
- of DF. The returned structure is what is used to get at the
- solution. */
-
-void
-df_urec_add_problem (void)
-{
- df_add_problem (&problem_UREC);
-}
-
-
/*----------------------------------------------------------------------------
CREATE DEF_USE (DU) and / or USE_DEF (UD) CHAINS
@@ -3992,7 +2761,7 @@ df_note_bb_compute (unsigned int bb_index,
{
struct df_mw_hardreg *mws = *mws_rec;
if ((mws->type == DF_REF_REG_DEF)
- && !df_ignore_stack_reg (REGNO (mws->mw_reg)))
+ && !df_ignore_stack_reg (mws->start_regno))
old_unused_notes
= df_set_unused_notes_for_mw (insn, old_unused_notes,
mws, live, do_not_gen,
@@ -4055,7 +2824,7 @@ df_note_bb_compute (unsigned int bb_index,
{
struct df_mw_hardreg *mws = *mws_rec;
if ((mws->type != DF_REF_REG_DEF)
- && !df_ignore_stack_reg (REGNO (mws->mw_reg)))
+ && !df_ignore_stack_reg (mws->start_regno))
old_dead_notes
= df_set_dead_notes_for_mw (insn, old_dead_notes,
mws, live, do_not_gen,
@@ -4170,10 +2939,6 @@ static struct df_problem problem_NOTE =
NULL, /* Debugging end block. */
NULL, /* Incremental solution verify start. */
NULL, /* Incremental solution verify end. */
-
- /* Technically this is only dependent on the live registers problem
- but it will produce information if built one of uninitialized
- register problems (UR, UREC) is also run. */
&problem_LR, /* Dependent problem. */
TV_DF_NOTE, /* Timing variable. */
false /* Reset blocks on dropping out of blocks_to_analyze. */
@@ -4218,44 +2983,13 @@ df_simulate_find_defs (rtx insn, bitmap defs)
struct df_ref **def_rec;
unsigned int uid = INSN_UID (insn);
- if (CALL_P (insn))
+ for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- struct df_ref *def = *def_rec;
- unsigned int dregno = DF_REF_REGNO (def);
-
- if (DF_REF_FLAGS (def) & DF_REF_MUST_CLOBBER)
- {
- if (dregno >= FIRST_PSEUDO_REGISTER
- || !(SIBLING_CALL_P (insn)
- && bitmap_bit_p (df->exit_block_uses, dregno)
- && !refers_to_regno_p (dregno, dregno+1,
- current_function_return_rtx,
- (rtx *)0)))
- {
- /* If the def is to only part of the reg, it does
- not kill the other defs that reach here. */
- if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- bitmap_set_bit (defs, dregno);
- }
- }
- else
- /* This is the return value. */
- if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- bitmap_set_bit (defs, dregno);
- }
- }
- else
- {
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- struct df_ref *def = *def_rec;
- /* If the def is to only part of the reg, it does
- not kill the other defs that reach here. */
- if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- bitmap_set_bit (defs, DF_REF_REGNO (def));
- }
+ struct df_ref *def = *def_rec;
+ /* If the def is to only part of the reg, it does
+ not kill the other defs that reach here. */
+ if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
+ bitmap_set_bit (defs, DF_REF_REGNO (def));
}
}
@@ -4268,46 +3002,15 @@ df_simulate_defs (rtx insn, bitmap live)
struct df_ref **def_rec;
unsigned int uid = INSN_UID (insn);
- if (CALL_P (insn))
+ for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
{
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- struct df_ref *def = *def_rec;
- unsigned int dregno = DF_REF_REGNO (def);
-
- if (DF_REF_FLAGS (def) & DF_REF_MUST_CLOBBER)
- {
- if (dregno >= FIRST_PSEUDO_REGISTER
- || !(SIBLING_CALL_P (insn)
- && bitmap_bit_p (df->exit_block_uses, dregno)
- && !refers_to_regno_p (dregno, dregno+1,
- current_function_return_rtx,
- (rtx *)0)))
- {
- /* If the def is to only part of the reg, it does
- not kill the other defs that reach here. */
- if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- bitmap_clear_bit (live, dregno);
- }
- }
- else
- /* This is the return value. */
- if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- bitmap_clear_bit (live, dregno);
- }
- }
- else
- {
- for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
- {
- struct df_ref *def = *def_rec;
- unsigned int dregno = DF_REF_REGNO (def);
-
- /* If the def is to only part of the reg, it does
- not kill the other defs that reach here. */
- if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
- bitmap_clear_bit (live, dregno);
- }
+ struct df_ref *def = *def_rec;
+ unsigned int dregno = DF_REF_REGNO (def);
+
+ /* If the def is to only part of the reg, it does
+ not kill the other defs that reach here. */
+ if (!(DF_REF_FLAGS (def) & (DF_REF_PARTIAL | DF_REF_CONDITIONAL)))
+ bitmap_clear_bit (live, dregno);
}
}
@@ -4336,7 +3039,7 @@ df_simulate_fixup_sets (basic_block bb, bitmap live)
{
/* These regs are considered always live so if they end up dying
because of some def, we need to bring them back again. */
- if (df_has_eh_preds (bb))
+ if (bb_has_eh_pred (bb))
bitmap_ior_into (live, df->eh_block_artificial_uses);
else
bitmap_ior_into (live, df->regular_block_artificial_uses);
diff --git a/gcc/df-scan.c b/gcc/df-scan.c
index 6e622f8460d..f3a90262f99 100644
--- a/gcc/df-scan.c
+++ b/gcc/df-scan.c
@@ -2004,6 +2004,10 @@ df_notes_rescan (rtx insn)
if (df->changeable_flags & DF_NO_INSN_RESCAN)
return;
+ /* Do nothing if the insn hasn't been emitted yet. */
+ if (!BLOCK_FOR_INSN (insn))
+ return;
+
df_grow_bb_info (df_scan);
df_grow_reg_info ();
@@ -2748,23 +2752,37 @@ df_def_record_1 (struct df_collection_rec *collection_rec,
|| GET_CODE (dst) == ZERO_EXTRACT)
{
flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;
+ if (GET_CODE (dst) == ZERO_EXTRACT)
+ flags |= DF_REF_EXTRACT;
+ else
+ flags |= DF_REF_STRICT_LOWER_PART;
+
loc = &XEXP (dst, 0);
dst = *loc;
}
- if (df_read_modify_subreg_p (dst))
- flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;
+ /* At this point if we do not have a reg or a subreg, just return. */
+ if (REG_P (dst))
+ {
+ df_ref_record (collection_rec,
+ dst, loc, bb, insn, DF_REF_REG_DEF, flags);
- if (REG_P (dst)
- || (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))))
- df_ref_record (collection_rec,
- dst, loc, bb, insn, DF_REF_REG_DEF, flags);
+ /* We want to keep sp alive everywhere - by making all
+ writes to sp also uses of sp. */
+ if (REGNO (dst) == STACK_POINTER_REGNUM)
+ df_ref_record (collection_rec,
+ dst, NULL, bb, insn, DF_REF_REG_USE, flags);
+ }
+ else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
+ {
+ if (df_read_modify_subreg_p (dst))
+ flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;
- /* We want to keep sp alive everywhere - by making all
- writes to sp also use of sp. */
- if (REG_P (dst) && REGNO (dst) == STACK_POINTER_REGNUM)
- df_ref_record (collection_rec,
- dst, NULL, bb, insn, DF_REF_REG_USE, flags);
+ flags |= DF_REF_SUBREG;
+
+ df_ref_record (collection_rec,
+ dst, loc, bb, insn, DF_REF_REG_DEF, flags);
+ }
}
@@ -2821,6 +2839,7 @@ df_uses_record (struct df_collection_rec *collection_rec,
case CONST_INT:
case CONST:
case CONST_DOUBLE:
+ case CONST_FIXED:
case CONST_VECTOR:
case PC:
case CC0:
@@ -2875,7 +2894,8 @@ df_uses_record (struct df_collection_rec *collection_rec,
if (df_read_modify_subreg_p (dst))
{
df_uses_record (collection_rec, &SUBREG_REG (dst),
- DF_REF_REG_USE, bb, insn, flags | DF_REF_READ_WRITE);
+ DF_REF_REG_USE, bb, insn,
+ flags | DF_REF_READ_WRITE | DF_REF_SUBREG);
break;
}
/* Fall through. */
@@ -2897,13 +2917,15 @@ df_uses_record (struct df_collection_rec *collection_rec,
dst = XEXP (dst, 0);
df_uses_record (collection_rec,
(GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
- DF_REF_REG_USE, bb, insn, DF_REF_READ_WRITE);
+ DF_REF_REG_USE, bb, insn,
+ DF_REF_READ_WRITE | DF_REF_STRICT_LOWER_PART);
}
break;
case ZERO_EXTRACT:
case SIGN_EXTRACT:
df_uses_record (collection_rec, &XEXP (dst, 0),
- DF_REF_REG_USE, bb, insn, DF_REF_READ_WRITE);
+ DF_REF_REG_USE, bb, insn,
+ DF_REF_READ_WRITE | DF_REF_EXTRACT);
df_uses_record (collection_rec, &XEXP (dst, 1),
DF_REF_REG_USE, bb, insn, flags);
df_uses_record (collection_rec, &XEXP (dst, 2),
@@ -3087,18 +3109,22 @@ df_get_call_refs (struct df_collection_rec * collection_rec,
so they are recorded as used. */
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
if (global_regs[i])
- df_ref_record (collection_rec, regno_reg_rtx[i],
- NULL, bb, insn, DF_REF_REG_USE, flags);
+ {
+ df_ref_record (collection_rec, regno_reg_rtx[i],
+ NULL, bb, insn, DF_REF_REG_USE, flags);
+ df_ref_record (collection_rec, regno_reg_rtx[i],
+ NULL, bb, insn, DF_REF_REG_DEF, flags);
+ }
is_sibling_call = SIBLING_CALL_P (insn);
EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, ui, bi)
{
- if ((!bitmap_bit_p (defs_generated, ui))
+ if (!global_regs[ui]
+ && (!bitmap_bit_p (defs_generated, ui))
&& (!is_sibling_call
|| !bitmap_bit_p (df->exit_block_uses, ui)
|| refers_to_regno_p (ui, ui+1,
current_function_return_rtx, NULL)))
-
df_ref_record (collection_rec, regno_reg_rtx[ui],
NULL, bb, insn, DF_REF_REG_DEF, DF_REF_MAY_CLOBBER | flags);
}
@@ -3175,23 +3201,6 @@ df_insn_refs_collect (struct df_collection_rec* collection_rec,
df_canonize_collection_rec (collection_rec);
}
-/* Return true if any pred of BB is an eh. */
-
-bool
-df_has_eh_preds (basic_block bb)
-{
- edge e;
- edge_iterator ei;
-
- FOR_EACH_EDGE (e, ei, bb->preds)
- {
- if (e->flags & EDGE_EH)
- return true;
- }
- return false;
-}
-
-
/* Recompute the luids for the insns in BB. */
void
@@ -3256,7 +3265,7 @@ df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
}
#ifdef EH_RETURN_DATA_REGNO
- if (df_has_eh_preds (bb))
+ if (bb_has_eh_pred (bb))
{
unsigned int i;
/* Mark the registers that will contain data for the handler. */
@@ -3273,7 +3282,7 @@ df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
#ifdef EH_USES
- if (df_has_eh_preds (bb))
+ if (bb_has_eh_pred (bb))
{
unsigned int i;
/* This code is putting in an artificial ref for the use at the
@@ -3305,7 +3314,7 @@ df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
{
bitmap_iterator bi;
unsigned int regno;
- bitmap au = df_has_eh_preds (bb)
+ bitmap au = bb_has_eh_pred (bb)
? df->eh_block_artificial_uses
: df->regular_block_artificial_uses;
@@ -3372,7 +3381,7 @@ df_bb_refs_record (int bb_index, bool scan_insns)
df_refs_add_to_chains (&collection_rec, bb, NULL);
/* Now that the block has been processed, set the block as dirty so
- lr and ur will get it processed. */
+ LR and LIVE will get it processed. */
df_set_bb_dirty (bb);
}
@@ -3476,8 +3485,6 @@ df_mark_reg (rtx reg, void *vset)
}
-
-
/* Set the bit for regs that are considered being defined at the entry. */
static void
@@ -3523,12 +3530,12 @@ df_get_entry_block_def_set (bitmap entry_block_defs)
bitmap_set_bit (entry_block_defs, STATIC_CHAIN_REGNUM);
#endif
#endif
-
- r = targetm.calls.struct_value_rtx (current_function_decl, true);
- if (r && REG_P (r))
- bitmap_set_bit (entry_block_defs, REGNO (r));
}
+ r = targetm.calls.struct_value_rtx (current_function_decl, true);
+ if (r && REG_P (r))
+ bitmap_set_bit (entry_block_defs, REGNO (r));
+
if ((!reload_completed) || frame_pointer_needed)
{
/* Any reference to any pseudo before reload is a potential
@@ -3631,7 +3638,7 @@ df_record_entry_block_defs (bitmap entry_block_defs)
}
-/* Update the defs in the entry bolck. */
+/* Update the defs in the entry block. */
void
df_update_entry_block_defs (void)
@@ -3775,7 +3782,7 @@ df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exi
I do not know why. */
if (reload_completed
&& !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
- && df_has_eh_preds (EXIT_BLOCK_PTR)
+ && bb_has_eh_pred (EXIT_BLOCK_PTR)
&& fixed_regs[ARG_POINTER_REGNUM])
df_ref_record (collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
@@ -4265,12 +4272,6 @@ df_scan_verify (void)
if (!df)
return;
- /* This is a hack, but a necessary one. If you do not do this,
- insn_attrtab can never be compiled in a bootstrap. This
- verification is just too expensive. */
- if (n_basic_blocks > 250)
- return;
-
/* Verification is a 4 step process. */
/* (1) All of the refs are marked by going thru the reg chains. */
diff --git a/gcc/testsuite/gcc.dg/vect/vect-106.c b/gcc/testsuite/gcc.dg/vect/vect-106.c
index 891cd3155ac..d578d814326 100644
--- a/gcc/testsuite/gcc.dg/vect/vect-106.c
+++ b/gcc/testsuite/gcc.dg/vect/vect-106.c
@@ -9,6 +9,7 @@
static int a[N] = {1,2,3,4,5,6,7,8,9};
static int b[N] = {2,3,4,5,6,7,8,9,0};
+__attribute__ ((noinline))
int main1 () {
int i;
int *p, *q, *p1, *q1;