path: root/gcc/postreload-gcse.c
Diffstat (limited to 'gcc/postreload-gcse.c')
-rw-r--r--  gcc/postreload-gcse.c  |  118
1 files changed, 109 insertions, 9 deletions
diff --git a/gcc/postreload-gcse.c b/gcc/postreload-gcse.c
index 324264ac652..83048bd4910 100644
--- a/gcc/postreload-gcse.c
+++ b/gcc/postreload-gcse.c
@@ -67,6 +67,8 @@ along with GCC; see the file COPYING3. If not see
#include "target.h"
#include "tree-pass.h"
#include "dbgcnt.h"
+#include "df.h"
+#include "gcse-common.h"
/* The following code implements gcse after reload, the purpose of this
pass is to cleanup redundant loads generated by reload and other
@@ -121,6 +123,9 @@ struct expr
/* The same hash for this entry. */
hashval_t hash;
+ /* Index in the transparent bitmaps. */
+ unsigned int bitmap_index;
+
/* List of available occurrence in basic blocks in the function. */
struct occr *avail_occr;
};
@@ -235,6 +240,24 @@ static struct modifies_mem *modifies_mem_obstack_bottom;
block, have no gaps, and only apply to real insns. */
static int *uid_cuid;
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
+
+/* Bitmap of blocks which have memory stores. */
+static bitmap modify_mem_list_set;
+
+/* Bitmap of blocks which have calls. */
+static bitmap blocks_with_calls;
+
+/* Vector indexed by block # with a list of all the insns that
+ modify memory within the block. */
+static vec<rtx_insn *> *modify_mem_list;
+
+/* Vector indexed by block # with a canonicalized list of insns
+ that modify memory in the block. */
+static vec<modify_pair> *canon_modify_mem_list;
+
+/* Vector of simple bitmaps indexed by block number. Each component sbitmap
+ indicates which expressions are transparent through the block. */
+static sbitmap *transp;
/* Helpers for memory allocation/freeing. */
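[Note: the patch introduces several per-block structures that later drive the transparency computation. As a rough standalone model, and not GCC's sbitmap/bitmap API, the transp vector can be pictured as a bit matrix with one row per basic block and one column per hashed memory expression. The sketch below uses plain C with invented names (transp_matrix, transp_alloc, ...) purely for illustration.]

/* Standalone sketch (not GCC code): model the per-block transparency
   table as a bit matrix, rows = basic blocks, columns = expressions.  */
#include <stdlib.h>
#include <string.h>

typedef struct
{
  unsigned n_blocks;    /* rows: one per basic block */
  unsigned n_exprs;     /* columns: one per hashed expression */
  unsigned char *bits;  /* n_blocks * n_exprs flags */
} transp_matrix;

static transp_matrix *
transp_alloc (unsigned n_blocks, unsigned n_exprs)
{
  transp_matrix *m = malloc (sizeof *m);
  m->n_blocks = n_blocks;
  m->n_exprs = n_exprs;
  /* Start with every expression transparent in every block, mirroring
     the bitmap_vector_ones call added by the patch.  */
  m->bits = malloc ((size_t) n_blocks * n_exprs);
  memset (m->bits, 1, (size_t) n_blocks * n_exprs);
  return m;
}

static void
transp_clear (transp_matrix *m, unsigned block, unsigned expr)
{
  m->bits[block * m->n_exprs + expr] = 0;   /* expression killed in block */
}

static int
transp_test (const transp_matrix *m, unsigned block, unsigned expr)
{
  return m->bits[block * m->n_exprs + expr];
}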
@@ -266,7 +289,7 @@ static bool reg_used_on_edge (rtx, edge);
static rtx get_avail_load_store_reg (rtx_insn *);
static bool bb_has_well_behaved_predecessors (basic_block);
-static struct occr* get_bb_avail_insn (basic_block, struct occr *);
+static struct occr* get_bb_avail_insn (basic_block, struct occr *, int);
static void hash_scan_set (rtx_insn *);
static void compute_hash_table (void);
@@ -321,6 +344,15 @@ alloc_mem (void)
modifies_mem_obstack_bottom =
(struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
sizeof (struct modifies_mem));
+
+ blocks_with_calls = BITMAP_ALLOC (NULL);
+ modify_mem_list_set = BITMAP_ALLOC (NULL);
+
+ modify_mem_list = (vec_rtx_heap *) xcalloc (last_basic_block_for_fn (cfun),
+ sizeof (vec_rtx_heap));
+ canon_modify_mem_list
+ = (vec_modify_pair_heap *) xcalloc (last_basic_block_for_fn (cfun),
+ sizeof (vec_modify_pair_heap));
}
/* Free memory allocated by alloc_mem. */
@@ -338,6 +370,16 @@ free_mem (void)
obstack_free (&unoccr_obstack, NULL);
obstack_free (&modifies_mem_obstack, NULL);
+ unsigned i;
+ bitmap_iterator bi;
+ EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
+ {
+ modify_mem_list[i].release ();
+ canon_modify_mem_list[i].release ();
+ }
+
+ BITMAP_FREE (blocks_with_calls);
+ BITMAP_FREE (modify_mem_list_set);
free (reg_avail_info);
}
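[Note: alloc_mem now xcalloc's one heap vector per basic block, and free_mem releases only the vectors for blocks recorded in modify_mem_list_set. A minimal standalone sketch of that pattern follows; the insn_vec type and both helpers are invented for illustration and do not use GCC's vec or bitmap machinery.]

/* Standalone sketch (not GCC code): per-block insn lists allocated
   lazily and released only for the blocks that were actually touched.  */
#include <stdlib.h>

typedef struct
{
  void **elts;
  unsigned len, cap;
} insn_vec;

/* Append to a block's vector and remember that the block was touched.  */
static void
record_block_insn (insn_vec *per_block, unsigned char *touched,
                   unsigned block, void *insn)
{
  insn_vec *v = &per_block[block];
  if (v->len == v->cap)
    {
      v->cap = v->cap ? 2 * v->cap : 4;
      v->elts = realloc (v->elts, v->cap * sizeof *v->elts);
    }
  v->elts[v->len++] = insn;
  touched[block] = 1;
}

/* Release only the vectors for touched blocks; untouched entries from
   calloc are all-zero and own no storage.  */
static void
release_block_insns (insn_vec *per_block, const unsigned char *touched,
                     unsigned n_blocks)
{
  for (unsigned b = 0; b < n_blocks; b++)
    if (touched[b])
      free (per_block[b].elts);
}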
@@ -376,8 +418,15 @@ insert_expr_in_table (rtx x, rtx_insn *insn)
slot = expr_table->find_slot_with_hash (cur_expr, hash, INSERT);
if (! (*slot))
- /* The expression isn't found, so insert it. */
- *slot = cur_expr;
+ {
+ /* The expression isn't found, so insert it. */
+ *slot = cur_expr;
+
+ /* Anytime we add an entry to the table, record the index
+ of the new entry. The bitmap index starts counting
+ at zero. */
+ cur_expr->bitmap_index = expr_table->elements () - 1;
+ }
else
{
/* The expression is already in the table, so roll back the
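[Note: the new bitmap_index field gives every distinct expression a dense index at the moment it is first inserted, so it can later name a column in the transparency bitmaps. A toy illustration of the idea, with a linear table standing in for the real hash table and made-up names:]

/* Standalone sketch (not GCC code): assign each distinct key a dense
   index at insertion time; an existing key keeps its original index.  */
#include <string.h>

struct entry { const char *key; unsigned bitmap_index; };

static unsigned
insert_or_find (struct entry *table, unsigned *n, const char *key)
{
  for (unsigned i = 0; i < *n; i++)
    if (strcmp (table[i].key, key) == 0)
      return table[i].bitmap_index;   /* already present: keep old index */
  table[*n].key = key;
  table[*n].bitmap_index = *n;        /* new entry: next free column */
  return (*n)++;
}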
@@ -698,6 +747,11 @@ record_last_mem_set_info (rtx_insn *insn)
list_entry->insn = insn;
list_entry->next = modifies_mem_list;
modifies_mem_list = list_entry;
+
+ record_last_mem_set_info_common (insn, modify_mem_list,
+ canon_modify_mem_list,
+ modify_mem_list_set,
+ blocks_with_calls);
}
/* Called from compute_hash_table via note_stores to handle one
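[Note: record_last_mem_set_info_common lives in gcse-common.c; judging only from the arguments passed above, its job is to note, per basic block, that memory is modified there and whether the modifying insn is a call. A deliberately simplified model of that bookkeeping, with placeholder types rather than the real helper:]

/* Standalone sketch (not GCC code): per-block flags for "modifies
   memory" and "contains a call".  */
typedef struct { int is_call; int block; } model_insn;

static void
model_record_mem_set (const model_insn *insn,
                      unsigned char *modifies_mem,   /* per-block flag */
                      unsigned char *has_call)       /* per-block flag */
{
  modifies_mem[insn->block] = 1;   /* block is no longer "clean" */
  if (insn->is_call)
    has_call[insn->block] = 1;     /* calls may clobber unknown memory */
}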
@@ -955,15 +1009,45 @@ bb_has_well_behaved_predecessors (basic_block bb)
/* Search for the occurrences of expression in BB. */
static struct occr*
-get_bb_avail_insn (basic_block bb, struct occr *occr)
+get_bb_avail_insn (basic_block bb, struct occr *orig_occr, int bitmap_index)
{
+ struct occr *occr = orig_occr;
+
for (; occr != NULL; occr = occr->next)
if (BLOCK_FOR_INSN (occr->insn) == bb)
return occr;
+
+ /* If we could not find an occurrence in BB, see if BB
+ has a single predecessor with an occurrence that is
+ transparent through BB. */
+ if (single_pred_p (bb)
+ && bitmap_bit_p (transp[bb->index], bitmap_index)
+ && (occr = get_bb_avail_insn (single_pred (bb), orig_occr, bitmap_index)))
+ {
+ rtx avail_reg = get_avail_load_store_reg (occr->insn);
+ if (!reg_set_between_p (avail_reg,
+ PREV_INSN (BB_HEAD (bb)),
+ NEXT_INSN (BB_END (bb)))
+ && !reg_killed_on_edge (avail_reg, single_pred_edge (bb)))
+ return occr;
+ }
+
return NULL;
}
+/* This helper is called via htab_traverse. */
+int
+compute_expr_transp (expr **slot, FILE *dump_file ATTRIBUTE_UNUSED)
+{
+ struct expr *expr = *slot;
+
+ compute_transp (expr->expr, expr->bitmap_index, transp,
+ blocks_with_calls, modify_mem_list_set,
+ canon_modify_mem_list);
+ return 1;
+}
+
/* This handles the case where several stores feed a partially redundant
load. It checks if the redundancy elimination is possible and if it's
worth it.
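[Note: the extended get_bb_avail_insn falls back to a single predecessor when the current block has no occurrence, provided the expression is transparent through the block and the loaded register is neither overwritten in the block nor killed on the incoming edge. Restated over a toy CFG with stand-in fields; has_occurrence, transparent and reg_survives are all invented here:]

/* Standalone sketch (not GCC code): walk up a chain of single
   predecessors looking for a block where the expression is available.  */
#include <stddef.h>

struct toy_block
{
  int has_occurrence;              /* expression available in this block?  */
  int transparent;                 /* no store here kills the expression   */
  int reg_survives;                /* loaded register not overwritten here */
  struct toy_block *single_pred;   /* NULL if zero or multiple preds       */
};

static struct toy_block *
find_avail_block (struct toy_block *bb)
{
  if (bb->has_occurrence)
    return bb;
  /* Defer to the single predecessor only while the expression and its
     destination register pass through this block untouched.  */
  if (bb->single_pred && bb->transparent && bb->reg_survives)
    return find_avail_block (bb->single_pred);
  return NULL;
}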
@@ -1016,9 +1100,13 @@ eliminate_partially_redundant_load (basic_block bb, rtx_insn *insn,
avail_insn = NULL;
avail_reg = NULL_RTX;
pred_bb = pred->src;
- next_pred_bb_end = NEXT_INSN (BB_END (pred_bb));
- for (a_occr = get_bb_avail_insn (pred_bb, expr->avail_occr); a_occr;
- a_occr = get_bb_avail_insn (pred_bb, a_occr->next))
+ for (a_occr = get_bb_avail_insn (pred_bb,
+ expr->avail_occr,
+ expr->bitmap_index);
+ a_occr;
+ a_occr = get_bb_avail_insn (pred_bb,
+ a_occr->next,
+ expr->bitmap_index))
{
/* Check if the loaded register is not used. */
avail_insn = a_occr->insn;
@@ -1038,6 +1126,7 @@ eliminate_partially_redundant_load (basic_block bb, rtx_insn *insn,
avail_insn = NULL;
continue;
}
+ next_pred_bb_end = NEXT_INSN (BB_END (BLOCK_FOR_INSN (avail_insn)));
if (!reg_set_between_p (avail_reg, avail_insn, next_pred_bb_end))
/* AVAIL_INSN remains non-null. */
break;
@@ -1150,9 +1239,9 @@ eliminate_partially_redundant_load (basic_block bb, rtx_insn *insn,
/* Delete the insn if it is not available in this block and mark it
for deletion if it is available. If insn is available it may help
discover additional redundancies, so mark it for later deletion. */
- for (a_occr = get_bb_avail_insn (bb, expr->avail_occr);
+ for (a_occr = get_bb_avail_insn (bb, expr->avail_occr, expr->bitmap_index);
a_occr && (a_occr->insn != insn);
- a_occr = get_bb_avail_insn (bb, a_occr->next))
+ a_occr = get_bb_avail_insn (bb, a_occr->next, expr->bitmap_index))
;
if (!a_occr)
@@ -1308,8 +1397,19 @@ gcse_after_reload_main (rtx f ATTRIBUTE_UNUSED)
if (expr_table->elements () > 0)
{
+ /* Knowing which MEMs are transparent through a block can significantly
+ increase the number of redundant loads found. So compute transparency
+ information for each memory expression in the hash table. */
+ df_analyze ();
+ /* This can not be part of the normal allocation routine because
+ we have to know the number of elements in the hash table. */
+ transp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
+ expr_table->elements ());
+ bitmap_vector_ones (transp, last_basic_block_for_fn (cfun));
+ expr_table->traverse <FILE *, compute_expr_transp> (dump_file);
eliminate_partially_redundant_loads ();
delete_redundant_insns ();
+ sbitmap_vector_free (transp);
if (dump_file)
{
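[Note: at the driver level the new code runs df_analyze, allocates one transparency row per basic block with one bit per hash-table element, sets every bit, and then traverses the table with compute_expr_transp to clear bits wherever a block's stores may conflict with an expression; the vector is freed once the redundant loads have been eliminated. A standalone sketch of that flow with plain arrays and a placeholder may_alias test; none of these names are GCC's:]

/* Standalone sketch (not GCC code): start fully transparent, then clear
   each (block, expression) bit for which a store in the block may
   conflict with the expression's memory reference.  */
#include <stdlib.h>
#include <string.h>

struct store    { long addr; long size; };
struct mem_expr { long addr; long size; };

static int
may_alias (const struct store *st, const struct mem_expr *e)
{
  /* Placeholder overlap test on symbolic addresses.  */
  return st->addr < e->addr + e->size && e->addr < st->addr + st->size;
}

static unsigned char *
compute_transparency (const struct mem_expr *exprs, unsigned n_exprs,
                      struct store *const *block_stores,
                      const unsigned *n_stores, unsigned n_blocks)
{
  /* Row-major matrix: transp[block * n_exprs + expr].  */
  unsigned char *transp = malloc ((size_t) n_blocks * n_exprs);
  memset (transp, 1, (size_t) n_blocks * n_exprs);   /* "all ones" to start */
  for (unsigned b = 0; b < n_blocks; b++)
    for (unsigned s = 0; s < n_stores[b]; s++)
      for (unsigned e = 0; e < n_exprs; e++)
        if (may_alias (&block_stores[b][s], &exprs[e]))
          transp[b * n_exprs + e] = 0;               /* store may kill the MEM */
  return transp;
}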