Diffstat (limited to 'gcc/gcse.c')
-rw-r--r--  gcc/gcse.c  121
1 file changed, 59 insertions(+), 62 deletions(-)
diff --git a/gcc/gcse.c b/gcc/gcse.c
index 5535aa121a7..456a857259f 100644
--- a/gcc/gcse.c
+++ b/gcc/gcse.c
@@ -264,19 +264,8 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
the result of the expression is copied to a new register, and the redundant
expression is deleted by replacing it with this new register. Classic GCSE
doesn't have this problem as much as it computes the reaching defs of
- each register in each block and thus can try to use an existing register.
-
- **********************
-
- A fair bit of simplicity is created by creating small functions for simple
- tasks, even when the function is only called in one place. This may
- measurably slow things down [or may not] by creating more function call
- overhead than is necessary. The source is laid out so that it's trivial
- to make the affected functions inline so that one can measure what speed
- up, if any, can be achieved, and maybe later when things settle things can
- be rearranged.
-
- Help stamp out big monolithic functions! */
+ each register in each block and thus can try to use an existing
+ register. */
/* GCSE global vars. */
@@ -500,7 +489,10 @@ static bitmap modify_mem_list_set;
/* This array parallels modify_mem_list, but is kept canonicalized. */
static rtx * canon_modify_mem_list;
-static bitmap canon_modify_mem_list_set;
+
+/* Bitmap indexed by block numbers to record which blocks contain
+ function calls. */
+static bitmap blocks_with_calls;
/* Various variables for statistics gathering. */
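
Note on the hunk above: blocks_with_calls is simply one flag per basic-block index. It is set alongside modify_mem_list_set while insns are being recorded, with the extra bit marking blocks whose memory state is clobbered by a call (see the record_last_mem_set_info hunk further down). A minimal standalone C sketch of that bookkeeping, with a plain uint64_t and invented toy insns standing in for GCC's bitmap and RTL types, might look like this:

#include <stdint.h>
#include <stdio.h>

/* Stand-in for GCC's bitmap type: one bit per basic-block index.
   Everything here is illustrative; none of it is GCC code.  */
typedef uint64_t bb_bitmap;

/* Toy "insn": which block it sits in, and whether it is a call
   and/or writes memory.  */
struct toy_insn { unsigned bb; int is_call; int writes_mem; };

int
main (void)
{
  static const struct toy_insn insns[] = {
    { 0, 0, 1 },   /* store in block 0          */
    { 1, 1, 0 },   /* call in block 1           */
    { 1, 0, 1 },   /* another store in block 1  */
    { 2, 0, 1 },   /* store in block 2          */
  };
  bb_bitmap modify_mem_list_set = 0;  /* blocks that modify memory */
  bb_bitmap blocks_with_calls = 0;    /* blocks containing a call  */
  unsigned i;

  for (i = 0; i < sizeof insns / sizeof insns[0]; i++)
    {
      uint64_t bit = (uint64_t) 1 << insns[i].bb;
      if (insns[i].is_call || insns[i].writes_mem)
        modify_mem_list_set |= bit;  /* like bitmap_set_bit (modify_mem_list_set, bb) */
      if (insns[i].is_call)
        blocks_with_calls |= bit;    /* like bitmap_set_bit (blocks_with_calls, bb)   */
    }

  printf ("modify_mem_list_set = 0x%llx\n",
          (unsigned long long) modify_mem_list_set);
  printf ("blocks_with_calls   = 0x%llx\n",
          (unsigned long long) blocks_with_calls);
  return 0;
}
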
@@ -959,7 +951,7 @@ alloc_gcse_mem (rtx f)
CUID_INSN (i++) = insn;
/* Allocate vars to track sets of regs. */
- reg_set_bitmap = BITMAP_XMALLOC ();
+ reg_set_bitmap = BITMAP_ALLOC (NULL);
/* Allocate vars to track sets of regs, memory per block. */
reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
@@ -967,8 +959,8 @@ alloc_gcse_mem (rtx f)
basic block. */
modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
- modify_mem_list_set = BITMAP_XMALLOC ();
- canon_modify_mem_list_set = BITMAP_XMALLOC ();
+ modify_mem_list_set = BITMAP_ALLOC (NULL);
+ blocks_with_calls = BITMAP_ALLOC (NULL);
}
/* Free memory allocated by alloc_gcse_mem. */
@@ -979,12 +971,12 @@ free_gcse_mem (void)
free (uid_cuid);
free (cuid_insn);
- BITMAP_XFREE (reg_set_bitmap);
+ BITMAP_FREE (reg_set_bitmap);
sbitmap_vector_free (reg_set_in_block);
free_modify_mem_tables ();
- BITMAP_XFREE (modify_mem_list_set);
- BITMAP_XFREE (canon_modify_mem_list_set);
+ BITMAP_FREE (modify_mem_list_set);
+ BITMAP_FREE (blocks_with_calls);
}
/* Compute the local properties of each recorded expression.
@@ -1951,7 +1943,6 @@ canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED,
alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
canon_modify_mem_list[bb] =
alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
- bitmap_set_bit (canon_modify_mem_list_set, bb);
}
/* Record memory modification information for INSN. We do not actually care
@@ -1975,7 +1966,7 @@ record_last_mem_set_info (rtx insn)
need to insert a pair of items, as canon_list_insert does. */
canon_modify_mem_list[bb] =
alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
- bitmap_set_bit (canon_modify_mem_list_set, bb);
+ bitmap_set_bit (blocks_with_calls, bb);
}
else
note_stores (PATTERN (insn), canon_list_insert, (void*) insn);
@@ -2199,17 +2190,13 @@ clear_modify_mem_tables (void)
EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
{
free_INSN_LIST_list (modify_mem_list + i);
- }
- bitmap_clear (modify_mem_list_set);
-
- EXECUTE_IF_SET_IN_BITMAP (canon_modify_mem_list_set, 0, i, bi)
- {
free_insn_expr_list_list (canon_modify_mem_list + i);
}
- bitmap_clear (canon_modify_mem_list_set);
+ bitmap_clear (modify_mem_list_set);
+ bitmap_clear (blocks_with_calls);
}
-/* Release memory used by modify_mem_list_set and canon_modify_mem_list_set. */
+/* Release memory used by modify_mem_list_set. */
static void
free_modify_mem_tables (void)
@@ -2470,41 +2457,51 @@ compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
return;
case MEM:
- FOR_EACH_BB (bb)
- {
- rtx list_entry = canon_modify_mem_list[bb->index];
+ {
+ bitmap_iterator bi;
+ unsigned bb_index;
- while (list_entry)
- {
- rtx dest, dest_addr;
+ /* First handle all the blocks with calls. We don't need to
+ do any list walking for them. */
+ EXECUTE_IF_SET_IN_BITMAP (blocks_with_calls, 0, bb_index, bi)
+ {
+ if (set_p)
+ SET_BIT (bmap[bb_index], indx);
+ else
+ RESET_BIT (bmap[bb_index], indx);
+ }
- if (CALL_P (XEXP (list_entry, 0)))
- {
- if (set_p)
- SET_BIT (bmap[bb->index], indx);
- else
- RESET_BIT (bmap[bb->index], indx);
- break;
- }
- /* LIST_ENTRY must be an INSN of some kind that sets memory.
- Examine each hunk of memory that is modified. */
+ /* Now iterate over the blocks which have memory modifications
+ but which do not have any calls. */
+ EXECUTE_IF_AND_COMPL_IN_BITMAP (modify_mem_list_set, blocks_with_calls,
+ 0, bb_index, bi)
+ {
+ rtx list_entry = canon_modify_mem_list[bb_index];
- dest = XEXP (list_entry, 0);
- list_entry = XEXP (list_entry, 1);
- dest_addr = XEXP (list_entry, 0);
+ while (list_entry)
+ {
+ rtx dest, dest_addr;
- if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
- x, rtx_addr_varies_p))
- {
- if (set_p)
- SET_BIT (bmap[bb->index], indx);
- else
- RESET_BIT (bmap[bb->index], indx);
- break;
- }
- list_entry = XEXP (list_entry, 1);
- }
- }
+ /* LIST_ENTRY must be an INSN of some kind that sets memory.
+ Examine each hunk of memory that is modified. */
+
+ dest = XEXP (list_entry, 0);
+ list_entry = XEXP (list_entry, 1);
+ dest_addr = XEXP (list_entry, 0);
+
+ if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
+ x, rtx_addr_varies_p))
+ {
+ if (set_p)
+ SET_BIT (bmap[bb_index], indx);
+ else
+ RESET_BIT (bmap[bb_index], indx);
+ break;
+ }
+ list_entry = XEXP (list_entry, 1);
+ }
+ }
+ }
x = XEXP (x, 0);
goto repeat;
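
Note on the rewritten MEM case above: transparency is now computed in two bitmap passes. Blocks containing a call kill every memory expression outright, with no list walking; only blocks that modify memory but contain no call have their canon_modify_mem_list walked and checked with canon_true_dependence. The standalone C sketch below is not GCC code: a uint64_t word stands in for GCC's bitmap type, and the CFG contents and aliasing answer are invented for illustration. It shows the same two-pass structure that EXECUTE_IF_SET_IN_BITMAP and EXECUTE_IF_AND_COMPL_IN_BITMAP implement in the patch.

#include <stdint.h>
#include <stdio.h>

#define N_BLOCKS 6

/* One bit per basic-block index; a plain word stands in for GCC's
   bitmap type.  */
typedef uint64_t bb_bitmap;

#define SET_BB(map, bb)  ((map) |= (uint64_t) 1 << (bb))
#define TEST_BB(map, bb) (((map) >> (bb)) & 1)

/* Pretend dependence test: in the real code this is
   canon_true_dependence over each entry of canon_modify_mem_list.
   Here we simply claim the expression aliases a store in block 2.  */
static int
toy_aliases_store_in_block (unsigned bb)
{
  return bb == 2;
}

int
main (void)
{
  bb_bitmap blocks_with_calls = 0, modify_mem_list_set = 0;
  bb_bitmap not_transparent = 0;
  unsigned bb;

  /* Blocks 1 and 4 contain calls; blocks 1, 2 and 4 modify memory.  */
  SET_BB (blocks_with_calls, 1);
  SET_BB (blocks_with_calls, 4);
  SET_BB (modify_mem_list_set, 1);
  SET_BB (modify_mem_list_set, 2);
  SET_BB (modify_mem_list_set, 4);

  /* Pass 1: every block with a call kills the expression outright;
     no list walking is needed (EXECUTE_IF_SET_IN_BITMAP in the patch).  */
  for (bb = 0; bb < N_BLOCKS; bb++)
    if (TEST_BB (blocks_with_calls, bb))
      SET_BB (not_transparent, bb);

  /* Pass 2: blocks that modify memory but contain no call
     (EXECUTE_IF_AND_COMPL_IN_BITMAP in the patch); only these need
     the per-store dependence check.  */
  for (bb = 0; bb < N_BLOCKS; bb++)
    if (TEST_BB (modify_mem_list_set, bb)
        && !TEST_BB (blocks_with_calls, bb)
        && toy_aliases_store_in_block (bb))
      SET_BB (not_transparent, bb);

  for (bb = 0; bb < N_BLOCKS; bb++)
    printf ("block %u: %s\n", bb,
            TEST_BB (not_transparent, bb) ? "kills the MEM" : "transparent");
  return 0;
}
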
@@ -4133,7 +4130,7 @@ pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
if (! occr->deleted_p)
continue;
- /* Insert this expression on this edge if if it would
+ /* Insert this expression on this edge if it would
reach the deleted occurrence in BB. */
if (!TEST_BIT (inserted[e], j))
{
@@ -5849,7 +5846,7 @@ find_loads (rtx x, rtx store_pattern, int after)
/* Check if INSN kills the store pattern X (is aliased with it).
AFTER is true if we are checking the case when store X occurs
- after the insn. Return true if it it does. */
+ after the insn. Return true if it does. */
static bool
store_killed_in_insn (rtx x, rtx x_regs, rtx insn, int after)