aboutsummaryrefslogtreecommitdiff
path: root/gcc/tree-ssa-alias.c
diff options
context:
space:
mode:
Diffstat (limited to 'gcc/tree-ssa-alias.c')
-rw-r--r--gcc/tree-ssa-alias.c338
1 files changed, 201 insertions, 137 deletions
diff --git a/gcc/tree-ssa-alias.c b/gcc/tree-ssa-alias.c
index 2e74b58e103..91a01d2d48a 100644
--- a/gcc/tree-ssa-alias.c
+++ b/gcc/tree-ssa-alias.c
@@ -43,6 +43,19 @@ Boston, MA 02110-1301, USA. */
#include "convert.h"
#include "params.h"
#include "vec.h"
+#include "bitmap.h"
+
+/* Keep track of how many times each pointer has been dereferenced in
+ the program using the aux variable. This is used by the alias
+ grouping heuristic in compute_flow_insensitive_aliasing. */
+#define NUM_REFERENCES(ANN) ((size_t)((ANN)->common.aux))
+#define NUM_REFERENCES_CLEAR(ANN) ((ANN)->common.aux) = 0
+#define NUM_REFERENCES_INC(ANN) (ANN)->common.aux = (void*) (((size_t)((ANN)->common.aux)) + 1)
+#define NUM_REFERENCES_SET(ANN, VAL) (ANN)->common.aux = (void*) ((void *)(VAL))
+
+/* Obstack used to hold grouping bitmaps and other temporary bitmaps used by
+   aliasing.  */
+static bitmap_obstack alias_obstack;
/* 'true' after aliases have been computed (see compute_may_aliases). */
bool aliases_computed_p;
@@ -66,7 +79,7 @@ struct alias_map_d
/* Set of variables aliased with VAR. This is the exact same
information contained in VAR_ANN (VAR)->MAY_ALIASES, but in
bitmap form to speed up alias grouping. */
- sbitmap may_aliases;
+ bitmap may_aliases;
};
@@ -100,11 +113,6 @@ struct alias_info
/* Number of const/pure function calls found in the program. */
size_t num_pure_const_calls_found;
- /* Array of counters to keep track of how many times each pointer has
- been dereferenced in the program. This is used by the alias grouping
- heuristic in compute_flow_insensitive_aliasing. */
- varray_type num_references;
-
/* Total number of virtual operands that will be needed to represent
all the aliases of all the pointers found in the program. */
long total_alias_vops;
@@ -490,16 +498,18 @@ static struct alias_info *
init_alias_info (void)
{
struct alias_info *ai;
+ referenced_var_iterator rvi;
+ tree var;
+ bitmap_obstack_initialize (&alias_obstack);
ai = xcalloc (1, sizeof (struct alias_info));
ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
sbitmap_zero (ai->ssa_names_visited);
VARRAY_TREE_INIT (ai->processed_ptrs, 50, "processed_ptrs");
- ai->addresses_needed = BITMAP_ALLOC (NULL);
- VARRAY_UINT_INIT (ai->num_references, num_referenced_vars, "num_references");
- ai->written_vars = BITMAP_ALLOC (NULL);
- ai->dereferenced_ptrs_store = BITMAP_ALLOC (NULL);
- ai->dereferenced_ptrs_load = BITMAP_ALLOC (NULL);
+ ai->addresses_needed = BITMAP_ALLOC (&alias_obstack);
+ ai->written_vars = BITMAP_ALLOC (&alias_obstack);
+ ai->dereferenced_ptrs_store = BITMAP_ALLOC (&alias_obstack);
+ ai->dereferenced_ptrs_load = BITMAP_ALLOC (&alias_obstack);
/* If aliases have been computed before, clear existing information. */
if (aliases_computed_p)
@@ -512,13 +522,13 @@ init_alias_info (void)
bitmap_clear (addressable_vars);
/* Clear flow-insensitive alias information from each symbol. */
- for (i = 0; i < num_referenced_vars; i++)
+ FOR_EACH_REFERENCED_VAR (var, rvi)
{
- tree var = referenced_var (i);
var_ann_t ann = var_ann (var);
-
+
ann->is_alias_tag = 0;
ann->may_aliases = NULL;
+ NUM_REFERENCES_CLEAR (ann);
/* Since we are about to re-discover call-clobbered
variables, clear the call-clobbered flag. Variables that
@@ -577,29 +587,32 @@ static void
delete_alias_info (struct alias_info *ai)
{
size_t i;
+ referenced_var_iterator rvi;
+ tree var;
sbitmap_free (ai->ssa_names_visited);
ai->processed_ptrs = NULL;
BITMAP_FREE (ai->addresses_needed);
for (i = 0; i < ai->num_addressable_vars; i++)
+ free (ai->addressable_vars[i]);
+
+ FOR_EACH_REFERENCED_VAR(var, rvi)
{
- sbitmap_free (ai->addressable_vars[i]->may_aliases);
- free (ai->addressable_vars[i]);
+ var_ann_t ann = var_ann (var);
+ NUM_REFERENCES_CLEAR (ann);
}
+
free (ai->addressable_vars);
for (i = 0; i < ai->num_pointers; i++)
- {
- sbitmap_free (ai->pointers[i]->may_aliases);
- free (ai->pointers[i]);
- }
+ free (ai->pointers[i]);
free (ai->pointers);
- ai->num_references = NULL;
BITMAP_FREE (ai->written_vars);
BITMAP_FREE (ai->dereferenced_ptrs_store);
BITMAP_FREE (ai->dereferenced_ptrs_load);
+ bitmap_obstack_release (&alias_obstack);
free (ai);
}
@@ -621,7 +634,6 @@ collect_points_to_info_for (struct alias_info *ai, tree ptr)
}
}
-
/* Traverse use-def links for all the pointers in the program to collect
address escape and points-to information.
@@ -657,17 +669,18 @@ compute_points_to_and_addr_escape (struct alias_info *ai)
chains). */
addr_taken = addresses_taken (stmt);
if (addr_taken)
- EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i, bi)
+ EXECUTE_IF_SET_IN_BITMAP (addr_taken, 0, i, bi)
{
tree var = referenced_var (i);
- bitmap_set_bit (ai->addresses_needed, var_ann (var)->uid);
+ bitmap_set_bit (ai->addresses_needed, DECL_UID (var));
if (stmt_escapes_p)
mark_call_clobbered (var);
}
FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
{
- var_ann_t v_ann = var_ann (SSA_NAME_VAR (op));
+ tree var = SSA_NAME_VAR (op);
+ var_ann_t v_ann = var_ann (var);
struct ptr_info_def *pi;
bool is_store;
unsigned num_uses, num_derefs;
@@ -679,8 +692,8 @@ compute_points_to_and_addr_escape (struct alias_info *ai)
because we are processing regular variables, not
memory tags (the array's initial size is set to
NUM_REFERENCED_VARS). */
- if (may_be_aliased (SSA_NAME_VAR (op)))
- (VARRAY_UINT (ai->num_references, v_ann->uid))++;
+ if (may_be_aliased (var))
+ NUM_REFERENCES_INC (v_ann);
if (!POINTER_TYPE_P (TREE_TYPE (op)))
continue;
@@ -700,18 +713,18 @@ compute_points_to_and_addr_escape (struct alias_info *ai)
pi->is_dereferenced = 1;
/* Keep track of how many time we've dereferenced each
- pointer. Again, we don't need to grow
- AI->NUM_REFERENCES because we're processing
- existing program variables. */
- (VARRAY_UINT (ai->num_references, v_ann->uid))++;
+ pointer. */
+ NUM_REFERENCES_INC (v_ann);
/* If this is a store operation, mark OP as being
dereferenced to store, otherwise mark it as being
dereferenced to load. */
if (is_store)
- bitmap_set_bit (ai->dereferenced_ptrs_store, v_ann->uid);
+ bitmap_set_bit (ai->dereferenced_ptrs_store,
+ DECL_UID (var));
else
- bitmap_set_bit (ai->dereferenced_ptrs_load, v_ann->uid);
+ bitmap_set_bit (ai->dereferenced_ptrs_load,
+ DECL_UID (var));
}
if (stmt_escapes_p && num_derefs < num_uses)
@@ -727,7 +740,8 @@ compute_points_to_and_addr_escape (struct alias_info *ai)
operation inside the called function. */
if (get_call_expr_in (stmt))
{
- bitmap_set_bit (ai->dereferenced_ptrs_store, v_ann->uid);
+ bitmap_set_bit (ai->dereferenced_ptrs_store,
+ DECL_UID (var));
pi->is_dereferenced = 1;
}
}
@@ -740,9 +754,9 @@ compute_points_to_and_addr_escape (struct alias_info *ai)
{
tree var = SSA_NAME_VAR (op);
var_ann_t ann = var_ann (var);
- bitmap_set_bit (ai->written_vars, ann->uid);
+ bitmap_set_bit (ai->written_vars, DECL_UID (var));
if (may_be_aliased (var))
- (VARRAY_UINT (ai->num_references, ann->uid))++;
+ NUM_REFERENCES_INC (ann);
if (POINTER_TYPE_P (TREE_TYPE (op)))
collect_points_to_info_for (ai, op);
@@ -752,8 +766,7 @@ compute_points_to_and_addr_escape (struct alias_info *ai)
FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_VIRTUAL_DEFS)
{
tree var = DECL_P (op) ? op : SSA_NAME_VAR (op);
- var_ann_t ann = var_ann (var);
- bitmap_set_bit (ai->written_vars, ann->uid);
+ bitmap_set_bit (ai->written_vars, DECL_UID (var));
}
/* After promoting variables and computing aliasing we will
@@ -861,7 +874,6 @@ create_name_tags (struct alias_info *ai)
}
-
/* For every pointer P_i in AI->PROCESSED_PTRS, create may-alias sets for
the name memory tag (NMT) associated with P_i. If P_i escapes, then its
name tag and the variables it points-to are call-clobbered. Finally, if
@@ -961,8 +973,7 @@ compute_flow_insensitive_aliasing (struct alias_info *ai)
var_ann_t tag_ann = var_ann (tag);
p_map->total_alias_vops = 0;
- p_map->may_aliases = sbitmap_alloc (num_referenced_vars);
- sbitmap_zero (p_map->may_aliases);
+ p_map->may_aliases = BITMAP_ALLOC (&alias_obstack);
for (j = 0; j < ai->num_addressable_vars; j++)
{
@@ -985,9 +996,9 @@ compute_flow_insensitive_aliasing (struct alias_info *ai)
So we first check the call_clobbered status of the
tag and variable before querying the bitmap. */
tag_stored_p = is_call_clobbered (tag)
- || bitmap_bit_p (ai->written_vars, tag_ann->uid);
+ || bitmap_bit_p (ai->written_vars, DECL_UID (tag));
var_stored_p = is_call_clobbered (var)
- || bitmap_bit_p (ai->written_vars, v_ann->uid);
+ || bitmap_bit_p (ai->written_vars, DECL_UID (var));
if (!tag_stored_p && !var_stored_p)
continue;
@@ -996,8 +1007,8 @@ compute_flow_insensitive_aliasing (struct alias_info *ai)
subvar_t svars;
size_t num_tag_refs, num_var_refs;
- num_tag_refs = VARRAY_UINT (ai->num_references, tag_ann->uid);
- num_var_refs = VARRAY_UINT (ai->num_references, v_ann->uid);
+ num_tag_refs = NUM_REFERENCES (tag_ann);
+ num_var_refs = NUM_REFERENCES (v_ann);
/* Add VAR to TAG's may-aliases set. */
@@ -1013,7 +1024,7 @@ compute_flow_insensitive_aliasing (struct alias_info *ai)
add_may_alias (tag, sv->var);
/* Update the bitmap used to represent TAG's alias set
in case we need to group aliases. */
- SET_BIT (p_map->may_aliases, var_ann (sv->var)->uid);
+ bitmap_set_bit (p_map->may_aliases, DECL_UID (sv->var));
}
}
else
@@ -1021,7 +1032,7 @@ compute_flow_insensitive_aliasing (struct alias_info *ai)
add_may_alias (tag, var);
/* Update the bitmap used to represent TAG's alias set
in case we need to group aliases. */
- SET_BIT (p_map->may_aliases, var_ann (var)->uid);
+ bitmap_set_bit (p_map->may_aliases, DECL_UID (var));
}
/* Update the total number of virtual operands due to
@@ -1063,13 +1074,13 @@ compute_flow_insensitive_aliasing (struct alias_info *ai)
size_t j;
struct alias_map_d *p_map1 = ai->pointers[i];
tree tag1 = var_ann (p_map1->var)->type_mem_tag;
- sbitmap may_aliases1 = p_map1->may_aliases;
+ bitmap may_aliases1 = p_map1->may_aliases;
for (j = i + 1; j < ai->num_pointers; j++)
{
struct alias_map_d *p_map2 = ai->pointers[j];
tree tag2 = var_ann (p_map2->var)->type_mem_tag;
- sbitmap may_aliases2 = p_map2->may_aliases;
+ bitmap may_aliases2 = p_map2->may_aliases;
/* If the pointers may not point to each other, do nothing. */
if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set))
@@ -1077,30 +1088,30 @@ compute_flow_insensitive_aliasing (struct alias_info *ai)
/* The two pointers may alias each other. If they already have
symbols in common, do nothing. */
- if (sbitmap_any_common_bits (may_aliases1, may_aliases2))
+ if (bitmap_intersect_p (may_aliases1, may_aliases2))
continue;
- if (sbitmap_first_set_bit (may_aliases2) >= 0)
+ if (!bitmap_empty_p (may_aliases2))
{
unsigned int k;
- sbitmap_iterator sbi;
+ bitmap_iterator bi;
/* Add all the aliases for TAG2 into TAG1's alias set.
FIXME, update grouping heuristic counters. */
- EXECUTE_IF_SET_IN_SBITMAP (may_aliases2, 0, k, sbi)
+ EXECUTE_IF_SET_IN_BITMAP (may_aliases2, 0, k, bi)
add_may_alias (tag1, referenced_var (k));
- sbitmap_a_or_b (may_aliases1, may_aliases1, may_aliases2);
+ bitmap_ior_into (may_aliases1, may_aliases2);
}
else
{
/* Since TAG2 does not have any aliases of its own, add
TAG2 itself to the alias set of TAG1. */
add_may_alias (tag1, tag2);
- SET_BIT (may_aliases1, var_ann (tag2)->uid);
+ bitmap_set_bit (may_aliases1, DECL_UID (tag2));
}
}
}
-
+
if (dump_file)
fprintf (dump_file, "%s: Total number of aliased vops: %ld\n",
get_name (current_function_decl),
@@ -1141,14 +1152,14 @@ total_alias_vops_cmp (const void *p, const void *q)
may-aliases(V2) = { TAG } */
static void
-group_aliases_into (tree tag, sbitmap tag_aliases, struct alias_info *ai)
+group_aliases_into (tree tag, bitmap tag_aliases, struct alias_info *ai)
{
unsigned int i;
var_ann_t tag_ann = var_ann (tag);
- size_t num_tag_refs = VARRAY_UINT (ai->num_references, tag_ann->uid);
- sbitmap_iterator sbi;
+ size_t num_tag_refs = NUM_REFERENCES (tag_ann);
+ bitmap_iterator bi;
- EXECUTE_IF_SET_IN_SBITMAP (tag_aliases, 0, i, sbi)
+ EXECUTE_IF_SET_IN_BITMAP (tag_aliases, 0, i, bi)
{
tree var = referenced_var (i);
var_ann_t ann = var_ann (var);
@@ -1256,7 +1267,7 @@ group_aliases (struct alias_info *ai)
{
size_t j;
tree tag1 = var_ann (ai->pointers[i]->var)->type_mem_tag;
- sbitmap tag1_aliases = ai->pointers[i]->may_aliases;
+ bitmap tag1_aliases = ai->pointers[i]->may_aliases;
/* Skip tags that have been grouped already. */
if (ai->pointers[i]->grouped_p)
@@ -1267,16 +1278,16 @@ group_aliases (struct alias_info *ai)
aliases into TAG1. */
for (j = i + 1; j < ai->num_pointers; j++)
{
- sbitmap tag2_aliases = ai->pointers[j]->may_aliases;
+ bitmap tag2_aliases = ai->pointers[j]->may_aliases;
- if (sbitmap_any_common_bits (tag1_aliases, tag2_aliases))
+ if (bitmap_intersect_p (tag1_aliases, tag2_aliases))
{
tree tag2 = var_ann (ai->pointers[j]->var)->type_mem_tag;
- sbitmap_a_or_b (tag1_aliases, tag1_aliases, tag2_aliases);
+ bitmap_ior_into (tag1_aliases, tag2_aliases);
/* TAG2 does not need its aliases anymore. */
- sbitmap_zero (tag2_aliases);
+ bitmap_clear (tag2_aliases);
var_ann (tag2)->may_aliases = NULL;
/* TAG1 is the unique alias of TAG2. */
@@ -1372,14 +1383,15 @@ create_alias_map_for (tree var, struct alias_info *ai)
static void
setup_pointers_and_addressables (struct alias_info *ai)
{
- size_t i, n_vars, num_addressable_vars, num_pointers;
+ size_t n_vars, num_addressable_vars, num_pointers;
+ referenced_var_iterator rvi;
+ tree var;
/* Size up the arrays ADDRESSABLE_VARS and POINTERS. */
num_addressable_vars = num_pointers = 0;
- for (i = 0; i < num_referenced_vars; i++)
+
+ FOR_EACH_REFERENCED_VAR (var, rvi)
{
- tree var = referenced_var (i);
-
if (may_be_aliased (var))
num_addressable_vars++;
@@ -1388,7 +1400,7 @@ setup_pointers_and_addressables (struct alias_info *ai)
/* Since we don't keep track of volatile variables, assume that
these pointers are used in indirect store operations. */
if (TREE_THIS_VOLATILE (var))
- bitmap_set_bit (ai->dereferenced_ptrs_store, var_ann (var)->uid);
+ bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));
num_pointers++;
}
@@ -1410,9 +1422,8 @@ setup_pointers_and_addressables (struct alias_info *ai)
unnecessarily. */
n_vars = num_referenced_vars;
- for (i = 0; i < n_vars; i++)
+ FOR_EACH_REFERENCED_VAR (var, rvi)
{
- tree var = referenced_var (i);
var_ann_t v_ann = var_ann (var);
subvar_t svars;
@@ -1436,7 +1447,7 @@ setup_pointers_and_addressables (struct alias_info *ai)
cleanup passes. */
if (TREE_ADDRESSABLE (var) && v_ann->mem_tag_kind != STRUCT_FIELD)
{
- if (!bitmap_bit_p (ai->addresses_needed, v_ann->uid)
+ if (!bitmap_bit_p (ai->addresses_needed, DECL_UID (var))
&& TREE_CODE (var) != RESULT_DECL
&& !is_global_var (var))
{
@@ -1453,8 +1464,8 @@ setup_pointers_and_addressables (struct alias_info *ai)
for (sv = svars; sv; sv = sv->next)
{
- var_ann_t svann = var_ann (sv->var);
- if (bitmap_bit_p (ai->addresses_needed, svann->uid))
+ if (bitmap_bit_p (ai->addresses_needed,
+ DECL_UID (sv->var)))
okay_to_mark = false;
mark_sym_for_renaming (sv->var);
}
@@ -1472,13 +1483,13 @@ setup_pointers_and_addressables (struct alias_info *ai)
used when scanning operands for ASM_EXPRs that
clobber memory. In those cases, we need to clobber
all call-clobbered variables and all addressables. */
- bitmap_set_bit (addressable_vars, v_ann->uid);
+ bitmap_set_bit (addressable_vars, DECL_UID (var));
if (var_can_have_subvars (var)
&& (svars = get_subvars_for_var (var)))
{
subvar_t sv;
for (sv = svars; sv; sv = sv->next)
- bitmap_set_bit (addressable_vars, var_ann (sv->var)->uid);
+ bitmap_set_bit (addressable_vars, DECL_UID (sv->var));
}
}
@@ -1496,8 +1507,8 @@ setup_pointers_and_addressables (struct alias_info *ai)
array and create a type memory tag for them. */
if (POINTER_TYPE_P (TREE_TYPE (var)))
{
- if ((bitmap_bit_p (ai->dereferenced_ptrs_store, v_ann->uid)
- || bitmap_bit_p (ai->dereferenced_ptrs_load, v_ann->uid)))
+ if ((bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var))
+ || bitmap_bit_p (ai->dereferenced_ptrs_load, DECL_UID (var))))
{
tree tag;
var_ann_t t_ann;
@@ -1525,8 +1536,8 @@ setup_pointers_and_addressables (struct alias_info *ai)
/* If pointer VAR has been used in a store operation,
then its memory tag must be marked as written-to. */
- if (bitmap_bit_p (ai->dereferenced_ptrs_store, v_ann->uid))
- bitmap_set_bit (ai->written_vars, t_ann->uid);
+ if (bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var)))
+ bitmap_set_bit (ai->written_vars, DECL_UID (tag));
/* If pointer VAR is a global variable or a PARM_DECL,
then its memory tag should be considered a global
@@ -1537,13 +1548,11 @@ setup_pointers_and_addressables (struct alias_info *ai)
/* All the dereferences of pointer VAR count as
references of TAG. Since TAG can be associated with
several pointers, add the dereferences of VAR to the
- TAG. We may need to grow AI->NUM_REFERENCES because
- we have been adding name and type tags. */
- if (t_ann->uid >= VARRAY_SIZE (ai->num_references))
- VARRAY_GROW (ai->num_references, t_ann->uid + 10);
+ TAG. */
- VARRAY_UINT (ai->num_references, t_ann->uid)
- += VARRAY_UINT (ai->num_references, v_ann->uid);
+ NUM_REFERENCES_SET (t_ann,
+ NUM_REFERENCES (t_ann) +
+ NUM_REFERENCES (v_ann));
}
else
{
@@ -1643,7 +1652,7 @@ maybe_create_global_var (struct alias_info *ai)
/* Mark all call-clobbered symbols for renaming. Since the initial
rewrite into SSA ignored all call sites, we may need to rename
- .GLOBAL_VAR and the call-clobbered variables. */
+ .GLOBAL_VAR and the call-clobbered variables. */
EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
{
tree var = referenced_var (i);
@@ -2019,7 +2028,7 @@ add_pointed_to_var (struct alias_info *ai, tree ptr, tree value)
subvar_t sv;
svars = get_subvars_for_var (ref);
- uid = var_ann (pt_var)->uid;
+ uid = DECL_UID (pt_var);
if (pi->pt_vars == NULL)
pi->pt_vars = BITMAP_GGC_ALLOC ();
@@ -2032,15 +2041,15 @@ add_pointed_to_var (struct alias_info *ai, tree ptr, tree value)
{
if (overlap_subvar (offset, size, sv, NULL))
{
- bitmap_set_bit (pi->pt_vars, var_ann (sv->var)->uid);
- bitmap_set_bit (ai->addresses_needed, var_ann (sv->var)->uid);
+ bitmap_set_bit (pi->pt_vars, DECL_UID (sv->var));
+ bitmap_set_bit (ai->addresses_needed, DECL_UID (sv->var));
}
}
}
else if (pt_var && SSA_VAR_P (pt_var))
{
- uid = var_ann (pt_var)->uid;
+ uid = DECL_UID (pt_var);
if (pi->pt_vars == NULL)
pi->pt_vars = BITMAP_GGC_ALLOC ();
@@ -2053,7 +2062,7 @@ add_pointed_to_var (struct alias_info *ai, tree ptr, tree value)
subvar_t sv;
for (sv = svars; sv; sv = sv->next)
{
- uid = var_ann (sv->var)->uid;
+ uid = DECL_UID (sv->var);
bitmap_set_bit (ai->addresses_needed, uid);
bitmap_set_bit (pi->pt_vars, uid);
}
@@ -2419,30 +2428,32 @@ dump_alias_info (FILE *file)
size_t i;
const char *funcname
= lang_hooks.decl_printable_name (current_function_decl, 2);
+ referenced_var_iterator rvi;
+ tree var;
fprintf (file, "\nFlow-insensitive alias information for %s\n\n", funcname);
fprintf (file, "Aliased symbols\n\n");
- for (i = 0; i < num_referenced_vars; i++)
+
+ FOR_EACH_REFERENCED_VAR (var, rvi)
{
- tree var = referenced_var (i);
if (may_be_aliased (var))
dump_variable (file, var);
}
fprintf (file, "\nDereferenced pointers\n\n");
- for (i = 0; i < num_referenced_vars; i++)
+
+ FOR_EACH_REFERENCED_VAR (var, rvi)
{
- tree var = referenced_var (i);
var_ann_t ann = var_ann (var);
if (ann->type_mem_tag)
dump_variable (file, var);
}
fprintf (file, "\nType memory tags\n\n");
- for (i = 0; i < num_referenced_vars; i++)
+
+ FOR_EACH_REFERENCED_VAR (var, rvi)
{
- tree var = referenced_var (i);
var_ann_t ann = var_ann (var);
if (ann->mem_tag_kind == TYPE_TAG)
dump_variable (file, var);
@@ -2467,9 +2478,9 @@ dump_alias_info (FILE *file)
}
fprintf (file, "\nName memory tags\n\n");
- for (i = 0; i < num_referenced_vars; i++)
+
+ FOR_EACH_REFERENCED_VAR (var, rvi)
{
- tree var = referenced_var (i);
var_ann_t ann = var_ann (var);
if (ann->mem_tag_kind == NAME_TAG)
dump_variable (file, var);
@@ -2578,19 +2589,19 @@ dump_points_to_info (FILE *file)
{
basic_block bb;
block_stmt_iterator si;
- size_t i;
ssa_op_iter iter;
const char *fname =
lang_hooks.decl_printable_name (current_function_decl, 2);
+ referenced_var_iterator rvi;
+ tree var;
fprintf (file, "\n\nPointed-to sets for pointers in %s\n\n", fname);
/* First dump points-to information for the default definitions of
pointer variables. This is necessary because default definitions are
not part of the code. */
- for (i = 0; i < num_referenced_vars; i++)
+ FOR_EACH_REFERENCED_VAR (var, rvi)
{
- tree var = referenced_var (i);
if (POINTER_TYPE_P (TREE_TYPE (var)))
{
var_ann_t ann = var_ann (var);
@@ -2709,13 +2720,14 @@ add_type_alias (tree ptr, tree var)
tree tag;
var_ann_t ann = var_ann (ptr);
subvar_t svars;
+
if (ann->type_mem_tag == NULL_TREE)
{
- size_t i;
tree q = NULL_TREE;
tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
HOST_WIDE_INT tag_set = get_alias_set (tag_type);
+ referenced_var_iterator rvi;
/* PTR doesn't have a type tag, create a new one and add VAR to
the new tag's alias set.
@@ -2724,10 +2736,8 @@ add_type_alias (tree ptr, tree var)
whether there is another pointer Q with the same alias set as
PTR. This could be sped up by having type tags associated
with types. */
- for (i = 0; i < num_referenced_vars; i++)
+ FOR_EACH_REFERENCED_VAR (q, rvi)
{
- q = referenced_var (i);
-
if (POINTER_TYPE_P (TREE_TYPE (q))
&& tag_set == get_alias_set (TREE_TYPE (TREE_TYPE (q))))
{
@@ -2853,6 +2863,8 @@ new_type_alias (tree ptr, tree var)
}
}
+
+
/* This represents the used range of a variable. */
typedef struct used_part
@@ -2870,7 +2882,68 @@ typedef struct used_part
/* An array of used_part structures, indexed by variable uid. */
-static used_part_t *used_portions;
+static htab_t used_portions;
+
+struct used_part_map
+{
+ unsigned int uid;
+ used_part_t to;
+};
+
+/* Return true if the uids in the two used part maps are equal.  */
+
+static int
+used_part_map_eq (const void *va, const void *vb)
+{
+ const struct used_part_map *a = va, *b = vb;
+ return (a->uid == b->uid);
+}
+
+/* Hash a used_part_map structure by its uid.  */
+
+static unsigned int
+used_part_map_hash (const void *item)
+{
+ return ((const struct used_part_map *)item)->uid;
+}
+
+/* Free a used part map element. */
+
+static void
+free_used_part_map (void *item)
+{
+ free (((struct used_part_map *)item)->to);
+}
+
+/* Lookup a used_part structure for a UID. */
+
+static used_part_t
+up_lookup (unsigned int uid)
+{
+ struct used_part_map *h, in;
+ in.uid = uid;
+ h = htab_find_with_hash (used_portions, &in, uid);
+ if (!h)
+ return NULL;
+ return h->to;
+}
+
+/* Insert the pair UID, TO into the used part hashtable. */
+
+static void
+up_insert (unsigned int uid, used_part_t to)
+{
+ struct used_part_map *h;
+ void **loc;
+
+ h = xmalloc (sizeof (struct used_part_map));
+ h->uid = uid;
+ h->to = to;
+ loc = htab_find_slot_with_hash (used_portions, h,
+ uid, INSERT);
+ *(struct used_part_map **) loc = h;
+}
+
/* Given a variable uid, UID, get or create the entry in the used portions
table for the variable. */
@@ -2879,7 +2952,7 @@ static used_part_t
get_or_create_used_part_for (size_t uid)
{
used_part_t up;
- if (used_portions[uid] == NULL)
+ if ((up = up_lookup (uid)) == NULL)
{
up = xcalloc (1, sizeof (struct used_part));
up->minused = INT_MAX;
@@ -2887,8 +2960,7 @@ get_or_create_used_part_for (size_t uid)
up->explicit_uses = false;
up->implicit_uses = false;
}
- else
- up = used_portions[uid];
+
return up;
}
@@ -2901,12 +2973,12 @@ create_overlap_variables_for (tree var)
{
VEC(fieldoff_s,heap) *fieldstack = NULL;
used_part_t up;
- size_t uid = var_ann (var)->uid;
+ size_t uid = DECL_UID (var);
- if (used_portions[uid] == NULL)
+ if (!up_lookup (uid))
return;
- up = used_portions[uid];
+ up = up_lookup (uid);
push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0, NULL);
if (VEC_length (fieldoff_s, fieldstack) != 0)
{
@@ -3079,7 +3151,7 @@ find_used_portions (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
&unsignedp, &volatilep, false);
if (DECL_P (ref) && offset == NULL && bitsize != -1)
{
- size_t uid = var_ann (ref)->uid;
+ size_t uid = DECL_UID (ref);
used_part_t up;
up = get_or_create_used_part_for (uid);
@@ -3090,7 +3162,7 @@ find_used_portions (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
up->maxused = bitpos + bitsize;
up->explicit_uses = true;
- used_portions[uid] = up;
+ up_insert (uid, up);
*walk_subtrees = 0;
return NULL_TREE;
@@ -3102,7 +3174,7 @@ find_used_portions (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
&& TREE_CODE (DECL_SIZE (ref)) == INTEGER_CST)
{
used_part_t up;
- size_t uid = var_ann (ref)->uid;
+ size_t uid = DECL_UID (ref);
up = get_or_create_used_part_for (uid);
@@ -3111,7 +3183,7 @@ find_used_portions (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
up->implicit_uses = true;
- used_portions[uid] = up;
+ up_insert (uid, up);
*walk_subtrees = 0;
return NULL_TREE;
@@ -3134,7 +3206,7 @@ find_used_portions (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
&& TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
{
used_part_t up;
- size_t uid = var_ann (var)->uid;
+ size_t uid = DECL_UID (var);
up = get_or_create_used_part_for (uid);
@@ -3142,7 +3214,7 @@ find_used_portions (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
up->implicit_uses = true;
- used_portions[uid] = up;
+ up_insert (uid, up);
*walk_subtrees = 0;
return NULL_TREE;
}
@@ -3157,7 +3229,7 @@ find_used_portions (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
&& TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
{
used_part_t up;
- size_t uid = var_ann (var)->uid;
+ size_t uid = DECL_UID (var);
up = get_or_create_used_part_for (uid);
@@ -3165,7 +3237,7 @@ find_used_portions (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var));
up->implicit_uses = true;
- used_portions[uid] = up;
+ up_insert (uid, up);
*walk_subtrees = 0;
return NULL_TREE;
}
@@ -3179,22 +3251,17 @@ find_used_portions (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
return NULL_TREE;
}
-/* We are about to create some new referenced variables, and we need the
- before size. */
-
-static size_t old_referenced_vars;
-
-
/* Create structure field variables for structures used in this function. */
static void
create_structure_vars (void)
{
basic_block bb;
- size_t i;
+ referenced_var_iterator rvi;
+ tree var;
- old_referenced_vars = num_referenced_vars;
- used_portions = xcalloc (num_referenced_vars, sizeof (used_part_t));
+ used_portions = htab_create (10, used_part_map_hash, used_part_map_eq,
+ free_used_part_map);
FOR_EACH_BB (bb)
{
@@ -3206,9 +3273,8 @@ create_structure_vars (void)
NULL);
}
}
- for (i = 0; i < old_referenced_vars; i++)
+ FOR_EACH_REFERENCED_VAR (var, rvi)
{
- tree var = referenced_var (i);
/* The C++ FE creates vars without DECL_SIZE set, for some reason. */
if (var
&& DECL_SIZE (var)
@@ -3217,10 +3283,8 @@ create_structure_vars (void)
&& TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
create_overlap_variables_for (var);
}
- for (i = 0; i < old_referenced_vars; i++)
- free (used_portions[i]);
+ htab_delete (used_portions);
- free (used_portions);
}
static bool