author     Diego Novillo <dnovillo@redhat.com>   2006-07-06 21:40:33 +0000
committer  Diego Novillo <dnovillo@redhat.com>   2006-07-06 21:40:33 +0000
commit     61ae379339ed7e7b9bac78b922a80e3cbeef3314 (patch)
tree       471f95eea7c086116092617a468850cfb8b48963
parent     e6976bf7a14b146fb1f0784324c2a44589d1d96b (diff)
* tree-ssa-operands.h (SSA_OPERAND_MEMORY_SIZE): Increase buffer size.
(get_loads_and_stores): Declare.
(push_stmt_changes): Declare.
(pop_stmt_changes): Declare.
(enum):
* doc/tree-ssa.texi: Update documentation for virtual operands and
the use of push_stmt_changes/pop_stmt_changes.
* doc/invoke.texi: Remove documentation for params global-var-threshold
and max-aliased-vops.
* tree-into-ssa.c (stale_ssa_names): New local variable.
(struct mem_syms_map_d): Declare.
(mem_syms_map_t): Declare.
(mem_syms_tbl): New local variable.
(set_livein_block): Check for empty def blocks set.
(symbol_marked_for_renaming): Remove superfluous DECL_P assert.
(add_new_name_mapping): Do not add .MEM to the set of virtual names.
(mem_syms_hash): New.
(mem_syms_eq): New.
(mem_syms_free): New.
(syms_referenced_by): New.
(mark_def_sites): Process memory symbols loaded/stored separately.
(register_new_def): Use BLOCK_DEFS_STACK directly. Update callers.
Deal with mismatched symbols when SYM is not a GIMPLE register.
(rewrite_initialize_block): Assert that we are not dealing with .MEM.
(get_reaching_def): For non-GIMPLE registers use .MEM's default
definition when necessary.
(rewrite_memory_stmt): New.
(rewrite_stmt): Call it when dealing with memory statements.
(rewrite_finalize_block): Take the symbol for virtual SSA_NAMEs from
the next slot in the stack.
(dump_defs_stack): New.
(debug_defs_stack): New.
(dump_currdefs): New.
(debug_currdefs): New.
(dump_tree_ssa): Call dump_def_blocks, dump_defs_stack and dump_currdefs.
(debug_def_blocks_r): Also show PHI_BLOCKS.
(dump_def_blocks): New.
(debug_def_blocks): Call it.
(maybe_replace_use): Do not handle memory symbols.
(maybe_register_def): Likewise.
(name_marked_for_release_p): New.
(mark_ssa_name_stale): New.
(stale_ssa_name_p): New.
(preserve_needed_names_in_vops): New.
(rewrite_update_stmt_vops): New.
(register_new_vdef_name): New.
(rewrite_update_memory_stmt): New.
(rewrite_update_stmt): Call it.
(rewrite_update_phi_arguments): Use the LHS of the PHI node when the
argument is a memory symbol.
(rewrite_blocks): Move code to delete DEF_BLOCKS to fini_ssa_renamer.
(mark_def_site_blocks): Move DEF_BLOCKS and current def initialization
to init_ssa_renamer.
(init_ssa_renamer): New.
(fini_ssa_renamer): New.
(rewrite_into_ssa): Call them.
(prepare_block_for_update): Call syms_referenced_by to process memory
symbols in the statement.
(dump_update_ssa): Show SSA names made stale by renaming process.
(init_update_ssa): Initialize STALE_SSA_NAMES.
(delete_update_ssa): Free STALE_SSA_NAMES. Call fini_ssa_renamer.
(mark_sym_for_renaming): Reject .MEM. Add temporary workaround when
marking symbols with sub-vars.
(mark_set_for_renaming): Likewise.
(release_ssa_name_after_update_ssa): Call init_update_ssa if needed.
(replace_stale_ssa_names): New.
(update_ssa): Call init_ssa_renamer. Call replace_stale_ssa_names
if needed.
* tree-ssa-loop-im.c (rewrite_mem_refs): Call get_loads_and_stores
to gather memory symbols from a statement.
(gather_mem_refs_stmt): Likewise.
* tree-complex.c (update_all_vops): Remove.
(expand_complex_move): Call mark_symbols_for_renaming.
* tree-pretty-print.c (dump_generic_node): Show (D) for default
SSA names.
(dump_vops): Show all VUSE/VDEF operands in one line.
* tree.c (is_global_var): Call may_be_aliased.
* tree.h (struct tree_memory_tag): Remove field is_used_alone.
Update all users.
(SMT_USED_ALONE): Remove. Update all users.
* tree-pass.h (PROP_smt_usage): Remove. Update all users.
* params.h (GLOBAL_VAR_THRESHOLD): Remove. Update all users.
(MAX_ALIASED_VOPS): Remove. Update all users.
* tree-ssa-dse.c (gate_dse): Temporarily disable.
* tree-stdarg.c (check_all_va_list_escapes): Call get_loads_and_stores
to gather memory symbols from the statement.
* tree-ssa-dom.c (stmts_to_rescan): Change to stack of 'tree *'
instead of 'tree'. Update all users.
* tree-nrv.c (execute_return_slot_opt): Use get_loads_and_stores to
gather memory symbols referenced.
* tree-ssa-alias.c (struct alias_map_d): Remove field grouped_p.
Update all users.
(updating_used_alone): Remove. Update all users.
(lhs_may_store_to): Remove. Update all users.
(recalculate_used_alone): Remove. Update all users.
(total_alias_vops_cmp): Remove. Update all users.
(group_aliases_into): Remove. Update all users.
(group_aliases): Remove. Update all users.
(maybe_create_global_var): Create only if there are no global
variables and a mix of pure and non-pure function call sites.
(compute_may_aliases): Remove stale documentation.
(replace_may_alias): Remove. Update all users.
(may_be_aliased): Call is_global_var.
* tree-dfa.c (mark_symbols_for_renaming): Rename from
mark_new_vars_to_rename. Update all users.
Call get_loads_and_stores to gather memory symbols.
(find_new_referenced_vars):
* tree-ssa-pre.c (gate_pre): Temporarily disable.
* tree-sra.c (sra_replace): Mark virtual SSA names to be released
after update_ssa.
* tree-ssa-copy.c (may_propagate_copy): Handle .MEM.
(merge_alias_info): Likewise.
* tree-ssa.c (verify_ssa_name):
(verify_ssa): Only check VDEFs if aliases have been computed and
the statement has no volatile operands.
(create_mem_var): New.
(delete_tree_ssa): Tidy.
* tree-vect-transform.c (vectorizable_store): Call
get_loads_and_stores to gather memory symbols.
* tree-flow.h (struct var_ann_d): Remove fields in_vuse_list and
in_vdef_list.
(mem_var): Declare.
(register_new_def): Remove.
(recalculate_used_alone): Remove.
(updating_used_alone): Remove.
* tree-ssa-structalias.c (update_alias_info): Use
get_loads_and_stores to gather memory symbols.
* params.def (PARAM_GLOBAL_VAR_THRESHOLD): Remove.
(PARAM_MAX_ALIASED_VOPS): Remove.
* tree-ssanames.c (replace_ssa_name_symbol): Assert that we are not
trying to replace .MEM.
* tree-ssa-operands.c (mem_var): Declare.
(opf_implicit): Rename from opf_non_specific. Update all users.
(opf_def): Rename from opf_is_def.
(opf_use): Rename from opf_none.
(loaded_syms): New local variable.
(stored_syms): New local variable.
(struct scb_d): Declare.
(scb_t): Define.
(scb_stack): New local variable.
(gathering_loads_stores): New.
(init_ssa_operands): Initialize scb_stack.
(fini_ssa_operands): Free scb_stack.
(truncate_ssa_stmt_operands): New.
(ssa_operand_alloc): Assert that size fits in SSA_OPERAND_MEMORY_SIZE.
(realloc_vdef): Terminate operand linked list.
(realloc_vuse): Likewise.
(finalize_ssa_vdef_ops): Do not handle more than one VDEF.
(finalize_ssa_vuse_ops): Likewise.
(finalize_ssa_vuses): Do not look for superfluous VUSE operands.
(append_vdef): Do not add more than a single VDEF.
(append_vuse): Likewise.
(add_virtual_operand): Remove. Update all users.
(add_virtual_operator): New.
(add_mem_symbol): New.
(add_mem_symbols_in_decl): New.
(get_mem_symbols_in_tag): New.
(get_mem_symbols_in_indirect_ref): Rename from
get_indirect_ref_operands. Move tag processing code to
get_mem_symbols_in_tag.
(get_mem_symbols_in_aggregate): New.
(get_mem_symbols_in_tmr): New.
(add_call_clobbered_mem_symbols): Rename from add_call_clobber_ops.
(add_call_read_mem_symbols): Rename from add_call_read_ops.
(get_call_expr_operands): Call add_call_clobbered_mem_symbols and
add_call_read_mem_symbols.
(get_asm_expr_operands): Call add_mem_symbols_in_decl when gathering
loads and stores. Only add a single VDEF/VUSE when not gathering
loads/stores.
(get_aggregate_operands): Extract from get_expr_operands. Call
get_mem_symbols_in_aggregate when gathering loads/stores.
(get_expr_operands) <case ADDR_EXPR>: Return early if gathering
loads/stores.
<case STRUCT_FIELD_TAG>: Remove.
<case SYMBOL_MEMORY_TAG>: Remove.
<case NAME_MEMORY_TAG>: Remove.
<case VAR_DECL>: Do not handle sub-variables.
(update_stmt_operands): Assert that we are not gathering loads
and stores.
(copy_virtual_operands): Do not look for more than one VDEF/VUSE.
(create_ssa_artficial_load_stmt): Likewise.
(dump_loads_and_stores): New.
(debug_loads_and_stores): New.
(get_loads_and_stores_for_phi): New.
(get_loads_and_stores): New.
(push_stmt_changes): New.
(mark_difference_for_renaming): New.
(pop_stmt_changes): New.

testsuite/ChangeLog.mem-ssa

* gcc.dg/tree-ssa/pr23382.c: Update for mem-ssa patterns.
* gcc.dg/tree-ssa/20040302-1.c: Remove use of --param
global-var-threshold.

git-svn-id: https://gcc.gnu.org/svn/gcc/branches/mem-ssa@115232 138bc75d-0d04-0410-961f-82ee72b054a4
-rw-r--r--  gcc/ChangeLog.mem-ssa                           204
-rw-r--r--  gcc/doc/invoke.texi                              16
-rw-r--r--  gcc/doc/tree-ssa.texi                            60
-rw-r--r--  gcc/params.def                                   12
-rw-r--r--  gcc/params.h                                      4
-rw-r--r--  gcc/passes.c                                     11
-rw-r--r--  gcc/testsuite/ChangeLog.mem-ssa                   7
-rw-r--r--  gcc/testsuite/gcc.dg/tree-ssa/20040302-1.c        2
-rw-r--r--  gcc/testsuite/gcc.dg/tree-ssa/pr23382.c          10
-rw-r--r--  gcc/tree-cfg.c                                    9
-rw-r--r--  gcc/tree-complex.c                               20
-rw-r--r--  gcc/tree-dfa.c                                   72
-rw-r--r--  gcc/tree-flow.h                                  74
-rw-r--r--  gcc/tree-into-ssa.c                            1348
-rw-r--r--  gcc/tree-nrv.c                                   28
-rw-r--r--  gcc/tree-pass.h                                   2
-rw-r--r--  gcc/tree-pretty-print.c                          55
-rw-r--r--  gcc/tree-sra.c                                   31
-rw-r--r--  gcc/tree-ssa-alias.c                            596
-rw-r--r--  gcc/tree-ssa-ccp.c                               32
-rw-r--r--  gcc/tree-ssa-copy.c                              22
-rw-r--r--  gcc/tree-ssa-dce.c                               13
-rw-r--r--  gcc/tree-ssa-dom.c                               59
-rw-r--r--  gcc/tree-ssa-dse.c                                3
-rw-r--r--  gcc/tree-ssa-forwprop.c                          15
-rw-r--r--  gcc/tree-ssa-loop-im.c                           35
-rw-r--r--  gcc/tree-ssa-loop-ivopts.c                        8
-rw-r--r--  gcc/tree-ssa-loop-prefetch.c                      3
-rw-r--r--  gcc/tree-ssa-operands.c                        1935
-rw-r--r--  gcc/tree-ssa-operands.h                           6
-rw-r--r--  gcc/tree-ssa-pre.c                               39
-rw-r--r--  gcc/tree-ssa-propagate.c                          9
-rw-r--r--  gcc/tree-ssa-structalias.c                       13
-rw-r--r--  gcc/tree-ssa.c                                   42
-rw-r--r--  gcc/tree-ssanames.c                               1
-rw-r--r--  gcc/tree-stdarg.c                                29
-rw-r--r--  gcc/tree-vect-transform.c                        18
-rw-r--r--  gcc/tree-vrp.c                                    4
-rw-r--r--  gcc/tree.c                                        6
-rw-r--r--  gcc/tree.h                                        6
40 files changed, 2983 insertions, 1876 deletions
diff --git a/gcc/ChangeLog.mem-ssa b/gcc/ChangeLog.mem-ssa
index 1d1c526187b..19f41b9d798 100644
--- a/gcc/ChangeLog.mem-ssa
+++ b/gcc/ChangeLog.mem-ssa
@@ -1,3 +1,207 @@
+2006-07-06 Diego Novillo <dnovillo@redhat.com>
+
+ * tree-ssa-operands.h (SSA_OPERAND_MEMORY_SIZE): Increase buffer size.
+ (get_loads_and_stores): Declare.
+ (push_stmt_changes): Declare.
+ (pop_stmt_changes): Declare.
+ (enum):
+ * doc/tree-ssa.texi: Update documentation for virtual operands
+ and the use of push_stmt_changes/pop_stmt_changes.
+ * doc/invoke.texi: Remove documentation for params
+ global-var-threshold and max-aliased-vops.
+ * tree-into-ssa.c (stale_ssa_names): New local variable.
+ (struct mem_syms_map_d): Declare.
+ (mem_syms_map_t): Declare.
+ (mem_syms_tbl): New local variable.
+ (set_livein_block): Check for empty def blocks set.
+ (symbol_marked_for_renaming): Remove superfluous DECL_P assert.
+ (add_new_name_mapping): Do not add .MEM to the set of virtual names.
+ (mem_syms_hash): New.
+ (mem_syms_eq): New.
+ (mem_syms_free): New.
+ (syms_referenced_by): New.
+ (mark_def_sites): Process memory symbols loaded/stored separately.
+ (register_new_def): Use BLOCK_DEFS_STACK directly. Update callers.
+ Deal with mismatched symbols when SYM is not a GIMPLE register.
+ (rewrite_initialize_block): Assert that we are not dealing
+ with .MEM.
+ (get_reaching_def): For non-GIMPLE registers use .MEM's
+ default definition when necessary.
+ (rewrite_memory_stmt): New.
+ (rewrite_stmt): Call it when dealing with memory statements.
+ (rewrite_finalize_block): Take the symbol for virtual
+ SSA_NAMEs from the next slot in the stack.
+ (dump_defs_stack): New.
+ (debug_defs_stack): New.
+ (dump_currdefs): New.
+ (debug_currdefs): New.
+ (dump_tree_ssa): Call dump_def_blocks, dump_defs_stack and
+ dump_currdefs.
+ (debug_def_blocks_r): Also show PHI_BLOCKS.
+ (dump_def_blocks): New.
+ (debug_def_blocks): Call it.
+ (maybe_replace_use): Do not handle memory symbols.
+ (maybe_register_def): Likewise.
+ (name_marked_for_release_p): New.
+ (mark_ssa_name_stale): New.
+ (stale_ssa_name_p): New.
+ (preserve_needed_names_in_vops): New.
+ (rewrite_update_stmt_vops): New.
+ (register_new_vdef_name): New.
+ (rewrite_update_memory_stmt): New.
+ (rewrite_update_stmt): Call it.
+ (rewrite_update_phi_arguments): Use the LHS of the PHI node
+ when the argument is a memory symbol.
+ (rewrite_blocks): Move code to delete DEF_BLOCKS to
+ fini_ssa_renamer.
+ (mark_def_site_blocks): Move DEF_BLOCKS and current def
+ initialization to init_ssa_renamer.
+ (init_ssa_renamer): New.
+ (fini_ssa_renamer): New.
+ (rewrite_into_ssa): Call them.
+ (prepare_block_for_update): Call syms_referenced_by to process
+ memory symbols in the statement.
+ (dump_update_ssa): Show SSA names made stale by renaming
+ process.
+ (init_update_ssa): Initialize STALE_SSA_NAMES.
+ (delete_update_ssa): Free STALE_SSA_NAMES.
+ Call fini_ssa_renamer.
+ (mark_sym_for_renaming): Reject .MEM.
+ Add temporary workaround when marking symbols with sub-vars.
+ (mark_set_for_renaming): Likewise.
+ (release_ssa_name_after_update_ssa): Call init_update_ssa if
+ needed.
+ (replace_stale_ssa_names): New.
+ (update_ssa): Call init_ssa_renamer.
+ Call replace_stale_ssa_names if needed.
+ * tree-ssa-loop-im.c (rewrite_mem_refs): Call
+ get_loads_and_stores to gather memory symbols from a statement.
+ (gather_mem_refs_stmt): Likewise.
+ * tree-complex.c (update_all_vops): Remove.
+ (expand_complex_move): Call mark_symbols_for_renaming.
+ * tree-pretty-print.c (dump_generic_node): Show (D) for
+ default SSA names.
+ (dump_vops): Show all VUSE/VDEF operands in one line.
+ * tree.c (is_global_var): Call may_be_aliased.
+ * tree.h (struct tree_memory_tag): Remove field is_used_alone.
+ Update all users.
+ (SMT_USED_ALONE): Remove. Update all users.
+ * tree-pass.h (PROP_smt_usage): Remove. Update all users.
+ * params.h (GLOBAL_VAR_THRESHOLD): Remove. Update all users.
+ (MAX_ALIASED_VOPS): Remove. Update all users.
+ * tree-ssa-dse.c (gate_dse): Temporarily disable.
+
+ * tree-stdarg.c (check_all_va_list_escapes): Call
+ get_loads_and_stores to gather memory symbols from the
+ statement.
+ * tree-ssa-dom.c (stmts_to_rescan): Change to stack of
+ 'tree *' instead of 'tree'. Update all users.
+ * tree-nrv.c (execute_return_slot_opt): Use
+ get_loads_and_stores to gather memory symbols referenced.
+ * tree-ssa-alias.c (struct alias_map_d): Remove field
+ grouped_p. Update all users.
+ (updating_used_alone): Remove. Update all users.
+ (lhs_may_store_to): Remove. Update all users.
+ (recalculate_used_alone): Remove. Update all users.
+ (total_alias_vops_cmp): Remove. Update all users.
+ (group_aliases_into): Remove. Update all users.
+ (group_aliases): Remove. Update all users.
+ (maybe_create_global_var): Create only if there are no global
+ variables and a mix of pure and non-pure function call sites.
+ (compute_may_aliases): Remove stale documentation.
+ (replace_may_alias): Remove. Update all users.
+ (may_be_aliased): Call is_global_var.
+ * tree-dfa.c (mark_symbols_for_renaming): Rename from
+ mark_new_vars_to_rename. Update all users.
+ Call get_loads_and_stores to gather memory symbols.
+ (find_new_referenced_vars):
+ * tree-ssa-pre.c (gate_pre): Temporarily disable.
+ * tree-sra.c (sra_replace): Mark virtual SSA names to be
+ released after update_ssa.
+ * tree-ssa-copy.c (may_propagate_copy): Handle .MEM.
+ (merge_alias_info): Likewise.
+ * tree-ssa.c (verify_ssa_name):
+ (verify_ssa): Only check VDEFs if aliases have been computed
+ and the statement has no volatile operands.
+ (create_mem_var): New.
+ (delete_tree_ssa): Tidy.
+ * tree-vect-transform.c (vectorizable_store): Call
+ get_loads_and_stores to gather memory symbols.
+ * tree-flow.h (struct var_ann_d): Remove fields in_vuse_list
+ and in_vdef_list.
+ (mem_var): Declare.
+ (register_new_def): Remove.
+ (recalculate_used_alone): Remove.
+ (updating_used_alone): Remove.
+ * tree-ssa-structalias.c (update_alias_info): Use
+ get_loads_and_stores to gather memory symbols.
+ * params.def (PARAM_GLOBAL_VAR_THRESHOLD): Remove.
+ (PARAM_MAX_ALIASED_VOPS): Remove.
+ * tree-ssanames.c (replace_ssa_name_symbol): Assert that we
+ are not trying to replace .MEM.
+ * tree-ssa-operands.c (mem_var): Declare.
+ (opf_implicit): Rename from opf_non_specific. Update all
+ users.
+ (opf_def): Rename from opf_is_def.
+ (opf_use): Rename from opf_none.
+ (loaded_syms): New local variable.
+ (stored_syms): New local variable.
+ (struct scb_d): Declare.
+ (scb_t): Define.
+ (scb_stack): New local variable.
+ (gathering_loads_stores): New.
+ (init_ssa_operands): Initialize scb_stack.
+ (fini_ssa_operands): Free scb_stack.
+ (truncate_ssa_stmt_operands): New.
+ (ssa_operand_alloc): Assert that size fits in
+ SSA_OPERAND_MEMORY_SIZE.
+ (realloc_vdef): Terminate operand linked list.
+ (realloc_vuse): Likewise.
+ (finalize_ssa_vdef_ops): Do not handle more than one VDEF.
+ (finalize_ssa_vuse_ops): Likewise.
+ (finalize_ssa_vuses): Do not look for superfluous VUSE
+ operands.
+ (append_vdef): Do not add more than a single VDEF.
+ (append_vuse): Likewise.
+ (add_virtual_operand): Remove. Update all users.
+ (add_virtual_operator): New.
+ (add_mem_symbol): New.
+ (add_mem_symbols_in_decl): New.
+ (get_mem_symbols_in_tag): New.
+ (get_mem_symbols_in_indirect_ref): Rename from
+ get_indirect_ref_operands. Move tag processing code to
+ get_mem_symbols_in_tag.
+ (get_mem_symbols_in_aggregate): New.
+ (get_mem_symbols_in_tmr): New.
+ (add_call_clobbered_mem_symbols): Rename from
+ add_call_clobber_ops.
+ (add_call_read_mem_symbols): Rename from add_call_read_ops.
+ (get_call_expr_operands): Call add_call_clobbered_mem_symbols
+ and add_call_read_mem_symbols.
+ (get_asm_expr_operands): Call add_mem_symbols_in_decl when
+ gathering loads and stores.
+ Only add a single VDEF/VUSE when not gathering loads/stores.
+ (get_aggregate_operands): Extract from get_expr_operands.
+ Call get_mem_symbols_in_aggregate when gathering loads/stores.
+ (get_expr_operands) <case ADDR_EXPR>: Return early if
+ gathering loads/stores.
+ <case STRUCT_FIELD_TAG>: Remove.
+ <case SYMBOL_MEMORY_TAG>: Remove.
+ <case NAME_MEMORY_TAG>: Remove.
+ <case VAR_DECL>: Do not handle sub-variables.
+ (update_stmt_operands): Assert that we are not gathering loads
+ and stores.
+ (copy_virtual_operands): Do not look for more than one
+ VDEF/VUSE.
+ (create_ssa_artficial_load_stmt): Likewise.
+ (dump_loads_and_stores): New.
+ (debug_loads_and_stores): New.
+ (get_loads_and_stores_for_phi): New.
+ (get_loads_and_stores): New.
+ (push_stmt_changes): New.
+ (mark_difference_for_renaming): New.
+ (pop_stmt_changes): New.
+
2006-05-29 Diego Novillo <dnovillo@redhat.com>
Mainline merge as of 2006-05-29 (@114205).
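
The central representational change in this revision shows up later in the
pr23382.c pattern update: stores that used to emit one V_MAY_DEF per aliased
symbol are now factored through the artificial .MEM variable. An illustrative
before/after dump (not taken from an actual run; version numbers are made up),
assuming a store through p that may touch a and b:

    # a_7 = V_MAY_DEF <a_3>
    # b_8 = V_MAY_DEF <b_5>
    *p_1 = 3;

becomes a single VDEF operator whose uses are the reaching definitions of
every stored symbol (see rewrite_memory_stmt in the tree-into-ssa.c hunks
below):

    # .MEM_9 = VDEF <a_3, b_5>
    *p_1 = 3;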
diff --git a/gcc/doc/invoke.texi b/gcc/doc/invoke.texi
index f4934184532..a15dd6c1dbd 100644
--- a/gcc/doc/invoke.texi
+++ b/gcc/doc/invoke.texi
@@ -6125,22 +6125,6 @@ Maximum number of basic blocks on path that cse considers. The default is 10.
@item max-cse-insns
The maximum instructions CSE process before flushing. The default is 1000.
-@item global-var-threshold
-
-Counts the number of function calls (@var{n}) and the number of
-call-clobbered variables (@var{v}). If @var{n}x@var{v} is larger than this limit, a
-single artificial variable will be created to represent all the
-call-clobbered variables at function call sites. This artificial
-variable will then be made to alias every call-clobbered variable.
-(done as @code{int * size_t} on the host machine; beware overflow).
-
-@item max-aliased-vops
-
-Maximum number of virtual operands allowed to represent aliases
-before triggering the alias grouping heuristic. Alias grouping
-reduces compile times and memory consumption needed for aliasing at
-the expense of precision loss in alias information.
-
@item ggc-min-expand
GCC uses a garbage collector to manage its own memory allocation. This
diff --git a/gcc/doc/tree-ssa.texi b/gcc/doc/tree-ssa.texi
index 016f812fed4..4e3e67f6a92 100644
--- a/gcc/doc/tree-ssa.texi
+++ b/gcc/doc/tree-ssa.texi
@@ -852,8 +852,8 @@ operands, use the @option{-vops} option to @option{-fdump-tree}:
p = &a;
else
p = &b;
- # a = V_MAY_DEF <a>
- # b = V_MAY_DEF <b>
+ # a = VDEF <a>
+ # b = VDEF <b>
*p = 5;
# VUSE <a>
@@ -862,11 +862,11 @@ operands, use the @option{-vops} option to @option{-fdump-tree}:
@}
@end smallexample
-Notice that @code{V_MAY_DEF} operands have two copies of the referenced
+Notice that @code{VDEF} operands have two copies of the referenced
variable. This indicates that this is not a killing definition of
that variable. In this case we refer to it as a @dfn{may definition}
or @dfn{aliased store}. The presence of the second copy of the
-variable in the @code{V_MAY_DEF} operand will become important when the
+variable in the @code{VDEF} operand will become important when the
function is converted into SSA form. This will be used to link all
the non-killing definitions to prevent optimizations from making
incorrect assumptions about them.
@@ -963,8 +963,8 @@ tree FOR_EACH_SSA_TREE_OPERAND
#define SSA_OP_USE 0x01 /* @r{Real USE operands.} */
#define SSA_OP_DEF 0x02 /* @r{Real DEF operands.} */
#define SSA_OP_VUSE 0x04 /* @r{VUSE operands.} */
-#define SSA_OP_VMAYUSE 0x08 /* @r{USE portion of V_MAY_DEFS.} */
-#define SSA_OP_VMAYDEF 0x10 /* @r{DEF portion of V_MAY_DEFS.} */
+#define SSA_OP_VMAYUSE 0x08 /* @r{USE portion of VDEFS.} */
+#define SSA_OP_VMAYDEF 0x10 /* @r{DEF portion of VDEFS.} */
#define SSA_OP_VMUSTDEF 0x20 /* @r{V_MUST_DEF definitions.} */
/* @r{These are commonly grouped operand flags.} */
@@ -1004,12 +1004,12 @@ aren't using operand pointers, use and defs flags can be mixed.
@}
@end smallexample
-@code{V_MAY_DEF}s are broken into two flags, one for the
+@code{VDEF}s are broken into two flags, one for the
@code{DEF} portion (@code{SSA_OP_VMAYDEF}) and one for the USE portion
(@code{SSA_OP_VMAYUSE}). If all you want to look at are the
-@code{V_MAY_DEF}s together, there is a fourth iterator macro for this,
+@code{VDEF}s together, there is a fourth iterator macro for this,
which returns both a def_operand_p and a use_operand_p for each
-@code{V_MAY_DEF} in the statement. Note that you don't need any flags for
+@code{VDEF} in the statement. Note that you don't need any flags for
this one.
@smallexample
@@ -1400,21 +1400,27 @@ There are several @code{TODO} flags that control the behavior of
The virtual SSA form is harder to preserve than the non-virtual SSA form
mainly because the set of virtual operands for a statement may change at
-what some would consider unexpected times. In general, any time you
-have modified a statement that has virtual operands, you should verify
-whether the list of virtual operands has changed, and if so, mark the
-newly exposed symbols by calling @code{mark_new_vars_to_rename}.
-
-There is one additional caveat to preserving virtual SSA form. When the
-entire set of virtual operands may be eliminated due to better
-disambiguation, a bare SMT will be added to the list of virtual
-operands, to signify the non-visible aliases that the are still being
-referenced. If the set of bare SMT's may change,
-@code{TODO_update_smt_usage} should be added to the todo flags.
-
-With the current pruning code, this can only occur when constants are
-propagated into array references that were previously non-constant, or
-address expressions are propagated into their uses.
+what some would consider unexpected times. In general, statement
+modifications should be bracketed between calls to
+@code{push_stmt_changes} and @code{pop_stmt_changes}. For example,
+
+@smallexample
+ munge_stmt (tree stmt)
+ @{
+ push_stmt_changes (&stmt);
+ ... rewrite STMT ...
+ pop_stmt_changes (&stmt);
+ @}
+@end smallexample
+
+The call to @code{push_stmt_changes} saves the current state of the
+statement operands and the call to @code{pop_stmt_changes} compares
+the saved state with the current one and does the appropriate symbol
+marking for the SSA renamer.
+
+It is possible to modify several statements at a time, provided that
+@code{push_stmt_changes} and @code{pop_stmt_changes} are called in
+LIFO order, as when processing a stack of statements.
@subsection Examining @code{SSA_NAME} nodes
@cindex examining SSA_NAMEs
@@ -1629,11 +1635,11 @@ foo (int i)
p_6 = &b;
# p_1 = PHI <p_4(1), p_6(2)>;
- # a_7 = V_MAY_DEF <a_3>;
- # b_8 = V_MAY_DEF <b_5>;
+ # a_7 = VDEF <a_3>;
+ # b_8 = VDEF <b_5>;
*p_1 = 3;
- # a_9 = V_MAY_DEF <a_7>
+ # a_9 = VDEF <a_7>
# VUSE <b_8>
a_9 = b_8 + 2;
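
The @smallexample above brackets a single statement. A minimal C sketch of
the same protocol applied to two statements at once, using only the
push_stmt_changes/pop_stmt_changes entry points this patch declares in
tree-ssa-operands.h (the rewrite_rhs helper is hypothetical):

static void
munge_two_stmts (tree stmt1, tree stmt2)
{
  /* Save the current operand state of both statements.  */
  push_stmt_changes (&stmt1);
  push_stmt_changes (&stmt2);

  rewrite_rhs (stmt1);    /* Hypothetical modifications.  */
  rewrite_rhs (stmt2);

  /* Pops must mirror the pushes in LIFO order.  Each pop diffs the
     saved operands against the current ones and marks any symbols
     that appeared or disappeared for renaming.  */
  pop_stmt_changes (&stmt2);
  pop_stmt_changes (&stmt1);
}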
diff --git a/gcc/params.def b/gcc/params.def
index e8d861515e5..5a107dbf3eb 100644
--- a/gcc/params.def
+++ b/gcc/params.def
@@ -437,13 +437,6 @@ DEFPARAM(PARAM_VECT_MAX_VERSION_CHECKS,
"Bound on number of runtime checks inserted by the vectorizer's loop versioning",
6, 0, 0)
-/* The product of the next two is used to decide whether or not to
- use .GLOBAL_VAR. See tree-dfa.c. */
-DEFPARAM(PARAM_GLOBAL_VAR_THRESHOLD,
- "global-var-threshold",
- "Given N calls and V call-clobbered vars in a function. Use .GLOBAL_VAR if NxV is larger than this limit",
- 500000, 0, 0)
-
DEFPARAM(PARAM_MAX_CSELIB_MEMORY_LOCATIONS,
"max-cselib-memory-locations",
"The maximum memory locations recorded by cselib",
@@ -479,11 +472,6 @@ DEFPARAM(PARAM_MAX_RELOAD_SEARCH_INSNS,
"The maximum number of instructions to search backward when looking for equivalent reload",
100, 0, 0)
-DEFPARAM(PARAM_MAX_ALIASED_VOPS,
- "max-aliased-vops",
- "The maximum number of virtual operands allowed to represent aliases before triggering alias grouping",
- 500, 0, 0)
-
DEFPARAM(PARAM_MAX_SCHED_REGION_BLOCKS,
"max-sched-region-blocks",
"The maximum number of blocks in a region to be considered for interblock scheduling",
diff --git a/gcc/params.h b/gcc/params.h
index 32c9c8c63fa..69e40acab46 100644
--- a/gcc/params.h
+++ b/gcc/params.h
@@ -135,10 +135,6 @@ typedef enum compiler_param
PARAM_VALUE (PARAM_SMS_DFA_HISTORY)
#define SMS_LOOP_AVERAGE_COUNT_THRESHOLD \
PARAM_VALUE (PARAM_SMS_LOOP_AVERAGE_COUNT_THRESHOLD)
-#define GLOBAL_VAR_THRESHOLD \
- PARAM_VALUE (PARAM_GLOBAL_VAR_THRESHOLD)
-#define MAX_ALIASED_VOPS \
- PARAM_VALUE (PARAM_MAX_ALIASED_VOPS)
#define INTEGER_SHARE_LIMIT \
PARAM_VALUE (PARAM_INTEGER_SHARE_LIMIT)
#define MAX_LAST_VALUE_RTL \
diff --git a/gcc/passes.c b/gcc/passes.c
index fe6f637e868..405e5f4a427 100644
--- a/gcc/passes.c
+++ b/gcc/passes.c
@@ -719,10 +719,6 @@ execute_todo (unsigned int flags)
flags &= ~last_verified;
if (!flags)
return;
-
- /* Always recalculate SMT usage before doing anything else. */
- if (flags & TODO_update_smt_usage)
- recalculate_used_alone ();
/* Always cleanup the CFG before trying to update SSA . */
if (flags & TODO_cleanup_cfg)
@@ -828,9 +824,6 @@ execute_one_pass (struct tree_opt_pass *pass)
gcc_assert ((curr_properties & pass->properties_required)
== pass->properties_required);
- if (pass->properties_destroyed & PROP_smt_usage)
- updating_used_alone = true;
-
/* If a dump file name is present, open it if enabled. */
if (pass->static_pass_number != -1)
{
@@ -891,15 +884,13 @@ execute_one_pass (struct tree_opt_pass *pass)
free ((char *) dump_file_name);
dump_file_name = NULL;
}
+
if (dump_file)
{
dump_end (pass->static_pass_number, dump_file);
dump_file = NULL;
}
- if (pass->properties_destroyed & PROP_smt_usage)
- updating_used_alone = false;
-
return true;
}
diff --git a/gcc/testsuite/ChangeLog.mem-ssa b/gcc/testsuite/ChangeLog.mem-ssa
index 8bcf3448094..b0e7d5b6074 100644
--- a/gcc/testsuite/ChangeLog.mem-ssa
+++ b/gcc/testsuite/ChangeLog.mem-ssa
@@ -1,3 +1,10 @@
+2006-07-06 Diego Novillo <dnovillo@redhat.com>
+
+ * gcc.dg/tree-ssa/pr23382.c: Update for mem-ssa
+ patterns.
+ * gcc.dg/tree-ssa/20040302-1.c: Remove use of --param
+ global-var-threshold.
+
2006-05-22 Aldy Hernandez <aldyh@redhat.com>
* gcc.dg/tree-ssa/complex-5.c: New.
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/20040302-1.c b/gcc/testsuite/gcc.dg/tree-ssa/20040302-1.c
index ef59b041030..8b80128cd60 100644
--- a/gcc/testsuite/gcc.dg/tree-ssa/20040302-1.c
+++ b/gcc/testsuite/gcc.dg/tree-ssa/20040302-1.c
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-O1 --param global-var-threshold=0" } */
+/* { dg-options "-O1" } */
/* Test for .GLOBAL_VAR not being renamed into SSA after alias analysis.
provided by Dale Johannesen in PR 14266. */
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/pr23382.c b/gcc/testsuite/gcc.dg/tree-ssa/pr23382.c
index 191be7ca38b..5c3ea36c0b8 100644
--- a/gcc/testsuite/gcc.dg/tree-ssa/pr23382.c
+++ b/gcc/testsuite/gcc.dg/tree-ssa/pr23382.c
@@ -14,11 +14,11 @@ void f(void)
{
struct a *a = malloc(sizeof(struct a));
}
-/* { dg-final { scan-tree-dump-times "VDEF <HEAP" 1 "alias1"} } */
-/* { dg-final { scan-tree-dump-times "VDEF <HEAP" 1 "alias2"} } */
-/* { dg-final { scan-tree-dump-times "VDEF <HEAP" 1 "alias3"} } */
-/* { dg-final { scan-tree-dump-times "VDEF <HEAP" 1 "alias4"} } */
-/* { dg-final { scan-tree-dump-times "VDEF <HEAP" 1 "alias5"} } */
+/* { dg-final { scan-tree-dump-times "HEAP.* = VDEF <.MEM" 1 "alias1"} } */
+/* { dg-final { scan-tree-dump-times "HEAP.* = VDEF <.MEM" 1 "alias2"} } */
+/* { dg-final { scan-tree-dump-times "HEAP.* = VDEF <.MEM" 1 "alias3"} } */
+/* { dg-final { scan-tree-dump-times "HEAP.* = VDEF <.MEM" 1 "alias4"} } */
+/* { dg-final { scan-tree-dump-times "HEAP.* = VDEF <.MEM" 1 "alias5"} } */
/* { dg-final { cleanup-tree-dump "alias1" } } */
/* { dg-final { cleanup-tree-dump "alias2" } } */
/* { dg-final { cleanup-tree-dump "alias3" } } */
diff --git a/gcc/tree-cfg.c b/gcc/tree-cfg.c
index a3fed03956f..e7e7578b7d9 100644
--- a/gcc/tree-cfg.c
+++ b/gcc/tree-cfg.c
@@ -1263,6 +1263,9 @@ replace_uses_by (tree name, tree val)
FOR_EACH_IMM_USE_STMT (stmt, imm_iter, name)
{
+ if (TREE_CODE (stmt) != PHI_NODE)
+ push_stmt_changes (&stmt);
+
FOR_EACH_IMM_USE_ON_STMT (use, imm_iter)
{
replace_exp (use, val);
@@ -1280,17 +1283,21 @@ replace_uses_by (tree name, tree val)
}
}
}
+
if (TREE_CODE (stmt) != PHI_NODE)
{
tree rhs;
fold_stmt_inplace (stmt);
+
+ /* FIXME. This should go in pop_stmt_changes. */
rhs = get_rhs (stmt);
if (TREE_CODE (rhs) == ADDR_EXPR)
recompute_tree_invariant_for_addr_expr (rhs);
maybe_clean_or_replace_eh_stmt (stmt, stmt);
- mark_new_vars_to_rename (stmt);
+
+ pop_stmt_changes (&stmt);
}
}
diff --git a/gcc/tree-complex.c b/gcc/tree-complex.c
index 0a5040ebfcd..2f4713c6171 100644
--- a/gcc/tree-complex.c
+++ b/gcc/tree-complex.c
@@ -706,21 +706,6 @@ update_phi_components (basic_block bb)
}
}
-/* Mark each virtual op in STMT for ssa update. */
-
-static void
-update_all_vops (tree stmt)
-{
- ssa_op_iter iter;
- tree sym;
-
- FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
- {
- if (TREE_CODE (sym) == SSA_NAME)
- sym = SSA_NAME_VAR (sym);
- mark_sym_for_renaming (sym);
- }
-}
/* Expand a complex move to scalars. */
@@ -759,7 +744,7 @@ expand_complex_move (block_stmt_iterator *bsi, tree stmt, tree type,
}
else
{
- update_all_vops (bsi_stmt (*bsi));
+ mark_symbols_for_renaming (bsi_stmt (*bsi));
r = extract_component (bsi, rhs, 0, true);
i = extract_component (bsi, rhs, 1, true);
update_complex_assignment (bsi, r, i);
@@ -794,8 +779,7 @@ expand_complex_move (block_stmt_iterator *bsi, tree stmt, tree type,
TREE_OPERAND (stmt, 0) = lhs;
}
- update_all_vops (stmt);
- update_stmt (stmt);
+ mark_symbols_for_renaming (stmt);
}
}
diff --git a/gcc/tree-dfa.c b/gcc/tree-dfa.c
index 9f7ee5d7580..a564457925d 100644
--- a/gcc/tree-dfa.c
+++ b/gcc/tree-dfa.c
@@ -758,67 +758,43 @@ get_virtual_var (tree var)
return var;
}
-/* Mark all the non-SSA variables found in STMT's operands to be
- processed by update_ssa. */
+/* Mark all the naked symbols in STMT for SSA renaming.
+
+ NOTE: This function should only be used for brand new statements.
+ If the caller is modifying an existing statement, it should use the
+ combination push_stmt_changes/pop_stmt_changes. */
void
-mark_new_vars_to_rename (tree stmt)
+mark_symbols_for_renaming (tree stmt)
{
+ tree op;
ssa_op_iter iter;
- tree val;
- bitmap vars_in_vops_to_rename;
- bool found_exposed_symbol = false;
- int vdefs_before, vdefs_after;
- if (TREE_CODE (stmt) == PHI_NODE)
- return;
-
- get_stmt_ann (stmt);
- vars_in_vops_to_rename = BITMAP_ALLOC (NULL);
-
- /* Before re-scanning the statement for operands, mark the existing
- virtual operands to be renamed again. We do this because when new
- symbols are exposed, the virtual operands that were here before due to
- aliasing will probably be removed by the call to get_stmt_operand.
- Therefore, we need to flag them to be renamed beforehand.
-
- We flag them in a separate bitmap because we don't really want to
- rename them if there are not any newly exposed symbols in the
- statement operands. */
- vdefs_before = NUM_SSA_OPERANDS (stmt, SSA_OP_VDEF);
+ update_stmt (stmt);
- FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_VDEF | SSA_OP_VUSE)
+ if (stmt_references_memory_p (stmt))
{
- if (!DECL_P (val))
- val = SSA_NAME_VAR (val);
- bitmap_set_bit (vars_in_vops_to_rename, DECL_UID (val));
- }
+ bitmap loads, stores;
- /* Now force an operand re-scan on the statement and mark any newly
- exposed variables. */
- update_stmt (stmt);
+ loads = BITMAP_ALLOC (NULL);
+ stores = BITMAP_ALLOC (NULL);
- vdefs_after = NUM_SSA_OPERANDS (stmt, SSA_OP_VDEF);
+ get_loads_and_stores (stmt, loads, stores);
- FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_ALL_OPERANDS)
- if (DECL_P (val))
- {
- found_exposed_symbol = true;
- mark_sym_for_renaming (val);
- }
+ mark_set_for_renaming (loads);
+ mark_set_for_renaming (stores);
- /* If we found any newly exposed symbols, or if there are fewer VDEF
- operands in the statement, add the variables we had set in
- VARS_IN_VOPS_TO_RENAME to VARS_TO_RENAME. We need to check for
- vanishing VDEFs because in those cases, the names that were formerly
- generated by this statement are not going to be available anymore. */
- if (found_exposed_symbol
- || vdefs_before > vdefs_after)
- mark_set_for_renaming (vars_in_vops_to_rename);
+ BITMAP_FREE (loads);
+ BITMAP_FREE (stores);
+ }
- BITMAP_FREE (vars_in_vops_to_rename);
+ /* Mark all the GIMPLE register operands for renaming. */
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF|SSA_OP_USE)
+ if (DECL_P (op))
+ mark_sym_for_renaming (op);
}
+
/* Find all variables within the gimplified statement that were not previously
visible to the function and add them to the referenced variables list. */
@@ -847,7 +823,7 @@ find_new_referenced_vars (tree *stmt_p)
}
-/* If REF is a handled component reference for a structure, return the
+/* If EXP is a handled component reference for a structure, return the
base variable. The access range is delimited by bit positions *POFFSET and
*POFFSET + *PMAX_SIZE. The access size is *PSIZE bits. If either
*PSIZE or *PMAX_SIZE is -1, they could not be determined. If *PSIZE
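
The note on mark_symbols_for_renaming above draws a line between brand new
statements and modified ones. A hedged sketch of that usage rule
(insert_new_store and modify_existing_store are illustrative helpers;
build2, bsi_insert_after and TREE_OPERAND are the stock GCC 4.x APIs):

/* A statement built from scratch has no saved operand state to diff
   against, so its symbols are marked directly.  */

static void
insert_new_store (block_stmt_iterator *bsi, tree lhs, tree rhs)
{
  tree stmt = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
  bsi_insert_after (bsi, stmt, BSI_SAME_STMT);
  mark_symbols_for_renaming (stmt);
}

/* An existing statement is bracketed instead, so that only the
   symbols whose operands actually changed are renamed.  */

static void
modify_existing_store (tree stmt, tree new_rhs)
{
  push_stmt_changes (&stmt);
  TREE_OPERAND (stmt, 1) = new_rhs;  /* Hypothetical modification.  */
  pop_stmt_changes (&stmt);
}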
diff --git a/gcc/tree-flow.h b/gcc/tree-flow.h
index 96c16470d5f..60fe305a996 100644
--- a/gcc/tree-flow.h
+++ b/gcc/tree-flow.h
@@ -176,32 +176,29 @@ struct var_ann_d GTY(())
states. */
ENUM_BITFIELD (need_phi_state) need_phi_state : 2;
- /* Used during operand processing to determine if this variable is already
- in the vuse list. */
- unsigned in_vuse_list : 1;
-
- /* Used during operand processing to determine if this variable is already
- in the vdef list. */
- unsigned in_vdef_list : 1;
-
- /* An artificial variable representing the memory location pointed-to by
- all the pointer symbols that flow-insensitive alias analysis
- (mostly type-based) considers to be aliased. If the variable is
- not a pointer or if it is never dereferenced, this must be NULL. */
+ /* If this variable is a pointer P that has been dereferenced, this
+ field is an artificial variable that represents the memory
+ location *P. Every other pointer Q that is type-compatible with
+ P will also have the same memory tag. If the variable is not a
+ pointer or if it is never dereferenced, this must be NULL.
+ FIXME, do we really need this here? How much slower would it be
+ to convert to hash table? */
tree symbol_mem_tag;
- /* Variables that may alias this variable. */
+ /* Variables that may alias this variable. This may only be set on
+ memory tags (NAME_MEMORY_TAG or TYPE_MEMORY_TAG). FIXME, move to
+ struct tree_memory_tag. */
VEC(tree, gc) *may_aliases;
/* Used when going out of SSA form to indicate which partition this
- variable represents storage for. */
+ variable represents storage for. FIXME, remove. */
unsigned partition;
- /* Used by the root-var object in tree-ssa-live.[ch]. */
+ /* Used by the root-var object in tree-ssa-live.[ch]. FIXME, remove. */
unsigned root_index;
/* During into-ssa and the dominator optimizer, this field holds the
- current version of this variable (an SSA_NAME). */
+ current version of this variable (an SSA_NAME). FIXME, remove. */
tree current_def;
/* If this variable is a structure, this fields holds a list of
@@ -433,7 +430,12 @@ extern GTY(()) VEC(tree,gc) *ssa_names;
#define num_ssa_names (VEC_length (tree, ssa_names))
#define ssa_name(i) (VEC_index (tree, ssa_names, (i)))
-/* Artificial variable used to model the effects of function calls. */
+/* Artificial variable to represent references to memory (i.e., non
+ gimple registers). */
+extern GTY(()) tree mem_var;
+
+/* Artificial variable used to model the effects of function calls
+ when no other global symbols are referenced in the function. */
extern GTY(()) tree global_var;
/* Call clobbered variables in the function. If bit I is set, then
@@ -634,7 +636,7 @@ extern void dump_subvars_for (FILE *, tree);
extern void debug_subvars_for (tree);
extern tree get_virtual_var (tree);
extern void add_referenced_var (tree);
-extern void mark_new_vars_to_rename (tree);
+extern void mark_symbols_for_renaming (tree);
extern void find_new_referenced_vars (tree *);
extern tree make_rename_temp (tree, const char *);
@@ -693,7 +695,6 @@ extern bool tree_ssa_useless_type_conversion (tree);
extern bool tree_ssa_useless_type_conversion_1 (tree, tree);
extern void verify_ssa (bool);
extern void delete_tree_ssa (void);
-extern void register_new_def (tree, VEC(tree,heap) **);
extern void walk_use_def_chains (tree, walk_use_def_chains_fn, void *, bool);
extern bool stmt_references_memory_p (tree);
@@ -705,7 +706,7 @@ tree create_new_def_for (tree, tree, def_operand_p);
bool need_ssa_update_p (void);
bool name_registered_for_update_p (tree);
bitmap ssa_names_to_replace (void);
-void release_ssa_name_after_update_ssa (tree name);
+void release_ssa_name_after_update_ssa (tree);
void compute_global_livein (bitmap, bitmap);
tree duplicate_ssa_name (tree, tree);
void mark_sym_for_renaming (tree);
@@ -866,21 +867,22 @@ extern enum move_pos movement_possibility (tree);
/* The reasons a variable may escape a function. */
enum escape_type
- {
- NO_ESCAPE = 0, /* Doesn't escape. */
- ESCAPE_STORED_IN_GLOBAL = 1 << 1,
- ESCAPE_TO_ASM = 1 << 2, /* Passed by address to an assembly
- statement. */
- ESCAPE_TO_CALL = 1 << 3, /* Escapes to a function call. */
- ESCAPE_BAD_CAST = 1 << 4, /* Cast from pointer to integer */
- ESCAPE_TO_RETURN = 1 << 5, /* Returned from function. */
- ESCAPE_TO_PURE_CONST = 1 << 6, /* Escapes to a pure or constant
- function call. */
- ESCAPE_IS_GLOBAL = 1 << 7, /* Is a global variable. */
- ESCAPE_IS_PARM = 1 << 8, /* Is an incoming function parameter. */
- ESCAPE_UNKNOWN = 1 << 9 /* We believe it escapes for some reason
- not enumerated above. */
- };
+{
+ NO_ESCAPE = 0, /* Doesn't escape. */
+ ESCAPE_STORED_IN_GLOBAL = 1 << 1,
+ ESCAPE_TO_ASM = 1 << 2, /* Passed by address to an assembly
+ statement. */
+ ESCAPE_TO_CALL = 1 << 3, /* Escapes to a function call. */
+ ESCAPE_BAD_CAST = 1 << 4, /* Cast from pointer to integer */
+ ESCAPE_TO_RETURN = 1 << 5, /* Returned from function. */
+ ESCAPE_TO_PURE_CONST = 1 << 6, /* Escapes to a pure or constant
+ function call. */
+ ESCAPE_IS_GLOBAL = 1 << 7, /* Is a global variable. */
+ ESCAPE_IS_PARM = 1 << 8, /* Is an incoming function argument. */
+ ESCAPE_UNKNOWN = 1 << 9 /* We believe it escapes for
+ some reason not enumerated
+ above. */
+};
/* In tree-flow-inline.h */
static inline bool is_call_clobbered (tree);
@@ -1020,6 +1022,4 @@ void delete_alias_heapvars (void);
void swap_tree_operands (tree, tree *, tree *);
-extern void recalculate_used_alone (void);
-extern bool updating_used_alone;
#endif /* _TREE_FLOW_H */
diff --git a/gcc/tree-into-ssa.c b/gcc/tree-into-ssa.c
index 5dec1a6e7eb..b73622fcec7 100644
--- a/gcc/tree-into-ssa.c
+++ b/gcc/tree-into-ssa.c
@@ -55,7 +55,7 @@ Boston, MA 02110-1301, USA. */
Graph. ACM Transactions on Programming Languages and Systems,
13(4):451-490, October 1991. */
-/* True if the code is in ssa form. */
+/* True if the code is in SSA form. */
bool in_ssa_p;
/* Structure to map a variable VAR to the set of blocks that contain
@@ -91,16 +91,17 @@ static htab_t def_blocks;
state after completing rewriting of a block and its dominator
children. Its elements have the following properties:
- - An SSA_NAME indicates that the current definition of the
- underlying variable should be set to the given SSA_NAME.
+ - An SSA_NAME (N) indicates that the current definition of the
+ underlying variable should be set to the given SSA_NAME. If the
+ symbol associated with the SSA_NAME is not a GIMPLE register, the
+ next slot in the stack must be a _DECL node (SYM). In this case,
+ the name N in the previous slot is the current reaching
+ definition for SYM.
- A _DECL node indicates that the underlying variable has no
current definition.
- - A NULL node is used to mark the last node associated with the
- current block.
-
- - A NULL node at the top entry is used to mark the last node
+ - A NULL node at the top entry is used to mark the last slot
associated with the current block. */
static VEC(tree,heap) *block_defs_stack;
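
The slot layout described above implies a matching unwind discipline. A
sketch of the consumer side, mirroring the rewrite_finalize_block hunk
further down (set_current_def and the VEC accessors are the ones used
throughout this file):

static void
unwind_block_defs (void)
{
  while (VEC_length (tree, block_defs_stack) > 0)
    {
      tree var, saved_def;
      tree tmp = VEC_pop (tree, block_defs_stack);

      /* A NULL marker ends the slots for the current block.  */
      if (tmp == NULL_TREE)
	break;

      if (TREE_CODE (tmp) == SSA_NAME)
	{
	  /* Restore TMP as the current definition of its symbol.  For
	     names whose symbol is not a GIMPLE register, the symbol
	     itself sits in the next slot.  */
	  saved_def = tmp;
	  var = SSA_NAME_VAR (saved_def);
	  if (!is_gimple_reg (var))
	    var = VEC_pop (tree, block_defs_stack);
	}
      else
	{
	  /* A bare _DECL means the symbol had no current definition.  */
	  saved_def = NULL_TREE;
	  var = tmp;
	}

      set_current_def (var, saved_def);
    }
}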
@@ -121,9 +122,16 @@ static bitmap syms_to_rename;
released after we finish updating the SSA web. */
static bitmap names_to_release;
-/* For each block, the phi nodes that need to be rewritten are stored into
- these vectors. */
+/* Set of SSA names that have been marked stale by the SSA updater.
+ This happens when the LHS of a VDEF operator needs a new SSA name
+ (i.e., it used to be a .MEM factored store and got converted into a
+ regular store). When this occurs, other VDEF and VUSE operators
+ using the original LHS must stop using it.
+ See rewrite_update_stmt_vops. */
+static bitmap stale_ssa_names;
+/* For each block, the PHI nodes that need to be rewritten are stored into
+ these vectors. */
typedef VEC(tree, heap) *tree_vec;
DEF_VEC_P (tree_vec);
DEF_VEC_ALLOC_P (tree_vec, heap);
@@ -131,7 +139,6 @@ DEF_VEC_ALLOC_P (tree_vec, heap);
static VEC(tree_vec, heap) *phis_to_rewrite;
/* The bitmap of non-NULL elements of PHIS_TO_REWRITE. */
-
static bitmap blocks_with_phis_to_rewrite;
/* Growth factor for NEW_SSA_NAMES and OLD_SSA_NAMES. These sets need
@@ -217,7 +224,6 @@ static VEC(ssa_name_info_p, heap) *info_for_ssa_name;
static unsigned current_info_for_ssa_name_age;
/* The set of blocks affected by update_ssa. */
-
static bitmap blocks_to_update;
/* The main entry point to the SSA renamer (rewrite_blocks) may be
@@ -250,6 +256,22 @@ enum rewrite_mode {
registered, but they don't need to have their uses renamed. */
#define REGISTER_DEFS_IN_THIS_STMT(T) (T)->common.unsigned_flag
+DEF_VEC_P(bitmap);
+DEF_VEC_ALLOC_P(bitmap,heap);
+
+/* Mapping between a statement and the symbols referenced by it. */
+struct mem_syms_map_d
+{
+ tree stmt;
+ bitmap loaded;
+ bitmap stored;
+};
+
+typedef struct mem_syms_map_d *mem_syms_map_t;
+
+/* Table, indexed by statement, for symbols loaded and stored by
+ statements that reference memory. */
+static htab_t mem_syms_tbl;
/* Prototypes for debugging functions. */
extern void dump_tree_ssa (FILE *);
@@ -257,10 +279,16 @@ extern void debug_tree_ssa (void);
extern void debug_def_blocks (void);
extern void dump_tree_ssa_stats (FILE *);
extern void debug_tree_ssa_stats (void);
-void dump_update_ssa (FILE *);
-void debug_update_ssa (void);
-void dump_names_replaced_by (FILE *, tree);
-void debug_names_replaced_by (tree);
+extern void dump_update_ssa (FILE *);
+extern void debug_update_ssa (void);
+extern void dump_names_replaced_by (FILE *, tree);
+extern void debug_names_replaced_by (tree);
+extern void dump_def_blocks (FILE *);
+extern void debug_def_blocks (void);
+extern void dump_defs_stack (FILE *, int);
+extern void debug_defs_stack (int);
+extern void dump_currdefs (FILE *);
+extern void debug_currdefs (void);
/* Get the information associated with NAME. */
@@ -526,13 +554,10 @@ set_livein_block (tree var, basic_block bb)
by the single block containing the definition(s) of this variable. If
it is, then we remain in NEED_PHI_STATE_NO, otherwise we transition to
NEED_PHI_STATE_MAYBE. */
- if (state == NEED_PHI_STATE_NO)
+ if (state == NEED_PHI_STATE_NO && !bitmap_empty_p (db_p->def_blocks))
{
- int def_block_index = bitmap_first_set_bit (db_p->def_blocks);
-
- if (def_block_index == -1
- || ! dominated_by_p (CDI_DOMINATORS, bb,
- BASIC_BLOCK (def_block_index)))
+ int ix = bitmap_first_set_bit (db_p->def_blocks);
+ if (!dominated_by_p (CDI_DOMINATORS, bb, BASIC_BLOCK (ix)))
set_phi_state (var, NEED_PHI_STATE_MAYBE);
}
else
@@ -545,7 +570,6 @@ set_livein_block (tree var, basic_block bb)
static inline bool
symbol_marked_for_renaming (tree sym)
{
- gcc_assert (DECL_P (sym));
return bitmap_bit_p (syms_to_rename, DECL_UID (sym));
}
@@ -659,8 +683,12 @@ add_new_name_mapping (tree new, tree old)
}
/* If this mapping is for virtual names, we will need to update
- virtual operands. */
- if (!is_gimple_reg (new))
+ virtual operands. Ignore .MEM as it is not a symbol that
+ can be put into SSA form independently. If the heuristic for
+ renaming the virtual symbols from scratch is enabled, it will
+ want to put .MEM into SSA form from scratch, and that cannot be
+ done. */
+ if (!is_gimple_reg (new) && SSA_NAME_VAR (new) != mem_var)
{
tree sym;
size_t uid;
@@ -703,6 +731,63 @@ add_new_name_mapping (tree new, tree old)
}
+/* Hashing and equality functions for MEM_SYMS_TBL. */
+
+static hashval_t
+mem_syms_hash (const void *p)
+{
+ return htab_hash_pointer ((const void *)((const mem_syms_map_t)p)->stmt);
+}
+
+static int
+mem_syms_eq (const void *p1, const void *p2)
+{
+ return ((const mem_syms_map_t)p1)->stmt == ((const mem_syms_map_t)p2)->stmt;
+}
+
+static void
+mem_syms_free (void *p)
+{
+ BITMAP_FREE (((mem_syms_map_t)p)->loaded);
+ BITMAP_FREE (((mem_syms_map_t)p)->stored);
+ free (p);
+}
+
+
+/* Return the memory symbols referenced by STMT. */
+
+static inline mem_syms_map_t
+syms_referenced_by (tree stmt)
+{
+ struct mem_syms_map_d m, *mp;
+ void **slot;
+
+ m.stmt = stmt;
+ slot = htab_find_slot (mem_syms_tbl, (void *) &m, INSERT);
+ if (*slot == NULL)
+ {
+ mp = XNEW (struct mem_syms_map_d);
+ mp->stmt = stmt;
+ mp->loaded = BITMAP_ALLOC (NULL);
+ mp->stored = BITMAP_ALLOC (NULL);
+
+ get_loads_and_stores (stmt, mp->loaded, mp->stored);
+
+ if (bitmap_empty_p (mp->loaded))
+ BITMAP_FREE (mp->loaded);
+
+ if (bitmap_empty_p (mp->stored))
+ BITMAP_FREE (mp->stored);
+
+ *slot = (void *) mp;
+ }
+ else
+ mp = (mem_syms_map_t) *slot;
+
+ return mp;
+}
+
+
/* Call back for walk_dominator_tree used to collect definition sites
for every variable in the function. For every statement S in block
BB:
@@ -718,30 +803,28 @@ add_new_name_mapping (tree new, tree old)
we create. */
static void
-mark_def_sites (struct dom_walk_data *walk_data,
- basic_block bb,
+mark_def_sites (struct dom_walk_data *walk_data, basic_block bb,
block_stmt_iterator bsi)
{
- struct mark_def_sites_global_data *gd =
- (struct mark_def_sites_global_data *) walk_data->global_data;
- bitmap kills = gd->kills;
+ struct mark_def_sites_global_data *gd;
+ bitmap kills;
tree stmt, def;
use_operand_p use_p;
- def_operand_p def_p;
ssa_op_iter iter;
- vuse_vec_p vv;
stmt = bsi_stmt (bsi);
update_stmt_if_modified (stmt);
+ gd = (struct mark_def_sites_global_data *) walk_data->global_data;
+ kills = gd->kills;
+
gcc_assert (blocks_to_update == NULL);
REGISTER_DEFS_IN_THIS_STMT (stmt) = 0;
REWRITE_THIS_STMT (stmt) = 0;
/* If a variable is used before being set, then the variable is live
across a block boundary, so mark it live-on-entry to BB. */
- FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter,
- SSA_OP_USE | SSA_OP_VUSE | SSA_OP_VMAYUSE)
+ FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
{
tree sym = USE_FROM_PTR (use_p);
gcc_assert (DECL_P (sym));
@@ -750,26 +833,9 @@ mark_def_sites (struct dom_walk_data *walk_data,
REWRITE_THIS_STMT (stmt) = 1;
}
- /* Note that virtual definitions are irrelevant for computing KILLS
- because a VDEF does not constitute a killing definition of the
- variable. However, the operand of a virtual definitions is a use
- of the variable, so it may cause the variable to be considered
- live-on-entry. */
- FOR_EACH_SSA_VDEF_OPERAND (def_p, vv, stmt, iter)
- {
- tree sym;
- gcc_assert (VUSE_VECT_NUM_ELEM (*vv) == 1);
- use_p = VUSE_ELEMENT_PTR (*vv, 0);
- sym = USE_FROM_PTR (use_p);
- gcc_assert (DECL_P (sym));
- set_livein_block (sym, bb);
- set_def_block (sym, bb, false);
- REGISTER_DEFS_IN_THIS_STMT (stmt) = 1;
- REWRITE_THIS_STMT (stmt) = 1;
- }
-
- /* Now process the defs and vdefs made by this statement. */
- FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF | SSA_OP_VDEF)
+ /* Now process the defs. Mark BB as the definition block and add
+ each def to the set of killed symbols. */
+ FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
{
gcc_assert (DECL_P (def));
set_def_block (def, bb, false);
@@ -777,6 +843,51 @@ mark_def_sites (struct dom_walk_data *walk_data,
REGISTER_DEFS_IN_THIS_STMT (stmt) = 1;
}
+ /* If the statement has memory references, process the associated
+ symbols. */
+ if (stmt_references_memory_p (stmt))
+ {
+ mem_syms_map_t syms;
+ unsigned i;
+ bitmap_iterator bi;
+
+ syms = syms_referenced_by (stmt);
+
+ if (syms->loaded)
+ {
+ REWRITE_THIS_STMT (stmt) = 1;
+ EXECUTE_IF_SET_IN_BITMAP (syms->loaded, 0, i, bi)
+ {
+ tree sym = referenced_var (i);
+
+ /* Note that VDEF operators are never considered killing
+ definitions, so memory symbols are always
+ live-on-entry. */
+ set_livein_block (sym, bb);
+ }
+ }
+
+ if (syms->stored)
+ {
+ REGISTER_DEFS_IN_THIS_STMT (stmt) = 1;
+ REWRITE_THIS_STMT (stmt) = 1;
+
+ EXECUTE_IF_SET_IN_BITMAP (syms->stored, 0, i, bi)
+ {
+ tree sym = referenced_var (i);
+ set_def_block (sym, bb, false);
+
+ /* Note that virtual definitions are irrelevant for
+ computing KILLS because a VDEF does not constitute a
+ killing definition of the variable. However, the
+ operand of a virtual definition is a use of the
+ variable, so it may cause the variable to be
+ considered live-on-entry. */
+ set_livein_block (sym, bb);
+ }
+ }
+ }
+
/* If we found the statement interesting then also mark the block BB
as interesting. */
if (REWRITE_THIS_STMT (stmt) || REGISTER_DEFS_IN_THIS_STMT (stmt))
@@ -1014,14 +1125,12 @@ insert_phi_nodes (bitmap *dfs)
}
-/* Register DEF (an SSA_NAME) to be a new definition for its underlying
- variable (SSA_NAME_VAR (DEF)) and push VAR's current reaching definition
- into the stack pointed to by BLOCK_DEFS_P. */
+/* Push SYM's current reaching definition into BLOCK_DEFS_STACK and
+ register DEF (an SSA_NAME) to be a new definition for SYM. */
-void
-register_new_def (tree def, VEC(tree,heap) **block_defs_p)
+static void
+register_new_def (tree def, tree sym)
{
- tree var = SSA_NAME_VAR (def);
tree currdef;
/* If this variable is set in a single basic block and all uses are
@@ -1032,23 +1141,31 @@ register_new_def (tree def, VEC(tree,heap) **block_defs_p)
This is the same test to prune the set of variables which may
need PHI nodes. So we just use that information since it's already
computed and available for us to use. */
- if (get_phi_state (var) == NEED_PHI_STATE_NO)
+ if (get_phi_state (sym) == NEED_PHI_STATE_NO)
{
- set_current_def (var, def);
+ set_current_def (sym, def);
return;
}
- currdef = get_current_def (var);
+ currdef = get_current_def (sym);
+
+ /* If SYM is not a GIMPLE register, then CURRDEF may be a name whose
+ SSA_NAME_VAR is not necessarily SYM. In this case, also push SYM
+ in the stack so that we know which symbol is being defined by
+ this SSA name when we unwind the stack. */
+ if (currdef && !is_gimple_reg (sym))
+ VEC_safe_push (tree, heap, block_defs_stack, sym);
- /* Push the current reaching definition into *BLOCK_DEFS_P. This stack is
- later used by the dominator tree callbacks to restore the reaching
- definitions for all the variables defined in the block after a recursive
- visit to all its immediately dominated blocks. If there is no current
- reaching definition, then just record the underlying _DECL node. */
- VEC_safe_push (tree, heap, *block_defs_p, currdef ? currdef : var);
+ /* Push the current reaching definition into BLOCK_DEFS_STACK. This
+ stack is later used by the dominator tree callbacks to restore
+ the reaching definitions for all the variables defined in the
+ block after a recursive visit to all its immediately dominated
+ blocks. If there is no current reaching definition, then just
+ record the underlying _DECL node. */
+ VEC_safe_push (tree, heap, block_defs_stack, currdef ? currdef : sym);
- /* Set the current reaching definition for VAR to be DEF. */
- set_current_def (var, def);
+ /* Set the current reaching definition for SYM to be DEF. */
+ set_current_def (sym, def);
}
@@ -1098,37 +1215,144 @@ rewrite_initialize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
{
tree result = PHI_RESULT (phi);
- register_new_def (result, &block_defs_stack);
+ /* FIXME. For memory symbols, this will be .MEM, but we should
+ determine what symbol is associated with this particular PHI
+ function. */
+ gcc_assert (SSA_NAME_VAR (result) != mem_var);
+ register_new_def (result, SSA_NAME_VAR (result));
}
}
/* Return the current definition for variable VAR. If none is found,
- create a new SSA name to act as the zeroth definition for VAR. If VAR
- is call clobbered and there exists a more recent definition of
- GLOBAL_VAR, return the definition for GLOBAL_VAR. This means that VAR
- has been clobbered by a function call since its last assignment. */
+ create a new SSA name to act as the zeroth definition for VAR. */
-static tree
+static inline tree
get_reaching_def (tree var)
{
- tree currdef_var, avar;
+ tree currdef;
/* Lookup the current reaching definition for VAR. */
- currdef_var = get_current_def (var);
+ currdef = get_current_def (var);
/* If there is no reaching definition for VAR, create and register a
default definition for it (if needed). */
- if (currdef_var == NULL_TREE)
+ if (currdef == NULL_TREE)
{
- avar = DECL_P (var) ? var : SSA_NAME_VAR (var);
- currdef_var = get_default_def_for (avar);
- set_current_def (var, currdef_var);
+ /* If VAR is not a GIMPLE register, use the default definition
+ for .MEM. */
+ tree sym = DECL_P (var) ? var : SSA_NAME_VAR (var);
+ sym = (is_gimple_reg (sym)) ? sym : mem_var;
+ currdef = get_default_def_for (sym);
+ set_current_def (var, currdef);
}
/* Return the current reaching definition for VAR, or the default
definition, if we had to create one. */
- return currdef_var;
+ return currdef;
+}
+
+
+/* Rewrite memory references in STMT. */
+
+static void
+rewrite_memory_stmt (tree stmt)
+{
+ unsigned i;
+ bitmap rdefs;
+ bitmap_iterator bi;
+ mem_syms_map_t syms;
+
+ syms = syms_referenced_by (stmt);
+
+ /* If the statement makes no loads or stores, it must have volatile
+ operands. */
+ if (syms->loaded == NULL && syms->stored == NULL)
+ {
+ gcc_assert (stmt_ann (stmt)->has_volatile_ops);
+ return;
+ }
+
+ rdefs = BITMAP_ALLOC (NULL);
+
+ /* Rewrite loaded symbols. */
+ if (syms->loaded)
+ {
+ size_t j;
+ struct vuse_optype_d *vuses;
+
+ /* If STMT is a load, it should have exactly one VUSE operator. */
+ vuses = VUSE_OPS (stmt);
+ gcc_assert (vuses && vuses->next == NULL);
+
+ /* Collect all the reaching definitions for symbols loaded by STMT. */
+ bitmap_clear (rdefs);
+ EXECUTE_IF_SET_IN_BITMAP (syms->loaded, 0, i, bi)
+ {
+ tree sym = referenced_var (i);
+ tree rdef = get_reaching_def (sym);
+ bitmap_set_bit (rdefs, SSA_NAME_VERSION (rdef));
+ }
+
+ /* Rewrite the VUSE as VUSE <{RDEFS}>. */
+ vuses = realloc_vuse (vuses, bitmap_count_bits (rdefs));
+ j = 0;
+ EXECUTE_IF_SET_IN_BITMAP (rdefs, 0, i, bi)
+ SET_USE (VUSE_OP_PTR (vuses, j++), ssa_name (i));
+
+ vuses = vuses->next;
+ }
+
+ /* Rewrite stored symbols. */
+ if (syms->stored)
+ {
+ tree lhs, new_name;
+ size_t j;
+ struct vdef_optype_d *vdefs;
+
+ /* If STMT is a store, it should have exactly one VDEF operator. */
+ vdefs = VDEF_OPS (stmt);
+ gcc_assert (vdefs && vdefs->next == NULL);
+
+ /* Collect all the current reaching definitions for symbols in STORES. */
+ bitmap_clear (rdefs);
+ EXECUTE_IF_SET_IN_BITMAP (syms->stored, 0, i, bi)
+ {
+ tree sym = referenced_var (i);
+ tree rdef = get_reaching_def (sym);
+ bitmap_set_bit (rdefs, SSA_NAME_VERSION (rdef));
+ }
+
+ /* Create a new name for the LHS. If there is a single symbol
+ in STORES, use it as the target of the VDEF. Otherwise
+ factor all the stored symbols into .MEM. */
+ if (bitmap_count_bits (syms->stored) == 1)
+ lhs = referenced_var (bitmap_first_set_bit (syms->stored));
+ else
+ {
+ lhs = VDEF_RESULT (vdefs);
+ gcc_assert (lhs == mem_var);
+ }
+
+ new_name = make_ssa_name (lhs, stmt);
+
+ /* Set NEW_NAME to be the current reaching definition for every
+ symbol stored by STMT. */
+ EXECUTE_IF_SET_IN_BITMAP (syms->stored, 0, i, bi)
+ {
+ tree sym = referenced_var (i);
+ register_new_def (new_name, sym);
+ }
+
+ /* Rewrite the VDEF as NEW_NAME = VDEF <{RDEFS}>. */
+ vdefs = realloc_vdef (vdefs, bitmap_count_bits (rdefs));
+ SET_DEF (VDEF_RESULT_PTR (vdefs), new_name);
+ j = 0;
+ EXECUTE_IF_SET_IN_BITMAP (rdefs, 0, i, bi)
+ SET_USE (VDEF_OP_PTR (vdefs, j++), ssa_name (i));
+ }
+
+ BITMAP_FREE (rdefs);
}
@@ -1138,8 +1362,7 @@ get_reaching_def (tree var)
static void
rewrite_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
- basic_block bb ATTRIBUTE_UNUSED,
- block_stmt_iterator si)
+ basic_block bb ATTRIBUTE_UNUSED, block_stmt_iterator si)
{
tree stmt;
use_operand_p use_p;
@@ -1161,24 +1384,29 @@ rewrite_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
fprintf (dump_file, "\n");
}
- /* Step 1. Rewrite USES and VUSES in the statement. */
+ /* Step 1. Rewrite USES in the statement. */
if (REWRITE_THIS_STMT (stmt))
- FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_ALL_USES)
+ FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
{
tree var = USE_FROM_PTR (use_p);
gcc_assert (DECL_P (var));
SET_USE (use_p, get_reaching_def (var));
}
- /* Step 2. Register the statement's DEF and VDEF operands. */
+ /* Step 2. Register the statement's DEF operands. */
if (REGISTER_DEFS_IN_THIS_STMT (stmt))
- FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_ALL_DEFS)
+ FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_DEF)
{
tree var = DEF_FROM_PTR (def_p);
gcc_assert (DECL_P (var));
SET_DEF (def_p, make_ssa_name (var, stmt));
- register_new_def (DEF_FROM_PTR (def_p), &block_defs_stack);
+ register_new_def (DEF_FROM_PTR (def_p), var);
}
+
+ /* Rewrite virtual operands for statements that make memory
+ references. */
+ if (stmt_references_memory_p (stmt))
+ rewrite_memory_stmt (stmt);
}
@@ -1224,17 +1452,25 @@ rewrite_finalize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
if (tmp == NULL_TREE)
break;
- /* If we recorded an SSA_NAME, then make the SSA_NAME the current
- definition of its underlying variable. If we recorded anything
- else, it must have been an _DECL node and its current reaching
- definition must have been NULL. */
if (TREE_CODE (tmp) == SSA_NAME)
{
+ /* If we recorded an SSA_NAME, then make the SSA_NAME the
+ current definition of its underlying variable. Note that
+ if the SSA_NAME is not for a GIMPLE register, the symbol
+ being defined is stored in the next slot in the stack.
+ This mechanism is needed because an SSA name for a
+ non-register symbol may be the definition for more than
+ one symbol (e.g., SFTs, aliased variables, etc). */
saved_def = tmp;
var = SSA_NAME_VAR (saved_def);
+ if (!is_gimple_reg (var))
+ var = VEC_pop (tree, block_defs_stack);
}
else
{
+ /* If we recorded anything else, it must have been a _DECL
+ node and its current reaching definition must have been
+ NULL. */
saved_def = NULL;
var = tmp;
}
@@ -1244,24 +1480,120 @@ rewrite_finalize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
}
+/* Dump the renaming stack (block_defs_stack) to FILE. Traverse the
+ stack up to a maximum of N levels. If N is -1, the whole stack is
+ dumped. New levels are created when the dominator tree traversal
+ used for renaming enters a new sub-tree. */
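+
+/* The output has the following shape (example values are
+ hypothetical):
+
+ Renaming stack
+
+ Level 1 (current level)
+ Previous CURRDEF (a) = a_3
+
+ Level 2
+ Previous CURRDEF (b) = <NIL> */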
+
+void
+dump_defs_stack (FILE *file, int n)
+{
+ int i, j;
+
+ fprintf (file, "\n\nRenaming stack");
+ if (n > 0)
+ fprintf (file, " (up to %d levels)", n);
+ fprintf (file, "\n\n");
+
+ i = 1;
+ fprintf (file, "Level %d (current level)\n", i);
+ for (j = (int) VEC_length (tree, block_defs_stack) - 1; j >= 0; j--)
+ {
+ tree name, var;
+
+ name = VEC_index (tree, block_defs_stack, j);
+ if (name == NULL_TREE)
+ {
+ i++;
+ if (n > 0 && i > n)
+ break;
+ fprintf (file, "\nLevel %d\n", i);
+ continue;
+ }
+
+ if (DECL_P (name))
+ {
+ var = name;
+ name = NULL_TREE;
+ }
+ else
+ {
+ var = SSA_NAME_VAR (name);
+ if (!is_gimple_reg (var))
+ {
+ j--;
+ var = VEC_index (tree, block_defs_stack, j);
+ }
+ }
+
+ fprintf (file, " Previous CURRDEF (");
+ print_generic_expr (file, var, 0);
+ fprintf (file, ") = ");
+ if (name)
+ print_generic_expr (file, name, 0);
+ else
+ fprintf (file, "<NIL>");
+ fprintf (file, "\n");
+ }
+}
+
+
+/* Dump the renaming stack (block_defs_stack) to stderr. Traverse the
+ stack up to a maximum of N levels. If N is -1, the whole stack is
+ dumped. New levels are created when the dominator tree traversal
+ used for renaming enters a new sub-tree. */
+
+void
+debug_defs_stack (int n)
+{
+ dump_defs_stack (stderr, n);
+}
+
+
+/* Dump the current reaching definition of every symbol to FILE. */
+
+void
+dump_currdefs (FILE *file)
+{
+ referenced_var_iterator i;
+ tree var;
+
+ fprintf (file, "\n\nCurrent reaching definitions\n\n");
+ FOR_EACH_REFERENCED_VAR (var, i)
+ {
+ fprintf (file, "CURRDEF (");
+ print_generic_expr (file, var, 0);
+ fprintf (file, ") = ");
+ if (get_current_def (var))
+ print_generic_expr (file, get_current_def (var), 0);
+ else
+ fprintf (file, "<NIL>");
+ fprintf (file, "\n");
+ }
+}
+
+/* Dump the current reaching definition of every symbol to stderr. */
+
+void
+debug_currdefs (void)
+{
+ dump_currdefs (stderr);
+}
+
+
/* Dump SSA information to FILE. */
void
dump_tree_ssa (FILE *file)
{
- basic_block bb;
const char *funcname
= lang_hooks.decl_printable_name (current_function_decl, 2);
- fprintf (file, "SSA information for %s\n\n", funcname);
+ fprintf (file, "SSA renaming information for %s\n\n", funcname);
- FOR_EACH_BB (bb)
- {
- dump_bb (bb, file, 0);
- fputs (" ", file);
- print_generic_stmt (file, phi_nodes (bb), dump_flags);
- fputs ("\n\n", file);
- }
+ dump_def_blocks (file);
+ dump_defs_stack (file, -1);
+ dump_currdefs (file);
}
@@ -1342,25 +1674,38 @@ def_blocks_free (void *p)
/* Callback for htab_traverse to dump the DEF_BLOCKS hash table. */
static int
-debug_def_blocks_r (void **slot, void *data ATTRIBUTE_UNUSED)
+debug_def_blocks_r (void **slot, void *data)
{
+ FILE *file = (FILE *) data;
struct def_blocks_d *db_p = (struct def_blocks_d *) *slot;
- fprintf (stderr, "VAR: ");
- print_generic_expr (stderr, db_p->var, dump_flags);
- bitmap_print (stderr, db_p->def_blocks, ", DEF_BLOCKS: { ", "}");
- bitmap_print (stderr, db_p->livein_blocks, ", LIVEIN_BLOCKS: { ", "}\n");
+ fprintf (file, "VAR: ");
+ print_generic_expr (file, db_p->var, dump_flags);
+ bitmap_print (file, db_p->def_blocks, ", DEF_BLOCKS: { ", "}");
+ bitmap_print (file, db_p->livein_blocks, ", LIVEIN_BLOCKS: { ", "}");
+ bitmap_print (file, db_p->phi_blocks, ", PHI_BLOCKS: { ", "}\n");
return 1;
}
+/* Dump the DEF_BLOCKS hash table on FILE. */
+
+void
+dump_def_blocks (FILE *file)
+{
+ fprintf (file, "\n\nDefinition and live-in blocks:\n\n");
+ if (def_blocks)
+ htab_traverse (def_blocks, debug_def_blocks_r, file);
+}
+
+
/* Dump the DEF_BLOCKS hash table on stderr. */
void
debug_def_blocks (void)
{
- htab_traverse (def_blocks, debug_def_blocks_r, NULL);
+ dump_def_blocks (stderr);
}
@@ -1371,7 +1716,7 @@ register_new_update_single (tree new_name, tree old_name)
{
tree currdef = get_current_def (old_name);
- /* Push the current reaching definition into *BLOCK_DEFS_P.
+ /* Push the current reaching definition into BLOCK_DEFS_STACK.
This stack is later used by the dominator tree callbacks to
restore the reaching definitions for all the variables
defined in the block after a recursive visit to all its
@@ -1452,6 +1797,8 @@ rewrite_update_init_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
lhs_sym = SSA_NAME_VAR (lhs);
if (symbol_marked_for_renaming (lhs_sym))
+ /* If LHS is .MEM here, we should know which symbol this
+ .MEM is a new name for. */
register_new_update_single (lhs, lhs_sym);
else
{
@@ -1509,10 +1856,18 @@ maybe_replace_use (use_operand_p use_p)
tree use = USE_FROM_PTR (use_p);
tree sym = DECL_P (use) ? use : SSA_NAME_VAR (use);
- if (symbol_marked_for_renaming (sym))
- rdef = get_reaching_def (sym);
- else if (is_old_name (use))
- rdef = get_reaching_def (use);
+ if (TREE_CODE (use) == SSA_NAME && is_old_name (use))
+ {
+ gcc_assert (!symbol_marked_for_renaming (sym));
+ rdef = get_reaching_def (use);
+ }
+ else if (is_gimple_reg (sym) && symbol_marked_for_renaming (sym))
+ {
+ /* Note that when renaming naked symbols, we are only interested
+ in handling GIMPLE registers. Memory operands are updated in
+ rewrite_update_memory_stmt. */
+ rdef = get_reaching_def (sym);
+ }
if (rdef && rdef != use)
SET_USE (use_p, rdef);
@@ -1530,10 +1885,29 @@ maybe_register_def (def_operand_p def_p, tree stmt)
tree def = DEF_FROM_PTR (def_p);
tree sym = DECL_P (def) ? def : SSA_NAME_VAR (def);
- /* If DEF is a naked symbol that needs renaming, create a
- new name for it. */
- if (symbol_marked_for_renaming (sym))
+ if (TREE_CODE (def) == SSA_NAME && is_new_name (def))
+ {
+ /* If DEF is a new name, register it as a new definition
+ for all the names replaced by DEF. */
+ gcc_assert (!symbol_marked_for_renaming (sym));
+ register_new_update_set (def, names_replaced_by (def));
+ }
+
+ if (TREE_CODE (def) == SSA_NAME && is_old_name (def))
+ {
+ /* If DEF is an old name, register DEF as a new
+ definition for itself. */
+ gcc_assert (!symbol_marked_for_renaming (sym));
+ register_new_update_single (def, def);
+ }
+
+ /* Note that when renaming naked symbols, we are only interested
+ in handling GIMPLE registers. Memory operands are updated in
+ rewrite_update_memory_stmt. */
+ if (is_gimple_reg (sym) && symbol_marked_for_renaming (sym))
{
+ /* If DEF is a naked symbol that needs renaming, create a new
+ name for it. */
if (DECL_P (def))
{
def = make_ssa_name (def, stmt);
@@ -1542,18 +1916,382 @@ maybe_register_def (def_operand_p def_p, tree stmt)
register_new_update_single (def, sym);
}
+}
+
+
+/* Return true if name N has been marked to be released after the SSA
+ form has been updated. */
+
+static inline bool
+name_marked_for_release_p (tree n)
+{
+ return names_to_release
+ && bitmap_bit_p (names_to_release, SSA_NAME_VERSION (n));
+}
+
+
+/* Stale names are those that have been replaced by register_new_vdef_name.
+ Since that function will sometimes create a new name for the LHS of
+ a VDEF, statements downstream that still refer to the original LHS
+ in their virtual operands must be updated to use the new name
+ instead.
+
+ This typically happens when the LHS used to be a .MEM name, which we
+ try to preserve when updating the RHS of VDEF and VUSE
+ operators (see rewrite_update_stmt_vops). */
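+
+/* For example (hypothetical names): if the factored store
+
+ # .MEM_3 = VDEF <...>
+
+ is later found to store just 'a', register_new_vdef_name creates a
+ new name a_7 for the LHS and marks .MEM_3 stale; downstream uses
+ of .MEM_3 are then rewritten by replace_stale_ssa_names. */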
+
+static inline void
+mark_ssa_name_stale (tree n)
+{
+ gcc_assert (!need_to_initialize_update_ssa_p);
+
+ if (stale_ssa_names == NULL)
+ stale_ssa_names = BITMAP_ALLOC (NULL);
+
+ bitmap_set_bit (stale_ssa_names, SSA_NAME_VERSION (n));
+}
+
+
+/* Return true if name N has been marked stale by the SSA updater. */
+
+static inline bool
+stale_ssa_name_p (tree n)
+{
+ return stale_ssa_names
+ && bitmap_bit_p (stale_ssa_names, SSA_NAME_VERSION (n));
+}
+
+
+/* Preserve in RDEFS all the names from the virtual operands in STMT
+ that represent the symbols in UNMARKED_SYMS. WHICH_VOPS indicates
+ what virtual operator to process.
+
+ At this point, RDEFS contains the reaching definitions for all the
+ symbols marked for renaming in SYMS. However, there may be some
+ symbols in SYMS that had not been marked for renaming (i.e., those
+ collected in UNMARKED_SYMS).
+
+ We need to match those symbols in UNMARKED_SYMS to existing SSA
+ names in the virtual operands for STMT. Otherwise, we will remove
+ use-def chains for symbols that had not been marked for renaming.
+
+ Notice that we need to do all this maneuvering because the SSA
+ names for .MEM in the operand list may need to be removed when they
+ don't represent symbols in this statement anymore.
+
+ After this, names in WHICH_VOPS that have not been added to RDEFS
+ will be discarded. */
+
+static void
+preserve_needed_names_in_vops (tree stmt, bitmap unmarked_syms,
+ bitmap rdefs, int which_vops)
+{
+ tree name, sym;
+ use_operand_p use_p;
+ ssa_op_iter iter;
+ VEC(tree,heap) *mem_names_stack = NULL;
+
+ /* We first need to match SSA names for individual symbols to
+ avoid the following problem:
+
+ 1 # .MEM_3 = VDEF <a_5, b_1, c_9>
+ 2 *p_8 = ...
+ 3 ...
+ 4 # a_7 = VDEF <.MEM_3>
+ 5 a = ...
+ 6 ...
+ 7 # VUSE <.MEM_3, a_7>
+ 8 ... = a
+
+ Suppose that we are only renaming 'a'. Since .MEM_3 was used
+ to factor a store to 'a' and 'b', if we process .MEM_3 first,
+ we will decide to keep the .MEM_3 operand because 'a' is one of its
+ factored symbols. However, the definition of 'a_7' now supersedes
+ the definition for .MEM_3, and so, .MEM_3 needs to be discarded.
+
+ Doing this two-stage matching is correct because the existing
+ SSA web guarantees that the individual name 'a_7'
+ post-dominates the factored name '.MEM_3'. If there was
+ another factored store in the path, then we would have
+ another factored .MEM name reaching the load from 'a' at line 8. */
+ FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, which_vops)
+ {
+ name = USE_FROM_PTR (use_p);
+ sym = DECL_P (name) ? name : SSA_NAME_VAR (name);
+
+ if (TREE_CODE (name) == SSA_NAME && !name_marked_for_release_p (name))
+ {
+ if (sym == mem_var)
+ {
+ /* Save .MEM names to be processed after individual
+ names. */
+ VEC_safe_push (tree, heap, mem_names_stack, name);
+ }
+ else if (bitmap_bit_p (unmarked_syms, DECL_UID (sym)))
+ {
+ /* Otherwise, if SYM is in UNMARKED_SYMS and NAME
+ has not been marked to be released, add NAME to
+ RDEFS. */
+ bitmap_set_bit (rdefs, SSA_NAME_VERSION (name));
+ bitmap_clear_bit (unmarked_syms, DECL_UID (sym));
+ }
+ }
+ }
+
+ /* If we still have unmarked symbols, match them to the .MEM name(s). */
+ if (!bitmap_empty_p (unmarked_syms))
+ {
+ mem_syms_map_t syms;
+ bool found_default_mem_name_p = false;
+ tree mem_default_def = default_def (mem_var);
+ bitmap tmp = BITMAP_ALLOC (NULL);
+ bitmap matched_syms = BITMAP_ALLOC (NULL);
+
+ while (VEC_length (tree, mem_names_stack) > 0)
+ {
+ name = VEC_pop (tree, mem_names_stack);
+
+ if (name == mem_default_def)
+ {
+ /* The default definition for .MEM is special because it
+ matches every memory symbol. However, we only want
+ to use it as a last resort (i.e., if no other
+ existing SSA name matches). */
+ found_default_mem_name_p = true;
+ continue;
+ }
+
+ /* If this .MEM name matches some of the symbols in
+ UNMARKED_SYMS, add it to RDEFS. */
+ syms = syms_referenced_by (SSA_NAME_DEF_STMT (name));
+ if (bitmap_intersect_p (unmarked_syms, syms->stored))
+ {
+ /* Remove from UNMARKED_SYMS the symbols that are common
+ between UNMARKED_SYMS and SYMS->STORED. If there were
+ symbols in common, add NAME to RDEFS. */
+ bitmap_set_bit (rdefs, SSA_NAME_VERSION (name));
+
+ /* Add the matched symbols to MATCHED_SYMS. */
+ bitmap_and (tmp, unmarked_syms, syms->stored);
+ bitmap_ior_into (matched_syms, tmp);
+ }
+ }
+
+ /* Remove all the matched symbols from UNMARKED_SYMS. If every
+ unmarked symbol was matched, we can discard the default
+ definition for .MEM (if we found one). */
+ bitmap_and_compl_into (unmarked_syms, matched_syms);
+
+ /* The default definition for .MEM matches all the symbols that
+ could not be matched to other names in the list of operands. */
+ if (found_default_mem_name_p && !bitmap_empty_p (unmarked_syms))
+ {
+ bitmap_set_bit (rdefs, SSA_NAME_VERSION (mem_default_def));
+ bitmap_clear (unmarked_syms);
+ }
+
+ BITMAP_FREE (tmp);
+ BITMAP_FREE (matched_syms);
+ }
+
+ /* We should have found matches for every symbol in UNMARKED_SYMS. */
+ gcc_assert (unmarked_syms == NULL || bitmap_empty_p (unmarked_syms));
+ VEC_free (tree, heap, mem_names_stack);
+}
+
+
+/* Helper for rewrite_update_memory_stmt. WHICH_VOPS is either
+ SSA_OP_VUSE to update the RHS of a VUSE operator or SSA_OP_VMAYUSE
+ to update the RHS of a VDEF operator. This is done by collecting
+ reaching definitions for all the symbols in SYMS and writing a new
+ RHS for the virtual operator.
+
+ RDEFS is a scratch bitmap used to store reaching definitions for
+ all the symbols in SYMS. The caller is responsible for allocating
+ and freeing it.
+
+ FIXME, change bitmaps to pointer-sets when possible. */
+
+static void
+rewrite_update_stmt_vops (tree stmt, bitmap syms, bitmap rdefs, int which_vops)
+{
+ unsigned i, j, num_rdefs;
+ bitmap_iterator bi;
+ bool all_marked_p;
+ bitmap unmarked_syms = NULL;
+
+ gcc_assert (which_vops == SSA_OP_VUSE || which_vops == SSA_OP_VMAYUSE);
+
+ /* Collect all the reaching definitions for symbols marked for
+ renaming in SYMS. */
+ all_marked_p = true;
+ EXECUTE_IF_SET_IN_BITMAP (syms, 0, i, bi)
+ {
+ tree sym = referenced_var (i);
+ if (symbol_marked_for_renaming (sym))
+ {
+ tree rdef = get_reaching_def (sym);
+ bitmap_set_bit (rdefs, SSA_NAME_VERSION (rdef));
+ }
+ else
+ {
+ all_marked_p = false;
+
+ /* Add SYM to UNMARKED_SYMS so that they can be matched to
+ existing SSA names in WHICH_VOPS. */
+ if (unmarked_syms == NULL)
+ unmarked_syms = BITMAP_ALLOC (NULL);
+ bitmap_set_bit (unmarked_syms, DECL_UID (sym));
+ }
+ }
+
+ /* Preserve names from VOPS that are needed for the symbols that
+ have not been marked for renaming. */
+ if (!all_marked_p)
+ {
+ preserve_needed_names_in_vops (stmt, unmarked_syms, rdefs, which_vops);
+ BITMAP_FREE (unmarked_syms);
+ }
+
+ /* Rewrite the appropriate virtual operand, setting its RHS to RDEFS. */
+ num_rdefs = bitmap_count_bits (rdefs);
+ if (which_vops == SSA_OP_VUSE)
+ {
+ /* STMT should have exactly one VUSE operator. */
+ struct vuse_optype_d *vuses = VUSE_OPS (stmt);
+ gcc_assert (vuses && vuses->next == NULL);
+
+ vuses = realloc_vuse (vuses, num_rdefs);
+ j = 0;
+ EXECUTE_IF_SET_IN_BITMAP (rdefs, 0, i, bi)
+ SET_USE (VUSE_OP_PTR (vuses, j++), ssa_name (i));
+ }
else
{
- /* If DEF is a new name, register it as a new definition
- for all the names replaced by DEF. */
- if (is_new_name (def))
- register_new_update_set (def, names_replaced_by (def));
+ tree lhs;
+ struct vdef_optype_d *vdefs;
- /* If DEF is an old name, register DEF as a new
- definition for itself. */
- if (is_old_name (def))
- register_new_update_single (def, def);
+ /* STMT should have exactly one VDEF operator. */
+ vdefs = VDEF_OPS (stmt);
+ gcc_assert (vdefs && vdefs->next == NULL);
+
+ /* Preserve the existing LHS to avoid creating SSA names
+ unnecessarily. */
+ lhs = VDEF_RESULT (vdefs);
+
+ vdefs = realloc_vdef (vdefs, num_rdefs);
+ j = 0;
+ EXECUTE_IF_SET_IN_BITMAP (rdefs, 0, i, bi)
+ SET_USE (VDEF_OP_PTR (vdefs, j++), ssa_name (i));
+
+ SET_DEF (VDEF_RESULT_PTR (vdefs), lhs);
+ }
+}
+
+
+/* Helper for rewrite_update_memory_stmt. Register the LHS of the
+ VDEF operator in STMT to be the current reaching definition of
+ every symbol in the bitmap STORES. */
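+
+/* For instance (hypothetical names): if STORES = { a, b } and the
+ existing LHS is a_5, a new factored name .MEM_12 is created for
+ the VDEF, a_5 is marked stale, and .MEM_12 is registered as the
+ current reaching definition of the marked symbols among 'a' and
+ 'b'. */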
+
+static void
+register_new_vdef_name (tree stmt, bitmap stores)
+{
+ tree lhs, new_name;
+ struct vdef_optype_d *vdefs;
+ bitmap_iterator bi;
+ unsigned i;
+
+ /* If needed, create a new name for the LHS. */
+ vdefs = VDEF_OPS (stmt);
+ lhs = VDEF_RESULT (vdefs);
+ if (DECL_P (lhs))
+ {
+ /* If there is a single symbol in STORES, use it as the target
+ of the VDEF. Otherwise factor all the stored symbols into
+ .MEM. */
+ if (bitmap_count_bits (stores) == 1)
+ lhs = referenced_var (bitmap_first_set_bit (stores));
+ else
+ lhs = mem_var;
+
+ new_name = make_ssa_name (lhs, stmt);
+ }
+ else
+ {
+ /* If the LHS is already an SSA name, then we may not need to
+ create a new name. If the underlying symbol for LHS is the
+ same as the symbol we want to use, then re-use it.
+ Otherwise, create a new SSA name for it. */
+ tree new_lhs_sym;
+
+ if (bitmap_count_bits (stores) == 1)
+ new_lhs_sym = referenced_var (bitmap_first_set_bit (stores));
+ else
+ new_lhs_sym = mem_var;
+
+ if (new_lhs_sym == SSA_NAME_VAR (lhs))
+ new_name = lhs;
+ else
+ {
+ /* Create a new SSA name for the LHS and mark the original
+ LHS stale. This will prevent rewrite_update_stmt_vops
+ from keeping LHS in statements that still use it. FIXME,
+ this does not help statements that are never visited by
+ update_ssa. */
+ new_name = make_ssa_name (new_lhs_sym, stmt);
+ mark_ssa_name_stale (lhs);
+ }
+ }
+
+ /* Set NEW_NAME to be the current reaching definition for every
+ symbol in STORES that is marked for renaming. */
+ SET_DEF (VDEF_RESULT_PTR (vdefs), new_name);
+ EXECUTE_IF_SET_IN_BITMAP (stores, 0, i, bi)
+ {
+ tree sym = referenced_var (i);
+ if (symbol_marked_for_renaming (sym))
+ register_new_update_single (new_name, sym);
+ }
+}
+
+
+/* Update every SSA memory reference in STMT. */
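+
+/* As an illustrative (hypothetical) example, if only 'b' is marked
+ for renaming and its current reaching definition is b_9, then in
+
+ # .MEM_8 = VDEF <.MEM_5, b_2>
+ *p_1 = 0;
+
+ the RHS is rewritten to VDEF <.MEM_5, b_9>; the name .MEM_5 is
+ preserved for the unmarked symbols it represents (see
+ preserve_needed_names_in_vops). */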
+
+static void
+rewrite_update_memory_stmt (tree stmt)
+{
+ bitmap rdefs;
+ mem_syms_map_t syms;
+
+ syms = syms_referenced_by (stmt);
+
+ /* If the statement makes no loads or stores, it must have volatile
+ operands. */
+ if (syms->loaded == NULL && syms->stored == NULL)
+ {
+ gcc_assert (stmt_ann (stmt)->has_volatile_ops);
+ return;
+ }
+
+ rdefs = BITMAP_ALLOC (NULL);
+
+ /* Rewrite loaded symbols marked for renaming. */
+ if (syms->loaded)
+ {
+ rewrite_update_stmt_vops (stmt, syms->loaded, rdefs, SSA_OP_VUSE);
+ bitmap_clear (rdefs);
+ }
+
+ if (syms->stored)
+ {
+ /* Rewrite stored symbols marked for renaming. */
+ rewrite_update_stmt_vops (stmt, syms->stored, rdefs, SSA_OP_VMAYUSE);
+
+ /* Register the LHS of the VDEF to be the new reaching
+ definition of all the symbols in STORES. */
+ register_new_vdef_name (stmt, syms->stored);
}
+
+ BITMAP_FREE (rdefs);
}
@@ -1591,30 +2329,24 @@ rewrite_update_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
fprintf (dump_file, "\n");
}
+ /* If there are memory symbols to put in SSA form, process them. */
+ if (need_to_update_vops_p
+ && stmt_references_memory_p (stmt)
+ && !bitmap_empty_p (syms_to_rename))
+ rewrite_update_memory_stmt (stmt);
+
/* Rewrite USES included in OLD_SSA_NAMES and USES whose underlying
symbol is marked for renaming. */
if (REWRITE_THIS_STMT (stmt))
- {
- FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
- maybe_replace_use (use_p);
-
- if (need_to_update_vops_p)
- FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_VIRTUAL_USES)
- maybe_replace_use (use_p);
- }
+ FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_ALL_USES)
+ maybe_replace_use (use_p);
/* Register definitions of names in NEW_SSA_NAMES and OLD_SSA_NAMES.
Also register definitions for names whose underlying symbol is
marked for renaming. */
if (REGISTER_DEFS_IN_THIS_STMT (stmt))
- {
- FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_DEF)
- maybe_register_def (def_p, stmt);
-
- if (need_to_update_vops_p)
- FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_VIRTUAL_DEFS)
- maybe_register_def (def_p, stmt);
- }
+ FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, iter, SSA_OP_ALL_DEFS)
+ maybe_register_def (def_p, stmt);
}
@@ -1674,7 +2406,21 @@ rewrite_update_phi_arguments (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
}
else
{
- tree sym = DECL_P (arg) ? arg : SSA_NAME_VAR (arg);
+ tree sym, lhs, arg_sym, lhs_sym;
+
+ lhs = PHI_RESULT (phi);
+ lhs_sym = SSA_NAME_VAR (lhs);
+ arg_sym = DECL_P (arg) ? arg : SSA_NAME_VAR (arg);
+
+ /* Make sure we use the right symbol when updating PHIs
+ for memory symbols. If either the LHS of the PHI or
+ the argument is a memory symbol, use the LHS of the
+ PHI, as it always contains the symbol being defined
+ by this PHI. */
+ if (!is_gimple_reg (lhs_sym) || !is_gimple_reg (arg_sym))
+ sym = lhs_sym;
+ else
+ sym = arg_sym;
if (symbol_marked_for_renaming (sym))
replace_use (arg_p, sym);
@@ -1762,12 +2508,6 @@ rewrite_blocks (basic_block entry, enum rewrite_mode what, sbitmap blocks)
if (def_blocks)
dump_tree_ssa_stats (dump_file);
}
-
- if (def_blocks)
- {
- htab_delete (def_blocks);
- def_blocks = NULL;
- }
VEC_free (tree, heap, block_defs_stack);
@@ -1801,14 +2541,6 @@ mark_def_site_blocks (sbitmap interesting_blocks)
{
struct dom_walk_data walk_data;
struct mark_def_sites_global_data mark_def_sites_global_data;
- referenced_var_iterator rvi;
- tree var;
-
- /* Allocate memory for the DEF_BLOCKS hash table. */
- def_blocks = htab_create (num_referenced_vars,
- def_blocks_hash, def_blocks_eq, def_blocks_free);
- FOR_EACH_REFERENCED_VAR(var, rvi)
- set_current_def (var, NULL_TREE);
/* Setup callbacks for the generic dominator tree walker to find and
mark definition sites. */
@@ -1850,6 +2582,51 @@ mark_def_site_blocks (sbitmap interesting_blocks)
}
+/* Initialize internal data needed during renaming. */
+
+static void
+init_ssa_renamer (void)
+{
+ tree var;
+ referenced_var_iterator rvi;
+
+ in_ssa_p = false;
+
+ /* Allocate memory for the DEF_BLOCKS hash table. */
+ gcc_assert (def_blocks == NULL);
+ def_blocks = htab_create (num_referenced_vars, def_blocks_hash,
+ def_blocks_eq, def_blocks_free);
+
+ FOR_EACH_REFERENCED_VAR (var, rvi)
+ set_current_def (var, NULL_TREE);
+
+ /* Allocate the table to map statements to the symbols they load/store. */
+ gcc_assert (mem_syms_tbl == NULL);
+ mem_syms_tbl = htab_create (200, mem_syms_hash, mem_syms_eq, mem_syms_free);
+}
+
+
+/* Deallocate internal data structures used by the renamer. */
+
+static void
+fini_ssa_renamer (void)
+{
+ if (mem_syms_tbl)
+ {
+ htab_delete (mem_syms_tbl);
+ mem_syms_tbl = NULL;
+ }
+
+ if (def_blocks)
+ {
+ htab_delete (def_blocks);
+ def_blocks = NULL;
+ }
+
+ in_ssa_p = true;
+}
+
+
/* Main entry point into the SSA builder. The renaming process
proceeds in four main phases:
@@ -1879,6 +2656,9 @@ rewrite_into_ssa (void)
/* Initialize operand data structures. */
init_ssa_operands ();
+ /* Initialize internal data needed by the renamer. */
+ init_ssa_renamer ();
+
/* Initialize the set of interesting blocks. The callback
mark_def_sites will add to this set those blocks that the renamer
should process. */
@@ -1909,8 +2689,9 @@ rewrite_into_ssa (void)
free (dfs);
sbitmap_free (interesting_blocks);
+ fini_ssa_renamer ();
+
timevar_pop (TV_TREE_SSA_OTHER);
- in_ssa_p = true;
return 0;
}
@@ -2022,6 +2803,8 @@ prepare_block_for_update (basic_block bb, bool insert_phi_p)
lhs_sym = DECL_P (lhs) ? lhs : SSA_NAME_VAR (lhs);
+ /* We have to check all the symbols in the argument list of
+ the PHI node. */
if (symbol_marked_for_renaming (lhs_sym))
{
mark_use_interesting (lhs_sym, phi, bb, insert_phi_p);
@@ -2051,30 +2834,36 @@ prepare_block_for_update (basic_block bb, bool insert_phi_p)
{
tree def = DEF_FROM_PTR (def_p);
tree sym = DECL_P (def) ? def : SSA_NAME_VAR (def);
-
if (symbol_marked_for_renaming (sym))
mark_def_interesting (def, stmt, bb, insert_phi_p);
}
- FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, i, SSA_OP_VIRTUAL_DEFS)
+ if (need_to_update_vops_p && stmt_references_memory_p (stmt))
{
- tree def = DEF_FROM_PTR (def_p);
- tree sym = DECL_P (def) ? def : SSA_NAME_VAR (def);
-
- if (symbol_marked_for_renaming (sym))
- {
- mark_use_interesting (sym, stmt, bb, insert_phi_p);
- mark_def_interesting (sym, stmt, bb, insert_phi_p);
- }
- }
-
- FOR_EACH_SSA_USE_OPERAND (use_p, stmt, i, SSA_OP_VUSE)
- {
- tree use = USE_FROM_PTR (use_p);
- tree sym = DECL_P (use) ? use : SSA_NAME_VAR (use);
-
- if (symbol_marked_for_renaming (sym))
- mark_use_interesting (sym, stmt, bb, insert_phi_p);
+ unsigned i;
+ bitmap_iterator bi;
+ mem_syms_map_t syms;
+
+ syms = syms_referenced_by (stmt);
+
+ if (syms->stored)
+ EXECUTE_IF_SET_IN_BITMAP (syms->stored, 0, i, bi)
+ {
+ tree sym = referenced_var (i);
+ if (symbol_marked_for_renaming (sym))
+ {
+ mark_use_interesting (sym, stmt, bb, insert_phi_p);
+ mark_def_interesting (sym, stmt, bb, insert_phi_p);
+ }
+ }
+
+ if (syms->loaded)
+ EXECUTE_IF_SET_IN_BITMAP (syms->loaded, 0, i, bi)
+ {
+ tree sym = referenced_var (i);
+ if (symbol_marked_for_renaming (sym))
+ mark_use_interesting (sym, stmt, bb, insert_phi_p);
+ }
}
}
@@ -2294,6 +3083,16 @@ dump_update_ssa (FILE *file)
}
}
+ if (stale_ssa_names && !bitmap_empty_p (stale_ssa_names))
+ {
+ fprintf (file, "\n\nSSA names marked stale\n\n");
+ EXECUTE_IF_SET_IN_BITMAP (stale_ssa_names, 0, i, bi)
+ {
+ print_generic_expr (file, ssa_name (i), 0);
+ fprintf (file, " ");
+ }
+ }
+
fprintf (file, "\n\n");
}
@@ -2326,6 +3125,7 @@ init_update_ssa (void)
need_to_update_vops_p = false;
syms_to_rename = BITMAP_ALLOC (NULL);
names_to_release = NULL;
+ stale_ssa_names = NULL;
memset (&update_ssa_stats, 0, sizeof (update_ssa_stats));
update_ssa_stats.virtual_symbols = BITMAP_ALLOC (NULL);
}
@@ -2352,6 +3152,7 @@ delete_update_ssa (void)
need_to_update_vops_p = false;
BITMAP_FREE (syms_to_rename);
BITMAP_FREE (update_ssa_stats.virtual_symbols);
+ BITMAP_FREE (stale_ssa_names);
if (names_to_release)
{
@@ -2361,6 +3162,8 @@ delete_update_ssa (void)
}
clear_ssa_name_info ();
+
+ fini_ssa_renamer ();
}
@@ -2421,9 +3224,33 @@ register_new_name_mapping (tree new, tree old)
void
mark_sym_for_renaming (tree sym)
{
+ /* .MEM is not a regular symbol; it is a device for factoring
+ multiple stores, much like a PHI function factors multiple
+ control flow paths. */
+ gcc_assert (sym != mem_var);
+
+#if 0
+ /* Variables with sub-variables should have their sub-variables
+ marked separately. */
+ gcc_assert (get_subvars_for_var (sym) == NULL);
+#endif
+
if (need_to_initialize_update_ssa_p)
init_update_ssa ();
+#if 1
+ /* HACK. Caller should be responsible for this. */
+ {
+ subvar_t svars;
+ if (var_can_have_subvars (sym) && (svars = get_subvars_for_var (sym)))
+ {
+ subvar_t sv;
+ for (sv = svars; sv; sv = sv->next)
+ bitmap_set_bit (syms_to_rename, DECL_UID (sv->var));
+ }
+ }
+#endif
+
bitmap_set_bit (syms_to_rename, DECL_UID (sym));
if (!is_gimple_reg (sym))
@@ -2439,20 +3266,43 @@ mark_set_for_renaming (bitmap set)
bitmap_iterator bi;
unsigned i;
+#if 0
+ /* Variables with sub-variables should have their sub-variables
+ marked separately. */
+ EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
+ gcc_assert (get_subvars_for_var (referenced_var (i)) == NULL);
+#endif
+
if (bitmap_empty_p (set))
return;
if (need_to_initialize_update_ssa_p)
init_update_ssa ();
+#if 1
+ /* HACK. Caller should be responsible for this. */
+ EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
+ {
+ subvar_t svars;
+ tree var = referenced_var (i);
+ if (var_can_have_subvars (var) && (svars = get_subvars_for_var (var)))
+ {
+ subvar_t sv;
+ for (sv = svars; sv; sv = sv->next)
+ bitmap_set_bit (syms_to_rename, DECL_UID (sv->var));
+ }
+ }
+#endif
+
bitmap_ior_into (syms_to_rename, set);
- EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
- if (!is_gimple_reg (referenced_var (i)))
- {
- need_to_update_vops_p = true;
- break;
- }
+ if (!need_to_update_vops_p)
+ EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
+ if (!is_gimple_reg (referenced_var (i)))
+ {
+ need_to_update_vops_p = true;
+ break;
+ }
}
@@ -2501,7 +3351,8 @@ ssa_names_to_replace (void)
void
release_ssa_name_after_update_ssa (tree name)
{
- gcc_assert (!need_to_initialize_update_ssa_p);
+ if (need_to_initialize_update_ssa_p)
+ init_update_ssa ();
if (names_to_release == NULL)
names_to_release = BITMAP_ALLOC (NULL);
@@ -2685,6 +3536,127 @@ switch_virtuals_to_full_rewrite (void)
}
+/* While the SSA web is updated, statements that make memory stores
+ may need to switch from a factored store into .MEM to a single
+ store into the corresponding symbol and vice versa. For instance,
+ suppose that we have this structure assignment before gathering
+ call-clobbered variables in the aliaser:
+
+ # D.1528_9 = VDEF <.MEM_1(D)>
+ D.1528 = g (); --> stores { D.1528 }
+
+ Since D.1528 is a structure, we create a VDEF for it. And since we
+ have not yet computed call-clobbered variables, we assume that the
+ only symbol stored by the assignment is D.1528 itself.
+
+ After computing call-clobbered variables, however, the store may
+ get converted into a factored store:
+
+ # .MEM_10 = VDEF <.MEM_1(D)>
+ D.1528 = g (); --> stores { s D.1528 }
+
+ But since we had never marked D.1528 for renaming (it was not
+ necessary), the immediate uses of D.1528_9 do not get updated. So,
+ we must update them at the end. */
+
+static void
+replace_stale_ssa_names (void)
+{
+ unsigned i;
+ bitmap_iterator bi;
+ tree new_name;
+
+ /* If there are any .MEM names that are marked to be released, we
+ need to replace their immediate uses with the default definition
+ for .MEM. Consider this
+
+ struct { ... } x;
+ if (i_12 > 10)
+ # .MEM_39 = VDEF <.MEM_4(D)>
+ x = y;
+ else
+ # .MEM_15 = VDEF <.MEM_4(D)>
+ x = z;
+ endif
+ # .MEM_59 = PHI <.MEM_15, .MEM_39>
+
+ After scalarization
+
+ struct { ... } x;
+ if (i_12 > 10)
+ x$a_40 = y$a_39;
+ x$b_41 = y$b_38;
+ else
+ x$a_45 = y$a_35;
+ x$b_46 = y$b_34;
+ endif
+ # .MEM_59 = PHI <.MEM_15, .MEM_39>
+ # x$a_60 = PHI <x$a_40, x$a_45>
+ # x$b_61 = PHI <x$b_41, x$b_46>
+
+ Both .MEM_15 and .MEM_39 have disappeared and have been marked
+ for removal. But since .MEM is not a symbol that can be marked
+ for renaming, the PHI node for it remains in place. Moreover,
+ because 'x' has been scalarized, there will be no uses of .MEM_59
+ downstream. However, the SSA verifier will see uses of .MEM_15
+ and .MEM_39 and trigger an ICE. By replacing both of them with
+ .MEM's default definition, we placate the verifier and maintain
+ the removability of this PHI node. */
+ if (names_to_release)
+ {
+ new_name = get_default_def_for (mem_var);
+ EXECUTE_IF_SET_IN_BITMAP (names_to_release, 0, i, bi)
+ {
+ /* Replace the immediate uses of every released .MEM name
+ with the default definition for .MEM. */
+ tree use_stmt, old_name;
+ imm_use_iterator iter;
+
+ old_name = ssa_name (i);
+
+ /* We only care about .MEM. All other symbols should've
+ been marked for renaming. */
+ if (SSA_NAME_VAR (old_name) != mem_var)
+ continue;
+
+ FOR_EACH_IMM_USE_STMT (use_stmt, iter, old_name)
+ {
+ use_operand_p use_p;
+ FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
+ SET_USE (use_p, new_name);
+ }
+ }
+ }
+
+ /* Replace every stale name with the new name created for the VDEF
+ of its original defining statement. */
+ if (stale_ssa_names)
+ EXECUTE_IF_SET_IN_BITMAP (stale_ssa_names, 0, i, bi)
+ {
+ /* The replacement name for every stale SSA name is the new
+ LHS of the VDEF operator in the original defining
+ statement. */
+ tree use_stmt, old_name, new_name;
+ imm_use_iterator iter;
+
+ old_name = ssa_name (i);
+ new_name = VDEF_RESULT (VDEF_OPS (SSA_NAME_DEF_STMT (old_name)));
+
+ FOR_EACH_IMM_USE_STMT (use_stmt, iter, old_name)
+ {
+ use_operand_p use_p;
+
+ FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
+ SET_USE (use_p, new_name);
+ }
+
+ release_ssa_name_after_update_ssa (old_name);
+ }
+}
+
+
/* Given a set of newly created SSA names (NEW_SSA_NAMES) and a set of
existing SSA names (OLD_SSA_NAMES), update the SSA form so that:
@@ -2764,6 +3736,9 @@ update_ssa (unsigned update_flags)
timevar_push (TV_TREE_SSA_INCREMENTAL);
+ /* Initialize internal data needed by the renamer. */
+ init_ssa_renamer ();
+
blocks_with_phis_to_rewrite = BITMAP_ALLOC (NULL);
if (!phis_to_rewrite)
phis_to_rewrite = VEC_alloc (tree_vec, heap, last_basic_block);
@@ -2914,6 +3889,12 @@ update_ssa (unsigned update_flags)
rewrite_blocks (start_bb, REWRITE_UPDATE, tmp);
+ /* If the update process generated stale SSA names, their immediate
+ uses need to be replaced with the new name that was created in
+ their stead. */
+ if (stale_ssa_names || names_to_release)
+ replace_stale_ssa_names ();
+
sbitmap_free (tmp);
/* Debugging dumps. */
@@ -2947,6 +3928,7 @@ update_ssa (unsigned update_flags)
/* Free allocated memory. */
done:
+ in_ssa_p = true;
EXECUTE_IF_SET_IN_BITMAP (blocks_with_phis_to_rewrite, 0, i, bi)
{
tree_vec phis = VEC_index (tree_vec, phis_to_rewrite, i);
diff --git a/gcc/tree-nrv.c b/gcc/tree-nrv.c
index 54b964f2583..0238a004423 100644
--- a/gcc/tree-nrv.c
+++ b/gcc/tree-nrv.c
@@ -260,31 +260,33 @@ execute_return_slot_opt (void)
&& (call = TREE_OPERAND (stmt, 1),
TREE_CODE (call) == CALL_EXPR)
&& !CALL_EXPR_RETURN_SLOT_OPT (call)
+ && stmt_references_memory_p (stmt)
&& aggregate_value_p (call, call))
{
- def_operand_p def_p;
- ssa_op_iter op_iter;
+ bitmap loads, stores;
+ unsigned i;
+ bitmap_iterator bi;
/* We determine whether or not the LHS address escapes by
asking whether it is call clobbered. When the LHS isn't a
simple decl, we need to check the VDEFs, so it's simplest
- to just loop through all the DEFs. */
- FOR_EACH_SSA_DEF_OPERAND (def_p, stmt, op_iter, SSA_OP_ALL_DEFS)
- {
- tree def = DEF_FROM_PTR (def_p);
- if (TREE_CODE (def) == SSA_NAME)
- def = SSA_NAME_VAR (def);
- if (is_call_clobbered (def))
- goto unsafe;
- }
+ to just loop through all the stored symbols. */
+ loads = BITMAP_ALLOC (NULL);
+ stores = BITMAP_ALLOC (NULL);
+ get_loads_and_stores (stmt, loads, stores);
+ EXECUTE_IF_SET_IN_BITMAP (stores, 0, i, bi)
+ if (is_call_clobbered (referenced_var (i)))
+ goto unsafe;
/* No defs are call clobbered, so the optimization is safe. */
CALL_EXPR_RETURN_SLOT_OPT (call) = 1;
+
/* This is too late to mark the target addressable like we do
in gimplify_modify_expr_rhs, but that's OK; anything that
wasn't already addressable was handled there. */
-
- unsafe:;
+ unsafe:
+ BITMAP_FREE (stores);
+ BITMAP_FREE (loads);
}
}
}
diff --git a/gcc/tree-pass.h b/gcc/tree-pass.h
index 3121735e85a..0664c7eb841 100644
--- a/gcc/tree-pass.h
+++ b/gcc/tree-pass.h
@@ -151,8 +151,6 @@ struct dump_file_info
#define PROP_rtl (1 << 8)
#define PROP_alias (1 << 9)
#define PROP_gimple_lomp (1 << 10) /* lowered OpenMP directives */
-#define PROP_smt_usage (1 << 11) /* which SMT's are
- used alone. */
#define PROP_trees \
(PROP_gimple_any | PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_lomp)
diff --git a/gcc/tree-pretty-print.c b/gcc/tree-pretty-print.c
index 17320db77a0..1f42b2a6c80 100644
--- a/gcc/tree-pretty-print.c
+++ b/gcc/tree-pretty-print.c
@@ -1633,6 +1633,8 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
pp_decimal_int (buffer, SSA_NAME_VERSION (node));
if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (node))
pp_string (buffer, "(ab)");
+ else if (node == default_def (SSA_NAME_VAR (node)))
+ pp_string (buffer, "(D)");
break;
case WITH_SIZE_EXPR:
@@ -2573,38 +2575,53 @@ newline_and_indent (pretty_printer *buffer, int spc)
static void
dump_vops (pretty_printer *buffer, tree stmt, int spc, int flags)
{
- tree use;
- use_operand_p use_p;
- def_operand_p def_p;
- ssa_op_iter iter;
- vuse_vec_p vv;
+ struct vdef_optype_d *vdefs;
+ struct vuse_optype_d *vuses;
+ size_t i, n;
if (!ssa_operands_active ())
return;
- FOR_EACH_SSA_VDEF_OPERAND (def_p, vv, stmt, iter)
+ vuses = VUSE_OPS (stmt);
+ while (vuses)
{
- gcc_assert (VUSE_VECT_NUM_ELEM (*vv) == 1);
- use_p = VUSE_ELEMENT_PTR (*vv, 0);
- pp_string (buffer, "# ");
- dump_generic_node (buffer, DEF_FROM_PTR (def_p),
- spc + 2, flags, false);
- pp_string (buffer, " = VDEF <");
- dump_generic_node (buffer, USE_FROM_PTR (use_p),
- spc + 2, flags, false);
- pp_string (buffer, ">;");
+ pp_string (buffer, "# VUSE <");
+
+ n = VUSE_NUM (vuses);
+ for (i = 0; i < n; i++)
+ {
+ dump_generic_node (buffer, VUSE_OP (vuses, i), spc + 2, flags, false);
+ if (i < n - 1)
+ pp_string (buffer, ", ");
+ }
+
+ pp_string (buffer, ">");
newline_and_indent (buffer, spc);
+ vuses = vuses->next;
}
- FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_VUSE)
+ vdefs = VDEF_OPS (stmt);
+ while (vdefs)
{
- pp_string (buffer, "# VUSE <");
- dump_generic_node (buffer, use, spc + 2, flags, false);
- pp_string (buffer, ">;");
+ pp_string (buffer, "# ");
+ dump_generic_node (buffer, VDEF_RESULT (vdefs), spc + 2, flags, false);
+ pp_string (buffer, " = VDEF <");
+
+ n = VDEF_NUM (vdefs);
+ for (i = 0; i < n; i++)
+ {
+ dump_generic_node (buffer, VDEF_OP (vdefs, i), spc + 2, flags, false);
+ if (i < n - 1)
+ pp_string (buffer, ", ");
+ }
+
+ pp_string (buffer, ">");
newline_and_indent (buffer, spc);
+ vdefs = vdefs->next;
}
}
+
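+/* With the format above, all the operands of each virtual operator
+ are printed on a single line, e.g. (hypothetical):
+
+ # VUSE <a_3, b_5>
+ # .MEM_10 = VDEF <.MEM_8, c_2> */
+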
/* Dumps basic block BB to FILE with details described by FLAGS and
indented by INDENT spaces. */
diff --git a/gcc/tree-sra.c b/gcc/tree-sra.c
index 40d341daf03..0292ab280c7 100644
--- a/gcc/tree-sra.c
+++ b/gcc/tree-sra.c
@@ -1451,20 +1451,12 @@ decide_instantiations (void)
/* Mark all the variables in VDEF operands for STMT for
renaming. This becomes necessary when we modify all of a non-scalar. */
-static void
+static inline void
mark_all_v_defs_1 (tree stmt)
{
- tree sym;
- ssa_op_iter iter;
-
update_stmt_if_modified (stmt);
-
- FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
- {
- if (TREE_CODE (sym) == SSA_NAME)
- sym = SSA_NAME_VAR (sym);
- mark_sym_for_renaming (sym);
- }
+ if (stmt_references_memory_p (stmt))
+ mark_symbols_for_renaming (stmt);
}
@@ -1484,6 +1476,7 @@ mark_all_v_defs (tree list)
}
}
+
/* Mark every replacement under ELT with TREE_NO_WARNING. */
static void
@@ -1831,6 +1824,15 @@ sra_insert_after (block_stmt_iterator *bsi, tree list)
static void
sra_replace (block_stmt_iterator *bsi, tree list)
{
+ tree op, stmt;
+ ssa_op_iter iter;
+
+ stmt = bsi_stmt (*bsi);
+
+ /* None of the virtual SSA names defined in STMT are needed anymore. */
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_VDEF)
+ release_ssa_name_after_update_ssa (op);
+
sra_insert_before (bsi, list);
bsi_remove (bsi, false);
if (bsi_end_p (*bsi))
@@ -2232,10 +2234,11 @@ struct tree_opt_pass pass_sra =
TV_TREE_SRA, /* tv_id */
PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
0, /* properties_provided */
- PROP_smt_usage, /* properties_destroyed */
+ 0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func /* todo_flags_finish */
+ TODO_dump_func
| TODO_update_ssa
- | TODO_ggc_collect | TODO_verify_ssa,
+ | TODO_ggc_collect
+ | TODO_verify_ssa, /* todo_flags_finish */
0 /* letter */
};
diff --git a/gcc/tree-ssa-alias.c b/gcc/tree-ssa-alias.c
index 879202ca103..e160b3aeea5 100644
--- a/gcc/tree-ssa-alias.c
+++ b/gcc/tree-ssa-alias.c
@@ -67,10 +67,6 @@ struct alias_map_d
all the aliases of VAR. */
long total_alias_vops;
- /* Nonzero if the aliases for this memory tag have been grouped
- already. Used in group_aliases. */
- unsigned int grouped_p : 1;
-
/* Set of variables aliased with VAR. This is the exact same
information contained in VAR_ANN (VAR)->MAY_ALIASES, but in
bitmap form to speed up alias grouping. */
@@ -105,14 +101,12 @@ static tree create_memory_tag (tree type, bool is_type_tag);
static tree get_tmt_for (tree, struct alias_info *);
static tree get_nmt_for (tree);
static void add_may_alias (tree, tree);
-static void replace_may_alias (tree, size_t, tree);
static struct alias_info *init_alias_info (void);
static void delete_alias_info (struct alias_info *);
static void compute_flow_sensitive_aliasing (struct alias_info *);
static void setup_pointers_and_addressables (struct alias_info *);
static void create_global_var (void);
static void maybe_create_global_var (struct alias_info *ai);
-static void group_aliases (struct alias_info *);
static void set_pt_anything (tree ptr);
/* Global declarations. */
@@ -394,11 +388,6 @@ set_initial_properties (struct alias_info *ai)
}
-/* This variable is set to true if we are updating the used alone
- information for SMTs, or are in a pass that is going to break it
- temporarily. */
-bool updating_used_alone;
-
/* Compute which variables need to be marked call clobbered because
their tag is call clobbered, and which tags need to be marked
global because they contain global variables. */
@@ -425,119 +414,6 @@ compute_call_clobbered (struct alias_info *ai)
}
-/* Helper for recalculate_used_alone. Return a conservatively correct
- answer as to whether STMT may make a store on the LHS to SYM. */
-
-static bool
-lhs_may_store_to (tree stmt, tree sym ATTRIBUTE_UNUSED)
-{
- tree lhs = TREE_OPERAND (stmt, 0);
-
- lhs = get_base_address (lhs);
-
- if (!lhs)
- return false;
-
- if (TREE_CODE (lhs) == SSA_NAME)
- return false;
- /* We could do better here by looking at the type tag of LHS, but it
- is unclear whether this is worth it. */
- return true;
-}
-
-/* Recalculate the used_alone information for SMTs . */
-
-void
-recalculate_used_alone (void)
-{
- VEC (tree, heap) *calls = NULL;
- block_stmt_iterator bsi;
- basic_block bb;
- tree stmt;
- size_t i;
- referenced_var_iterator rvi;
- tree var;
-
- /* First, reset all the SMT used alone bits to zero. */
- updating_used_alone = true;
- FOR_EACH_REFERENCED_VAR (var, rvi)
- if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
- {
- SMT_OLD_USED_ALONE (var) = SMT_USED_ALONE (var);
- SMT_USED_ALONE (var) = 0;
- }
-
- /* Walk all the statements.
- Calls get put into a list of statements to update, since we will
- need to update operands on them if we make any changes.
- If we see a bare use of a SMT anywhere in a real virtual use or virtual
- def, mark the SMT as used alone, and for renaming. */
- FOR_EACH_BB (bb)
- {
- for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
- {
- bool iscall = false;
- ssa_op_iter iter;
-
- stmt = bsi_stmt (bsi);
-
- if (TREE_CODE (stmt) == CALL_EXPR
- || (TREE_CODE (stmt) == MODIFY_EXPR
- && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR))
- {
- iscall = true;
- VEC_safe_push (tree, heap, calls, stmt);
- }
-
- FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter,
- SSA_OP_VUSE | SSA_OP_VIRTUAL_DEFS)
- {
- tree svar = var;
-
- if (TREE_CODE (var) == SSA_NAME)
- svar = SSA_NAME_VAR (var);
-
- if (TREE_CODE (svar) == SYMBOL_MEMORY_TAG)
- {
- /* We only care about the LHS on calls. */
- if (iscall && !lhs_may_store_to (stmt, svar))
- continue;
-
- if (!SMT_USED_ALONE (svar))
- {
- SMT_USED_ALONE (svar) = true;
-
- /* Only need to mark for renaming if it wasn't
- used alone before. */
- if (!SMT_OLD_USED_ALONE (svar))
- mark_sym_for_renaming (svar);
- }
- }
- }
- }
- }
-
- /* Update the operands on all the calls we saw. */
- if (calls)
- {
- for (i = 0; VEC_iterate (tree, calls, i, stmt); i++)
- update_stmt (stmt);
- }
-
- /* We need to mark SMT's that are no longer used for renaming so the
- symbols go away, or else verification will be angry with us, even
- though they are dead. */
- FOR_EACH_REFERENCED_VAR (var, rvi)
- if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
- {
- if (SMT_OLD_USED_ALONE (var) && !SMT_USED_ALONE (var))
- mark_sym_for_renaming (var);
- }
-
- VEC_free (tree, heap, calls);
- updating_used_alone = false;
-}
-
/* Compute may-alias information for every variable referenced in function
FNDECL.
@@ -593,59 +469,8 @@ recalculate_used_alone (void)
SMT and V conflict (as computed by may_alias_p), then V is marked
as an alias tag and added to the alias set of SMT.
- For instance, consider the following function:
-
- foo (int i)
- {
- int *p, a, b;
-
- if (i > 10)
- p = &a;
- else
- p = &b;
-
- *p = 3;
- a = b + 2;
- return *p;
- }
-
- After aliasing analysis has finished, the symbol memory tag for pointer
- 'p' will have two aliases, namely variables 'a' and 'b'. Every time
- pointer 'p' is dereferenced, we want to mark the operation as a
- potential reference to 'a' and 'b'.
-
- foo (int i)
- {
- int *p, a, b;
-
- if (i_2 > 10)
- p_4 = &a;
- else
- p_6 = &b;
- # p_1 = PHI <p_4(1), p_6(2)>;
-
- # a_7 = VDEF <a_3>;
- # b_8 = VDEF <b_5>;
- *p_1 = 3;
-
- # a_9 = VDEF <a_7>
- # VUSE <b_8>
- a_9 = b_8 + 2;
-
- # VUSE <a_9>;
- # VUSE <b_8>;
- return *p_1;
- }
-
- In certain cases, the list of may aliases for a pointer may grow too
- large. This may cause an explosion in the number of virtual operands
- inserted in the code. Resulting in increased memory consumption and
- compilation time.
-
- When the number of virtual operands needed to represent aliased
- loads and stores grows too large (configurable with @option{--param
- max-aliased-vops}), alias sets are grouped to avoid severe
- compile-time slow downs and memory consumption. See group_aliases. */
+ [ ADD DOCUMENTATION. ]
+*/
static unsigned int
compute_may_aliases (void)
@@ -679,20 +504,16 @@ compute_may_aliases (void)
memory tags. */
compute_flow_insensitive_aliasing (ai);
- /* Determine if we need to enable alias grouping. */
- if (ai->total_alias_vops >= MAX_ALIASED_VOPS)
- group_aliases (ai);
-
/* Compute call clobbering information. */
compute_call_clobbered (ai);
- /* If the program has too many call-clobbered variables and/or function
- calls, create .GLOBAL_VAR and use it to model call-clobbering
- semantics at call sites. This reduces the number of virtual operands
- considerably, improving compile times at the expense of lost
- aliasing precision. */
+ /* If the program makes no reference to global variables, but it
+ contains a mixture of pure and non-pure functions, then we need
+ to create use-def and def-def links between these functions to
+ avoid invalid transformations on them. */
maybe_create_global_var (ai);
+
/* If the program contains ref-all pointers, finalize may-alias information
for them. This pass needs to be run after call-clobbering information
has been computed. */
@@ -712,7 +533,6 @@ compute_may_aliases (void)
/* Deallocate memory used by aliasing data structures. */
delete_alias_info (ai);
- updating_used_alone = true;
{
block_stmt_iterator bsi;
basic_block bb;
@@ -724,8 +544,7 @@ compute_may_aliases (void)
}
}
}
- recalculate_used_alone ();
- updating_used_alone = false;
+
return 0;
}
@@ -1140,7 +959,7 @@ compute_flow_sensitive_aliasing (struct alias_info *ai)
For every pointer P in AI->POINTERS and addressable variable V in
AI->ADDRESSABLE_VARS, add V to the may-alias sets of P's symbol
memory tag (SMT) if their alias sets conflict. V is then marked as
- an alias tag so that the operand scanner knows that statements
+ an aliased symbol so that the operand scanner knows that statements
containing V have aliased operands. */
static void
@@ -1212,6 +1031,7 @@ compute_flow_insensitive_aliasing (struct alias_info *ai)
|| get_subvars_for_var (var) == NULL);
add_may_alias (tag, var);
+
/* Update the bitmap used to represent TAG's alias set
in case we need to group aliases. */
bitmap_set_bit (p_map->may_aliases, DECL_UID (var));
@@ -1224,8 +1044,6 @@ compute_flow_insensitive_aliasing (struct alias_info *ai)
count as a reference to VAR). */
ai->total_alias_vops += (num_var_refs + num_tag_refs);
p_map->total_alias_vops += (num_var_refs + num_tag_refs);
-
-
}
}
}
@@ -1320,268 +1138,25 @@ finalize_ref_all_pointers (struct alias_info *ai)
{
size_t i;
- if (global_var)
- add_may_alias (ai->ref_all_symbol_mem_tag, global_var);
- else
- {
- /* First add the real call-clobbered variables. */
- for (i = 0; i < ai->num_addressable_vars; i++)
- {
- tree var = ai->addressable_vars[i]->var;
- if (is_call_clobbered (var))
- add_may_alias (ai->ref_all_symbol_mem_tag, var);
- }
-
- /* Then add the call-clobbered pointer memory tags. See
- compute_flow_insensitive_aliasing for the rationale. */
- for (i = 0; i < ai->num_pointers; i++)
- {
- tree ptr = ai->pointers[i]->var, tag;
- if (PTR_IS_REF_ALL (ptr))
- continue;
- tag = var_ann (ptr)->symbol_mem_tag;
- if (is_call_clobbered (tag))
- add_may_alias (ai->ref_all_symbol_mem_tag, tag);
- }
- }
-}
-
-
-/* Comparison function for qsort used in group_aliases. */
-
-static int
-total_alias_vops_cmp (const void *p, const void *q)
-{
- const struct alias_map_d **p1 = (const struct alias_map_d **)p;
- const struct alias_map_d **p2 = (const struct alias_map_d **)q;
- long n1 = (*p1)->total_alias_vops;
- long n2 = (*p2)->total_alias_vops;
-
- /* We want to sort in descending order. */
- return (n1 > n2 ? -1 : (n1 == n2) ? 0 : 1);
-}
-
-/* Group all the aliases for TAG to make TAG represent all the
- variables in its alias set. Update the total number
- of virtual operands due to aliasing (AI->TOTAL_ALIAS_VOPS). This
- function will make TAG be the unique alias tag for all the
- variables in its may-aliases. So, given:
-
- may-aliases(TAG) = { V1, V2, V3 }
-
- This function will group the variables into:
-
- may-aliases(V1) = { TAG }
- may-aliases(V2) = { TAG }
- may-aliases(V2) = { TAG } */
-
-static void
-group_aliases_into (tree tag, bitmap tag_aliases, struct alias_info *ai)
-{
- unsigned int i;
- var_ann_t tag_ann = var_ann (tag);
- size_t num_tag_refs = NUM_REFERENCES (tag_ann);
- bitmap_iterator bi;
-
- EXECUTE_IF_SET_IN_BITMAP (tag_aliases, 0, i, bi)
+ /* First add the real call-clobbered variables. */
+ for (i = 0; i < ai->num_addressable_vars; i++)
{
- tree var = referenced_var (i);
- var_ann_t ann = var_ann (var);
-
- /* Make TAG the unique alias of VAR. */
- ann->is_aliased = 0;
- ann->may_aliases = NULL;
-
- /* Note that VAR and TAG may be the same if the function has no
- addressable variables (see the discussion at the end of
- setup_pointers_and_addressables). */
- if (var != tag)
- add_may_alias (var, tag);
-
- /* Reduce total number of virtual operands contributed
- by TAG on behalf of VAR. Notice that the references to VAR
- itself won't be removed. We will merely replace them with
- references to TAG. */
- ai->total_alias_vops -= num_tag_refs;
+ tree var = ai->addressable_vars[i]->var;
+ if (is_call_clobbered (var))
+ add_may_alias (ai->ref_all_symbol_mem_tag, var);
}
- /* We have reduced the number of virtual operands that TAG makes on
- behalf of all the variables formerly aliased with it. However,
- we have also "removed" all the virtual operands for TAG itself,
- so we add them back. */
- ai->total_alias_vops += num_tag_refs;
-
- /* TAG no longer has any aliases. */
- tag_ann->may_aliases = NULL;
-}
-
-
-/* Group may-aliases sets to reduce the number of virtual operands due
- to aliasing.
-
- 1- Sort the list of pointers in decreasing number of contributed
- virtual operands.
-
- 2- Take the first entry in AI->POINTERS and revert the role of
- the memory tag and its aliases. Usually, whenever an aliased
- variable Vi is found to alias with a memory tag T, we add Vi
- to the may-aliases set for T. Meaning that after alias
- analysis, we will have:
-
- may-aliases(T) = { V1, V2, V3, ..., Vn }
-
- This means that every statement that references T, will get 'n'
- virtual operands for each of the Vi tags. But, when alias
- grouping is enabled, we make T an alias tag and add it to the
- alias set of all the Vi variables:
-
- may-aliases(V1) = { T }
- may-aliases(V2) = { T }
- ...
- may-aliases(Vn) = { T }
-
- This has two effects: (a) statements referencing T will only get
- a single virtual operand, and, (b) all the variables Vi will now
- appear to alias each other. So, we lose alias precision to
- improve compile time. But, in theory, a program with such a high
- level of aliasing should not be very optimizable in the first
- place.
-
- 3- Since variables may be in the alias set of more than one
- memory tag, the grouping done in step (2) needs to be extended
- to all the memory tags that have a non-empty intersection with
- the may-aliases set of tag T. For instance, if we originally
- had these may-aliases sets:
-
- may-aliases(T) = { V1, V2, V3 }
- may-aliases(R) = { V2, V4 }
-
- In step (2) we would have reverted the aliases for T as:
-
- may-aliases(V1) = { T }
- may-aliases(V2) = { T }
- may-aliases(V3) = { T }
-
- But note that now V2 is no longer aliased with R. We could
- add R to may-aliases(V2), but we are in the process of
- grouping aliases to reduce virtual operands so what we do is
- add V4 to the grouping to obtain:
-
- may-aliases(V1) = { T }
- may-aliases(V2) = { T }
- may-aliases(V3) = { T }
- may-aliases(V4) = { T }
-
- 4- If the total number of virtual operands due to aliasing is
- still above the threshold set by max-alias-vops, go back to (2). */
-
-static void
-group_aliases (struct alias_info *ai)
-{
- size_t i;
- tree ptr;
-
- /* Sort the POINTERS array in descending order of contributed
- virtual operands. */
- qsort (ai->pointers, ai->num_pointers, sizeof (struct alias_map_d *),
- total_alias_vops_cmp);
-
- /* For every pointer in AI->POINTERS, reverse the roles of its tag
- and the tag's may-aliases set. */
+ /* Then add the call-clobbered pointer memory tags. See
+ compute_flow_insensitive_aliasing for the rationale. */
for (i = 0; i < ai->num_pointers; i++)
{
- size_t j;
- tree tag1 = var_ann (ai->pointers[i]->var)->symbol_mem_tag;
- bitmap tag1_aliases = ai->pointers[i]->may_aliases;
-
- /* Skip tags that have been grouped already. */
- if (ai->pointers[i]->grouped_p)
+ tree ptr = ai->pointers[i]->var, tag;
+ if (PTR_IS_REF_ALL (ptr))
continue;
-
- /* See if TAG1 had any aliases in common with other symbol tags.
- If we find a TAG2 with common aliases with TAG1, add TAG2's
- aliases into TAG1. */
- for (j = i + 1; j < ai->num_pointers; j++)
- {
- bitmap tag2_aliases = ai->pointers[j]->may_aliases;
-
- if (bitmap_intersect_p (tag1_aliases, tag2_aliases))
- {
- tree tag2 = var_ann (ai->pointers[j]->var)->symbol_mem_tag;
-
- bitmap_ior_into (tag1_aliases, tag2_aliases);
-
- /* TAG2 does not need its aliases anymore. */
- bitmap_clear (tag2_aliases);
- var_ann (tag2)->may_aliases = NULL;
-
- /* TAG1 is the unique alias of TAG2. */
- add_may_alias (tag2, tag1);
-
- ai->pointers[j]->grouped_p = true;
- }
- }
-
- /* Now group all the aliases we collected into TAG1. */
- group_aliases_into (tag1, tag1_aliases, ai);
-
- /* If we've reduced total number of virtual operands below the
- threshold, stop. */
- if (ai->total_alias_vops < MAX_ALIASED_VOPS)
- break;
+ tag = var_ann (ptr)->symbol_mem_tag;
+ if (is_call_clobbered (tag))
+ add_may_alias (ai->ref_all_symbol_mem_tag, tag);
}
-
- /* Finally, all the variables that have been grouped cannot be in
- the may-alias set of name memory tags. Suppose that we have
- grouped the aliases in this code so that may-aliases(a) = SMT.20
-
- p_5 = &a;
- ...
- # a_9 = VDEF <a_8>
- p_5->field = 0
- ... Several modifications to SMT.20 ...
- # VUSE <a_9>
- x_30 = p_5->field
-
- Since p_5 points to 'a', the optimizers will try to propagate 0
- into p_5->field, but that is wrong because there have been
- modifications to 'SMT.20' in between. To prevent this we have to
- replace 'a' with 'SMT.20' in the name tag of p_5. */
- for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
- {
- size_t j;
- tree name_tag = SSA_NAME_PTR_INFO (ptr)->name_mem_tag;
- VEC(tree,gc) *aliases;
- tree alias;
-
- if (name_tag == NULL_TREE)
- continue;
-
- aliases = var_ann (name_tag)->may_aliases;
- for (j = 0; VEC_iterate (tree, aliases, j, alias); j++)
- {
- var_ann_t ann = var_ann (alias);
-
- if ((!MTAG_P (alias)
- || TREE_CODE (alias) == STRUCT_FIELD_TAG)
- && ann->may_aliases)
- {
- tree new_alias;
-
- gcc_assert (VEC_length (tree, ann->may_aliases) == 1);
-
- new_alias = VEC_index (tree, ann->may_aliases, 0);
- replace_may_alias (name_tag, j, new_alias);
- }
- }
- }
-
- if (dump_file)
- fprintf (dump_file,
- "%s: Total number of aliased vops after grouping: %ld%s\n",
- get_name (current_function_decl),
- ai->total_alias_vops,
- (ai->total_alias_vops < 0) ? " (negative values are OK)" : "");
}
@@ -1778,32 +1353,11 @@ setup_pointers_and_addressables (struct alias_info *ai)
}
-/* Determine whether to use .GLOBAL_VAR to model call clobbering semantics. At
- every call site, we need to emit VDEF expressions to represent the
- clobbering effects of the call for variables whose address escapes the
- current function.
-
- One approach is to group all call-clobbered variables into a single
- representative that is used as an alias of every call-clobbered variable
- (.GLOBAL_VAR). This works well, but it ties the optimizer hands because
- references to any call clobbered variable is a reference to .GLOBAL_VAR.
-
- The second approach is to emit a clobbering VDEF for every
- call-clobbered variable at call sites. This is the preferred way
- in terms of optimization opportunities but it may create too many
- VDEF operands if there are many call clobbered variables and
- function calls in the function.
-
- To decide whether or not to use .GLOBAL_VAR we multiply the number of
- function calls found by the number of call-clobbered variables. If that
- product is beyond a certain threshold, as determined by the parameterized
- values shown below, we use .GLOBAL_VAR.
-
- FIXME. This heuristic should be improved. One idea is to use several
- .GLOBAL_VARs of different types instead of a single one. The thresholds
- have been derived from a typical bootstrap cycle, including all target
- libraries. Compile times were found increase by ~1% compared to using
- .GLOBAL_VAR. */
+/* Determine whether to use .GLOBAL_VAR to model call clobbering
+ semantics. If the function makes no references to global
+ variables and contains at least one call to a non-pure function,
+ then we need to mark the side-effects of the call using .GLOBAL_VAR
+ to represent all possible global memory referenced by the callee. */
static void
maybe_create_global_var (struct alias_info *ai)
@@ -1821,11 +1375,7 @@ maybe_create_global_var (struct alias_info *ai)
n_clobbered++;
}
- /* If the number of virtual operands that would be needed to
- model all the call-clobbered variables is larger than
- GLOBAL_VAR_THRESHOLD, create .GLOBAL_VAR.
-
- Also create .GLOBAL_VAR if there are no call-clobbered
+ /* Create .GLOBAL_VAR if there are no call-clobbered
variables and the program contains a mixture of pure/const
and regular function calls. This is to avoid the problem
described in PR 20115:
@@ -1848,32 +1398,12 @@ maybe_create_global_var (struct alias_info *ai)
So, if we have some pure/const and some regular calls in the
program we create .GLOBAL_VAR to avoid missing these
relations. */
- if (ai->num_calls_found * n_clobbered >= (size_t) GLOBAL_VAR_THRESHOLD
- || (n_clobbered == 0
- && ai->num_calls_found > 0
- && ai->num_pure_const_calls_found > 0
- && ai->num_calls_found > ai->num_pure_const_calls_found))
+ if (n_clobbered == 0
+ && ai->num_calls_found > 0
+ && ai->num_pure_const_calls_found > 0
+ && ai->num_calls_found > ai->num_pure_const_calls_found)
create_global_var ();
}
-
- /* Mark all call-clobbered symbols for renaming. Since the initial
- rewrite into SSA ignored all call sites, we may need to rename
- .GLOBAL_VAR and the call-clobbered variables. */
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
- {
- tree var = referenced_var (i);
-
- /* If the function has calls to clobbering functions and
- .GLOBAL_VAR has been created, make it an alias for all
- call-clobbered variables. */
- if (global_var && var != global_var)
- {
- add_may_alias (var, global_var);
- gcc_assert (!get_subvars_for_var (var));
- }
-
- mark_sym_for_renaming (var);
- }
}
@@ -1946,15 +1476,14 @@ may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
return false;
}
- /* If var is a record or union type, ptr cannot point into var
- unless there is some operation explicit address operation in the
- program that can reference a field of the ptr's dereferenced
- type. This also assumes that the types of both var and ptr are
+ /* If VAR is a record or union type, PTR cannot point into VAR
+ unless there is some explicit address operation in the
+ program that can reference a field of the type pointed-to by PTR.
+ This also assumes that the types of both VAR and PTR are
contained within the compilation unit, and that there is no fancy
addressing arithmetic associated with any of the types
involved. */
-
- if ((mem_alias_set != 0) && (var_alias_set != 0))
+ if (mem_alias_set != 0 && var_alias_set != 0)
{
tree ptr_type = TREE_TYPE (ptr);
tree var_type = TREE_TYPE (var);
@@ -1966,13 +1495,13 @@ may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
{
int ptr_star_count = 0;
- /* Ipa_type_escape_star_count_of_interesting_type is a little to
- restrictive for the pointer type, need to allow pointers to
- primitive types as long as those types cannot be pointers
- to everything. */
+	  /* ipa_type_escape_star_count_of_interesting_type is a
+	     little too restrictive for the pointer type; we need to
+	     allow pointers to primitive types as long as those types
+	     cannot be pointers to everything.  */
while (POINTER_TYPE_P (ptr_type))
- /* Strip the *'s off. */
{
+ /* Strip the *s off. */
ptr_type = TREE_TYPE (ptr_type);
ptr_star_count++;
}
@@ -1980,7 +1509,6 @@ may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
/* There does not appear to be a better test to see if the
pointer type was one of the pointer to everything
types. */
-
if (ptr_star_count > 0)
{
alias_stats.structnoaddress_queries++;
@@ -1994,7 +1522,7 @@ may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
}
else if (ptr_star_count == 0)
{
- /* If ptr_type was not really a pointer to type, it cannot
+ /* If PTR_TYPE was not really a pointer to type, it cannot
alias. */
alias_stats.structnoaddress_queries++;
alias_stats.structnoaddress_resolved++;
@@ -2029,6 +1557,10 @@ add_may_alias (tree var, tree alias)
gcc_assert (may_be_aliased (alias));
#endif
+  /* VAR must be a symbol memory tag or a name memory tag.  */
+ gcc_assert (TREE_CODE (var) == SYMBOL_MEMORY_TAG
+ || TREE_CODE (var) == NAME_MEMORY_TAG);
+
if (v_ann->may_aliases == NULL)
v_ann->may_aliases = VEC_alloc (tree, gc, 2);
@@ -2042,16 +1574,6 @@ add_may_alias (tree var, tree alias)
}
-/* Replace alias I in the alias sets of VAR with NEW_ALIAS. */
-
-static void
-replace_may_alias (tree var, size_t i, tree new_alias)
-{
- var_ann_t v_ann = var_ann (var);
- VEC_replace (tree, v_ann->may_aliases, i, new_alias);
-}
-
-
/* Mark pointer PTR as pointing to an arbitrary memory location. */
static void
@@ -2614,6 +2136,7 @@ debug_may_aliases_for (tree var)
dump_may_aliases_for (stderr, var);
}
+
/* Return true if VAR may be aliased. */
bool
@@ -2625,26 +2148,22 @@ may_be_aliased (tree var)
/* Globally visible variables can have their addresses taken by other
translation units. */
-
- if (MTAG_P (var)
- && (MTAG_GLOBAL (var) || TREE_PUBLIC (var)))
- return true;
- else if (!MTAG_P (var)
- && (DECL_EXTERNAL (var) || TREE_PUBLIC (var)))
+ if (is_global_var (var))
return true;
- /* Automatic variables can't have their addresses escape any other way.
- This must be after the check for global variables, as extern declarations
- do not have TREE_STATIC set. */
+ /* Automatic variables can't have their addresses escape any other
+ way. This must be after the check for global variables, as
+ extern declarations do not have TREE_STATIC set. */
if (!TREE_STATIC (var))
return false;
- /* If we're in unit-at-a-time mode, then we must have seen all occurrences
- of address-of operators, and so we can trust TREE_ADDRESSABLE. Otherwise
- we can only be sure the variable isn't addressable if it's local to the
- current function. */
+ /* If we're in unit-at-a-time mode, then we must have seen all
+ occurrences of address-of operators, and so we can trust
+ TREE_ADDRESSABLE. Otherwise we can only be sure the variable
+ isn't addressable if it's local to the current function. */
if (flag_unit_at_a_time)
return false;
+
if (decl_function_context (var) == current_function_decl)
return false;
@@ -2653,6 +2172,7 @@ may_be_aliased (tree var)
/* Given two symbols return TRUE if one is in the alias set of the other. */
+
bool
is_aliased_with (tree tag, tree sym)
{
diff --git a/gcc/tree-ssa-ccp.c b/gcc/tree-ssa-ccp.c
index a49b3cee028..95434897235 100644
--- a/gcc/tree-ssa-ccp.c
+++ b/gcc/tree-ssa-ccp.c
@@ -1418,11 +1418,15 @@ struct tree_opt_pass pass_ccp =
TV_TREE_CCP, /* tv_id */
PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
0, /* properties_provided */
- PROP_smt_usage, /* properties_destroyed */
+ 0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_cleanup_cfg | TODO_dump_func | TODO_update_ssa
- | TODO_ggc_collect | TODO_verify_ssa
- | TODO_verify_stmts | TODO_update_smt_usage, /* todo_flags_finish */
+ TODO_cleanup_cfg
+ | TODO_dump_func
+ | TODO_update_ssa
+ | TODO_ggc_collect
+ | TODO_verify_ssa
+ | TODO_verify_stmts
+ | TODO_update_smt_usage, /* todo_flags_finish */
0 /* letter */
};
@@ -1456,12 +1460,15 @@ struct tree_opt_pass pass_store_ccp =
TV_TREE_STORE_CCP, /* tv_id */
PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
0, /* properties_provided */
- PROP_smt_usage, /* properties_destroyed */
+ 0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func | TODO_update_ssa
- | TODO_ggc_collect | TODO_verify_ssa
+ TODO_dump_func
+ | TODO_update_ssa
+ | TODO_ggc_collect
+ | TODO_verify_ssa
| TODO_cleanup_cfg
- | TODO_verify_stmts | TODO_update_smt_usage, /* todo_flags_finish */
+ | TODO_verify_stmts
+ | TODO_update_smt_usage, /* todo_flags_finish */
0 /* letter */
};
@@ -2474,7 +2481,7 @@ convert_to_gimple_builtin (block_stmt_iterator *si_p, tree expr)
tree new_stmt = tsi_stmt (ti);
find_new_referenced_vars (tsi_stmt_ptr (ti));
bsi_insert_before (si_p, new_stmt, BSI_NEW_STMT);
- mark_new_vars_to_rename (bsi_stmt (*si_p));
+ mark_symbols_for_renaming (new_stmt);
bsi_next (si_p);
}
@@ -2536,17 +2543,20 @@ execute_fold_all_builtins (void)
print_generic_stmt (dump_file, *stmtp, dump_flags);
}
+ push_stmt_changes (stmtp);
+
if (!set_rhs (stmtp, result))
{
result = convert_to_gimple_builtin (&i, result);
if (result)
{
bool ok = set_rhs (stmtp, result);
-
gcc_assert (ok);
}
}
- mark_new_vars_to_rename (*stmtp);
+
+ pop_stmt_changes (stmtp);
+
if (maybe_clean_or_replace_eh_stmt (old_stmt, *stmtp)
&& tree_purge_dead_eh_edges (bb))
cfg_changed = true;
diff --git a/gcc/tree-ssa-copy.c b/gcc/tree-ssa-copy.c
index 86d38cbddfd..c415e6e17ae 100644
--- a/gcc/tree-ssa-copy.c
+++ b/gcc/tree-ssa-copy.c
@@ -63,6 +63,16 @@ may_propagate_copy (tree dest, tree orig)
tree type_d = TREE_TYPE (dest);
tree type_o = TREE_TYPE (orig);
+ /* Handle copies between .MEM and memory symbols first. They are
+ always OK, even though they may not be of compatible types. Note
+ that we always treat .MEM outside the usual symbol rules. It
+ represents all of memory, and as such is compatible with any
+ symbol that needs to reside in memory. */
+ if (TREE_CODE (dest) == SSA_NAME && SSA_NAME_VAR (dest) == mem_var)
+ return TREE_CODE (orig) == SSA_NAME && !is_gimple_reg (orig);
+ else if (TREE_CODE (orig) == SSA_NAME && SSA_NAME_VAR (orig) == mem_var)
+ return TREE_CODE (dest) == SSA_NAME && !is_gimple_reg (dest);
+
/* Do not copy between types for which we *do* need a conversion. */
if (!tree_ssa_useless_type_conversion_1 (type_d, type_o))
return false;
@@ -188,6 +198,18 @@ merge_alias_info (tree orig, tree new)
var_ann_t new_ann = var_ann (new_sym);
var_ann_t orig_ann = var_ann (orig_sym);
+ /* No merging necessary when .MEM is involved. */
+ if (new_sym == mem_var)
+ {
+ gcc_assert (!is_gimple_reg (orig_sym));
+ return;
+ }
+ else if (orig_sym == mem_var)
+ {
+ gcc_assert (!is_gimple_reg (new_sym));
+ return;
+ }
+
gcc_assert (POINTER_TYPE_P (TREE_TYPE (orig)));
gcc_assert (POINTER_TYPE_P (TREE_TYPE (new)));
diff --git a/gcc/tree-ssa-dce.c b/gcc/tree-ssa-dce.c
index 07ab2096d44..04ae8e4fbb3 100644
--- a/gcc/tree-ssa-dce.c
+++ b/gcc/tree-ssa-dce.c
@@ -550,7 +550,7 @@ propagate_necessity (struct edge_list *el)
}
}
}
-
+
/* Eliminate unnecessary statements. Any instruction not marked as necessary
contributes nothing to the program, and can be deleted. */
@@ -637,9 +637,6 @@ static void
remove_dead_stmt (block_stmt_iterator *i, basic_block bb)
{
tree t = bsi_stmt (*i);
- def_operand_p def_p;
-
- ssa_op_iter iter;
if (dump_file && (dump_flags & TDF_DETAILS))
{
@@ -714,15 +711,11 @@ remove_dead_stmt (block_stmt_iterator *i, basic_block bb)
}
}
- FOR_EACH_SSA_DEF_OPERAND (def_p, t, iter, SSA_OP_VIRTUAL_DEFS)
- {
- tree def = DEF_FROM_PTR (def_p);
- mark_sym_for_renaming (SSA_NAME_VAR (def));
- }
bsi_remove (i, true);
release_defs (t);
}
-
+
+
/* Print out removed statement statistics. */
static void
diff --git a/gcc/tree-ssa-dom.c b/gcc/tree-ssa-dom.c
index 6f1c39ce6ba..7a4a97d8e3b 100644
--- a/gcc/tree-ssa-dom.c
+++ b/gcc/tree-ssa-dom.c
@@ -101,7 +101,11 @@ static VEC(tree,heap) *avail_exprs_stack;
expressions are removed from AVAIL_EXPRS. Else we may change the
hash code for an expression and be unable to find/remove it from
AVAIL_EXPRS. */
-static VEC(tree,heap) *stmts_to_rescan;
+typedef tree *tree_p;
+DEF_VEC_P(tree_p);
+DEF_VEC_ALLOC_P(tree_p,heap);
+
+static VEC(tree_p,heap) *stmts_to_rescan;
/* Structure for entries in the expression hash table.
@@ -248,7 +252,7 @@ tree_ssa_dominator_optimize (void)
avail_exprs = htab_create (1024, real_avail_expr_hash, avail_expr_eq, free);
avail_exprs_stack = VEC_alloc (tree, heap, 20);
const_and_copies_stack = VEC_alloc (tree, heap, 20);
- stmts_to_rescan = VEC_alloc (tree, heap, 20);
+ stmts_to_rescan = VEC_alloc (tree_p, heap, 20);
need_eh_cleanup = BITMAP_ALLOC (NULL);
/* Setup callbacks for the generic dominator tree walker. */
@@ -357,7 +361,7 @@ tree_ssa_dominator_optimize (void)
VEC_free (tree, heap, avail_exprs_stack);
VEC_free (tree, heap, const_and_copies_stack);
- VEC_free (tree, heap, stmts_to_rescan);
+ VEC_free (tree_p, heap, stmts_to_rescan);
return 0;
}
@@ -378,7 +382,7 @@ struct tree_opt_pass pass_dominator =
TV_TREE_SSA_DOMINATOR_OPTS, /* tv_id */
PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
0, /* properties_provided */
- PROP_smt_usage, /* properties_destroyed */
+ 0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_dump_func
| TODO_update_ssa
@@ -699,16 +703,17 @@ dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
/* If we queued any statements to rescan in this block, then
go ahead and rescan them now. */
- while (VEC_length (tree, stmts_to_rescan) > 0)
+ while (VEC_length (tree_p, stmts_to_rescan) > 0)
{
- tree stmt = VEC_last (tree, stmts_to_rescan);
+ tree *stmt_p = VEC_last (tree_p, stmts_to_rescan);
+ tree stmt = *stmt_p;
basic_block stmt_bb = bb_for_stmt (stmt);
if (stmt_bb != bb)
break;
- VEC_pop (tree, stmts_to_rescan);
- mark_new_vars_to_rename (stmt);
+ VEC_pop (tree_p, stmts_to_rescan);
+ pop_stmt_changes (stmt_p);
}
}
@@ -1530,9 +1535,7 @@ eliminate_redundant_computations (tree stmt)
Detect and record those equivalences. */
static void
-record_equivalences_from_stmt (tree stmt,
- int may_optimize_p,
- stmt_ann_t ann)
+record_equivalences_from_stmt (tree stmt, int may_optimize_p, stmt_ann_t ann)
{
tree lhs = TREE_OPERAND (stmt, 0);
enum tree_code lhs_code = TREE_CODE (lhs);
@@ -1561,6 +1564,7 @@ record_equivalences_from_stmt (tree stmt,
vops and recording the result in the available expression table,
we may be able to expose more redundant loads. */
if (!ann->has_volatile_ops
+ && stmt_references_memory_p (stmt)
&& (TREE_CODE (TREE_OPERAND (stmt, 1)) == SSA_NAME
|| is_gimple_min_invariant (TREE_OPERAND (stmt, 1)))
&& !is_gimple_reg (lhs))
@@ -1766,6 +1770,7 @@ optimize_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
ann = stmt_ann (stmt);
opt_stats.num_stmts++;
may_have_exposed_new_symbols = false;
+ push_stmt_changes (bsi_stmt_ptr (si));
if (dump_file && (dump_flags & TDF_DETAILS))
{
@@ -1826,9 +1831,7 @@ optimize_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
/* Record any additional equivalences created by this statement. */
if (TREE_CODE (stmt) == MODIFY_EXPR)
- record_equivalences_from_stmt (stmt,
- may_optimize_p,
- ann);
+ record_equivalences_from_stmt (stmt, may_optimize_p, ann);
/* If STMT is a COND_EXPR and it was modified, then we may know
where it goes. If that is the case, then mark the CFG as altered.
@@ -1855,7 +1858,6 @@ optimize_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
Ultimately I suspect we're going to need to change the interface
into the SSA_NAME manager. */
-
if (ann->modified)
{
tree val = NULL;
@@ -1879,7 +1881,20 @@ optimize_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
}
if (may_have_exposed_new_symbols)
- VEC_safe_push (tree, heap, stmts_to_rescan, bsi_stmt (si));
+ {
+ /* Queue the statement to be re-scanned after all the
+ AVAIL_EXPRS have been processed. The change buffer stack for
+ all the pushed statements will be processed when this queue
+ is emptied. */
+ VEC_safe_push (tree_p, heap, stmts_to_rescan, bsi_stmt_ptr (si));
+ }
+ else
+ {
+ /* Otherwise, just pop the recently pushed change buffer. If
+ we didn't, the STMTS_TO_RESCAN queue would get out of sync
+ with the change buffer stack. */
+ pop_stmt_changes (bsi_stmt_ptr (si));
+ }
}
/* Search for an existing instance of STMT in the AVAIL_EXPRS table. If
@@ -2154,6 +2169,8 @@ propagate_rhs_into_lhs (tree stmt, tree lhs, tree rhs, bitmap interesting_names)
fprintf (dump_file, "\n");
}
+ push_stmt_changes (&use_stmt);
+
/* Propagate the RHS into this use of the LHS. */
FOR_EACH_IMM_USE_ON_STMT (use_p, iter)
propagate_value (use_p, rhs);
@@ -2188,6 +2205,8 @@ propagate_rhs_into_lhs (tree stmt, tree lhs, tree rhs, bitmap interesting_names)
tree result = get_lhs_or_phi_result (use_stmt);
bitmap_set_bit (interesting_names, SSA_NAME_VERSION (result));
}
+
+ pop_stmt_changes (&use_stmt);
continue;
}
@@ -2196,11 +2215,7 @@ propagate_rhs_into_lhs (tree stmt, tree lhs, tree rhs, bitmap interesting_names)
we may expose new operands, expose dead EH edges,
etc. */
fold_stmt_inplace (use_stmt);
-
- /* Sometimes propagation can expose new operands to the
- renamer. Note this will call update_stmt at the
- appropriate time. */
- mark_new_vars_to_rename (use_stmt);
+ pop_stmt_changes (&use_stmt);
/* Dump details. */
if (dump_file && (dump_flags & TDF_DETAILS))
@@ -2488,7 +2503,7 @@ struct tree_opt_pass pass_phi_only_cprop =
TV_TREE_PHI_CPROP, /* tv_id */
PROP_cfg | PROP_ssa | PROP_alias, /* properties_required */
0, /* properties_provided */
- PROP_smt_usage, /* properties_destroyed */
+ 0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_cleanup_cfg | TODO_dump_func
| TODO_ggc_collect | TODO_verify_ssa
diff --git a/gcc/tree-ssa-dse.c b/gcc/tree-ssa-dse.c
index 717f20f5265..b55ec723557 100644
--- a/gcc/tree-ssa-dse.c
+++ b/gcc/tree-ssa-dse.c
@@ -795,7 +795,8 @@ tree_ssa_dse (void)
static bool
gate_dse (void)
{
- return flag_tree_dse != 0;
+ /* Temporarily disabled.  */
+ return false;
+ /* return flag_tree_dse != 0; */
}
struct tree_opt_pass pass_dse = {
diff --git a/gcc/tree-ssa-forwprop.c b/gcc/tree-ssa-forwprop.c
index e2c5ed48e70..a229b263d0d 100644
--- a/gcc/tree-ssa-forwprop.c
+++ b/gcc/tree-ssa-forwprop.c
@@ -585,8 +585,6 @@ tidy_after_forward_propagate_addr (tree stmt)
if (TREE_CODE (TREE_OPERAND (stmt, 1)) == ADDR_EXPR)
recompute_tree_invariant_for_addr_expr (TREE_OPERAND (stmt, 1));
-
- mark_new_vars_to_rename (stmt);
}
/* STMT defines LHS which is contains the address of the 0th element
@@ -852,9 +850,13 @@ forward_propagate_addr_expr (tree stmt, bool *some)
continue;
}
+ push_stmt_changes (&use_stmt);
+
result = forward_propagate_addr_expr_1 (stmt, use_stmt, some);
*some |= result;
all &= result;
+
+ pop_stmt_changes (&use_stmt);
}
return all;
@@ -1042,10 +1044,11 @@ struct tree_opt_pass pass_forwprop = {
PROP_cfg | PROP_ssa
| PROP_alias, /* properties_required */
0, /* properties_provided */
- PROP_smt_usage, /* properties_destroyed */
+ 0, /* properties_destroyed */
0, /* todo_flags_start */
- TODO_dump_func /* todo_flags_finish */
+ TODO_dump_func
| TODO_ggc_collect
- | TODO_update_ssa | TODO_verify_ssa,
- 0 /* letter */
+ | TODO_update_ssa
+ | TODO_verify_ssa, /* todo_flags_finish */
+ 0 /* letter */
};
diff --git a/gcc/tree-ssa-loop-im.c b/gcc/tree-ssa-loop-im.c
index f974b60b536..56a8019f29d 100644
--- a/gcc/tree-ssa-loop-im.c
+++ b/gcc/tree-ssa-loop-im.c
@@ -916,17 +916,22 @@ free_mem_ref_locs (struct mem_ref_loc *mem_refs)
static void
rewrite_mem_refs (tree tmp_var, struct mem_ref_loc *mem_refs)
{
- tree var;
- ssa_op_iter iter;
+ bitmap loads = BITMAP_ALLOC (NULL);
+ bitmap stores = BITMAP_ALLOC (NULL);
for (; mem_refs; mem_refs = mem_refs->next)
{
- FOR_EACH_SSA_TREE_OPERAND (var, mem_refs->stmt, iter, SSA_OP_ALL_VIRTUALS)
- mark_sym_for_renaming (SSA_NAME_VAR (var));
-
+ bitmap_clear (loads);
+ bitmap_clear (stores);
+ get_loads_and_stores (mem_refs->stmt, loads, stores);
+ mark_set_for_renaming (stores);
+ mark_set_for_renaming (loads);
*mem_refs->ref = tmp_var;
update_stmt (mem_refs->stmt);
}
+
+ BITMAP_FREE (loads);
+ BITMAP_FREE (stores);
}
/* The name and the length of the currently generated variable
@@ -1211,13 +1216,15 @@ gather_mem_refs_stmt (struct loop *loop, htab_t mem_refs,
hashval_t hash;
PTR *slot;
struct mem_ref *ref = NULL;
- ssa_op_iter oi;
- tree vname;
bool is_stored;
+ bitmap loads, stores;
if (ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
return;
+ loads = BITMAP_ALLOC (NULL);
+ stores = BITMAP_ALLOC (NULL);
+
/* Recognize MEM = (SSA_NAME | invariant) and SSA_NAME = MEM patterns. */
if (TREE_CODE (stmt) != MODIFY_EXPR)
goto fail;
@@ -1270,14 +1277,20 @@ gather_mem_refs_stmt (struct loop *loop, htab_t mem_refs,
}
ref->is_stored |= is_stored;
- FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
- bitmap_set_bit (ref->vops, DECL_UID (SSA_NAME_VAR (vname)));
+ get_loads_and_stores (stmt, loads, stores);
+ bitmap_ior_into (ref->vops, loads);
+ bitmap_ior_into (ref->vops, stores);
record_mem_ref_loc (&ref->locs, stmt, mem);
+ BITMAP_FREE (loads);
+ BITMAP_FREE (stores);
return;
fail:
- FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
- bitmap_set_bit (clobbered_vops, DECL_UID (SSA_NAME_VAR (vname)));
+ get_loads_and_stores (stmt, loads, stores);
+ bitmap_ior_into (clobbered_vops, loads);
+ bitmap_ior_into (clobbered_vops, stores);
+ BITMAP_FREE (loads);
+ BITMAP_FREE (stores);
}
/* Gathers memory references in LOOP. Notes vops accessed through unrecognized
diff --git a/gcc/tree-ssa-loop-ivopts.c b/gcc/tree-ssa-loop-ivopts.c
index 2bb2f0621b7..c54a476aa64 100644
--- a/gcc/tree-ssa-loop-ivopts.c
+++ b/gcc/tree-ssa-loop-ivopts.c
@@ -5572,9 +5572,10 @@ compute_phi_arg_on_exit (edge exit, tree stmts, tree op)
/* Rewrites USE using candidate CAND. */
static void
-rewrite_use (struct ivopts_data *data,
- struct iv_use *use, struct iv_cand *cand)
+rewrite_use (struct ivopts_data *data, struct iv_use *use, struct iv_cand *cand)
{
+ push_stmt_changes (&use->stmt);
+
switch (use->type)
{
case USE_NONLINEAR_EXPR:
@@ -5592,7 +5593,8 @@ rewrite_use (struct ivopts_data *data,
default:
gcc_unreachable ();
}
- mark_new_vars_to_rename (use->stmt);
+
+ pop_stmt_changes (&use->stmt);
}
/* Rewrite the uses using the selected induction variables. */
diff --git a/gcc/tree-ssa-loop-prefetch.c b/gcc/tree-ssa-loop-prefetch.c
index b58dbf6fccd..15d3b8e3de6 100644
--- a/gcc/tree-ssa-loop-prefetch.c
+++ b/gcc/tree-ssa-loop-prefetch.c
@@ -46,6 +46,9 @@ Software Foundation, 59 Temple Place - Suite 330, Boston, MA
#include "params.h"
#include "langhooks.h"
+/* Debugging aid: assert that SSA_NAME_VAR is never applied to a .MEM
+   name anywhere in this file.  */
+#undef SSA_NAME_VAR
+#define SSA_NAME_VAR(NODE) __extension__ ({ extern tree mem_var; const tree __t = SSA_NAME_CHECK (NODE)->ssa_name.var; gcc_assert (__t != mem_var); __t; })
+
/* This pass inserts prefetch instructions to optimize cache usage during
accesses to arrays in loops. It processes loops sequentially and:
diff --git a/gcc/tree-ssa-operands.c b/gcc/tree-ssa-operands.c
index f26e5f54cdf..1ec274b5518 100644
--- a/gcc/tree-ssa-operands.c
+++ b/gcc/tree-ssa-operands.c
@@ -75,14 +75,53 @@ Boston, MA 02110-1301, USA. */
vector for VUSE, then the new vector will also be modified such that
it contains 'a_5' rather than 'a'. */
+
+/* Structure storing statistics on how many call clobbers we have, and
+   how many were avoided.  */
+
+static struct
+{
+ /* Number of call-clobbered ops we attempt to add to calls in
+ add_call_clobbered_mem_symbols. */
+ unsigned int clobbered_vars;
+
+ /* Number of write-clobbers (VDEFs) avoided by using
+ not_written information. */
+ unsigned int static_write_clobbers_avoided;
+
+ /* Number of reads (VUSEs) avoided by using not_read information. */
+ unsigned int static_read_clobbers_avoided;
+
+ /* Number of write-clobbers avoided because the variable can't escape to
+ this call. */
+ unsigned int unescapable_clobbers_avoided;
+
+ /* Number of read-only uses we attempt to add to calls in
+ add_call_read_mem_symbols. */
+ unsigned int readonly_clobbers;
+
+ /* Number of read-only uses we avoid using not_read information. */
+ unsigned int static_readonly_clobbers_avoided;
+} clobber_stats;
+
+
+/* .MEM is an artificial symbol representing memory references.  Any
+   symbol for which is_gimple_reg returns false is considered
+   "memory".  SSA renaming will build factored use-def and def-def
+   chains over .MEM, joining all versions that access
+ conflicting regions of memory (be it due to aliasing, or
+ overlapping references to the same aggregate object, or side-effect
+ references to call-clobbered symbols at call-sites). */
+tree mem_var;
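
[Editorial illustration, not part of this patch; the SSA version
numbers below are hypothetical.  With a single .MEM symbol, two
statements touching overlapping memory thread through one factored
chain in a dump roughly like this:

    # .MEM_4 = VDEF <.MEM_3>
    a.x = 5;

    # VUSE <.MEM_4>
    t_7 = a.x;
]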
+
/* Flags to describe operand properties in helpers. */
/* By default, operands are loaded. */
-#define opf_none 0
+#define opf_use 0
/* Operand is the target of an assignment expression or a
call-clobbered variable. */
-#define opf_is_def (1 << 0)
+#define opf_def (1 << 0)
/* No virtual operands should be created in the expression. This is used
when traversing ADDR_EXPR nodes which have different semantics than
@@ -92,10 +131,10 @@ Boston, MA 02110-1301, USA. */
VUSE for 'b'. */
#define opf_no_vops (1 << 2)
-/* Operand is a "non-specific" kill for call-clobbers and such. This
- is used to distinguish "reset the world" events from explicit
- MODIFY_EXPRs. */
-#define opf_non_specific (1 << 3)
+/* Operand is an implicit reference. This is used to distinguish
+ explicit assignments in the form of MODIFY_EXPRs from clobbering
+ sites like function calls or ASM_EXPRs. */
+#define opf_implicit (1 << 3)
/* Array for building all the def operands. */
static VEC(tree,heap) *build_defs;
@@ -122,6 +161,59 @@ static use_optype_p free_uses = NULL;
static vuse_optype_p free_vuses = NULL;
static vdef_optype_p free_vdefs = NULL;
+/* Sets of symbols loaded and stored by a statement. These sets are
+ only used when the operand scanner is called via get_loads_and_stores. */
+static bitmap loaded_syms = NULL;
+static bitmap stored_syms = NULL;
+
+/* Statement change buffer. Data structure used to record state
+ information for statements. This is used to determine what needs
+ to be done in order to update the SSA web after a statement is
+ modified by a pass. If STMT is a statement that has just been
+ created, or needs to be folded via fold_stmt, or anything that
+ changes its physical structure then the pass should:
+
+ 1- Call push_stmt_changes (&stmt) to record the current state of
+ STMT before any modifications are made.
+
+ 2- Make all appropriate modifications to the statement.
+
+ 3- Call pop_stmt_changes (&stmt) to find new symbols that
+ need to be put in SSA form, SSA name mappings for names that
+ have disappeared, recompute invariantness for address
+ expressions, cleanup EH information, etc. A usage sketch
+ follows the declarations below. */
+
+struct scb_d
+{
+ /* Pointer to the statement being modified. */
+ tree *stmt_p;
+
+ /* If the statement references memory these are the sets of symbols
+ loaded and stored by the statement. */
+ bitmap loads;
+ bitmap stores;
+};
+
+typedef struct scb_d *scb_t;
+DEF_VEC_P(scb_t);
+DEF_VEC_ALLOC_P(scb_t,heap);
+
+/* Stack of statement change buffers (SCB). Every call to
+ push_stmt_changes pushes a new buffer onto the stack. Calls to
+ pop_stmt_changes pop a buffer off of the stack and compute the set
+ of changes for the popped statement. */
+static VEC(scb_t,heap) *scb_stack;
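
[Editorial sketch of the protocol above; it relies only on interfaces
visible in this patch (push_stmt_changes, pop_stmt_changes,
fold_stmt_inplace), and the function name is a placeholder:

    /* Sketch: STMT_P points to a statement a pass is about to
       restructure.  */
    static void
    sketch_modify_stmt (tree *stmt_p)
    {
      /* 1- Snapshot the symbols loaded/stored by *STMT_P.  */
      push_stmt_changes (stmt_p);

      /* 2- Make any structural modification, e.g. fold in place.  */
      fold_stmt_inplace (*stmt_p);

      /* 3- Compare against the snapshot; new or vanished memory
	 symbols are marked for renaming.  */
      pop_stmt_changes (stmt_p);
    }
]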
+
+
+/* Return true if the operand scanner should just gather the symbols
+   loaded/stored by the statement instead of building operands. */
+
+static inline bool
+gathering_loads_stores (void)
+{
+ return loaded_syms || stored_syms;
+}
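
[The body of get_loads_and_stores is not visible in this hunk, so the
following is an assumption rather than the patch's actual code.  A
plausible implementation points the two file-local bitmaps at the
caller's sets, forces a rescan, and detaches them again:

    /* Assumed sketch: collect the symbols loaded and stored by STMT
       into the caller-supplied bitmaps LOADS and STORES by re-running
       the operand scanner in gathering mode.  */
    void
    get_loads_and_stores (tree stmt, bitmap loads, bitmap stores)
    {
      loaded_syms = loads;
      stored_syms = stores;
      update_stmt (stmt);	/* Re-scans STMT's operands.  */
      loaded_syms = NULL;
      stored_syms = NULL;
    }
]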
+
/* Return the DECL_UID of the base variable of T. */
@@ -195,35 +287,6 @@ ssa_operands_active (void)
}
-/* Structure storing statistics on how many call clobbers we have, and
- how many where avoided. */
-
-static struct
-{
- /* Number of call-clobbered ops we attempt to add to calls in
- add_call_clobber_ops. */
- unsigned int clobbered_vars;
-
- /* Number of write-clobbers (VDEFs) avoided by using not_written
- information. */
- unsigned int static_write_clobbers_avoided;
-
- /* Number of reads (VUSEs) avoided by using not_read information. */
- unsigned int static_read_clobbers_avoided;
-
- /* Number of write-clobbers avoided because the variable can't escape to
- this call. */
- unsigned int unescapable_clobbers_avoided;
-
- /* Number of read-only uses we attempt to add to calls in
- add_call_read_ops. */
- unsigned int readonly_clobbers;
-
- /* Number of read-only uses we avoid using not_read information. */
- unsigned int static_readonly_clobbers_avoided;
-} clobber_stats;
-
-
/* Initialize the operand cache routines. */
void
@@ -233,11 +296,11 @@ init_ssa_operands (void)
build_uses = VEC_alloc (tree, heap, 10);
build_vuses = VEC_alloc (tree, heap, 25);
build_vdefs = VEC_alloc (tree, heap, 25);
+ scb_stack = VEC_alloc (scb_t, heap, 20);
gcc_assert (operand_memory == NULL);
operand_memory_index = SSA_OPERAND_MEMORY_SIZE;
ops_active = true;
- memset (&clobber_stats, 0, sizeof (clobber_stats));
}
@@ -255,6 +318,12 @@ fini_ssa_operands (void)
free_uses = NULL;
free_vuses = NULL;
free_vdefs = NULL;
+
+ /* The change buffer stack had better be empty. */
+ gcc_assert (VEC_length (scb_t, scb_stack) == 0);
+ VEC_free (scb_t, heap, scb_stack);
+ scb_stack = NULL;
+
while ((ptr = operand_memory) != NULL)
{
operand_memory = operand_memory->next;
@@ -262,22 +331,18 @@ fini_ssa_operands (void)
}
ops_active = false;
-
- if (dump_file && (dump_flags & TDF_STATS))
- {
- fprintf (dump_file, "Original clobbered vars:%d\n",
- clobber_stats.clobbered_vars);
- fprintf (dump_file, "Static write clobbers avoided:%d\n",
- clobber_stats.static_write_clobbers_avoided);
- fprintf (dump_file, "Static read clobbers avoided:%d\n",
- clobber_stats.static_read_clobbers_avoided);
- fprintf (dump_file, "Unescapable clobbers avoided:%d\n",
- clobber_stats.unescapable_clobbers_avoided);
- fprintf (dump_file, "Original read-only clobbers:%d\n",
- clobber_stats.readonly_clobbers);
- fprintf (dump_file, "Static read-only clobbers avoided:%d\n",
- clobber_stats.static_readonly_clobbers_avoided);
- }
+}
+
+
+/* Reset the operand cache arrays. */
+
+static void
+truncate_ssa_stmt_operands (void)
+{
+ VEC_truncate (tree, build_defs, 0);
+ VEC_truncate (tree, build_uses, 0);
+ VEC_truncate (tree, build_vdefs, 0);
+ VEC_truncate (tree, build_vuses, 0);
}
@@ -287,6 +352,9 @@ static inline void *
ssa_operand_alloc (unsigned size)
{
char *ptr;
+
+ gcc_assert (size <= SSA_OPERAND_MEMORY_SIZE);
+
if (operand_memory_index + size >= SSA_OPERAND_MEMORY_SIZE)
{
struct ssa_operand_memory_d *ptr;
@@ -533,6 +601,7 @@ realloc_vdef (struct vdef_optype_d *ptr, int num_elem)
/* It's growing. Allocate a new one and replace the old one. */
tmp = ptr;
ret = add_vdef_op (stmt, val, num_elem, &ptr);
+ ret->next = NULL;
ptr = tmp;
lim = VUSE_VECT_NUM_ELEM (ptr->usev);
@@ -592,6 +661,7 @@ realloc_vuse (struct vuse_optype_d *ptr, int num_elem)
/* It's growing. Allocate a new one and replace the old one. */
tmp = ptr;
ret = add_vuse_op (stmt, val, num_elem, &ptr);
+ ret->next = NULL;
ptr = tmp;
lim = VUSE_VECT_NUM_ELEM (ptr->usev);
@@ -778,104 +848,66 @@ finalize_ssa_uses (tree stmt)
static inline void
finalize_ssa_vdef_ops (tree stmt)
{
- int x;
- unsigned new_i;
- struct vdef_optype_d new_list;
- vdef_optype_p old_ops, ptr, last;
- tree act;
- unsigned old_base, new_base;
-
- new_list.next = NULL;
- last = &new_list;
+ vdef_optype_p old_ops;
old_ops = VDEF_OPS (stmt);
- new_i = 0;
- while (old_ops && new_i < VEC_length (tree, build_vdefs))
+ if (old_ops)
{
- act = VEC_index (tree, build_vdefs, new_i);
- new_base = get_name_decl (act);
- old_base = get_name_decl (VDEF_RESULT (old_ops));
+ int x;
- if (old_base == new_base)
- {
- /* if variables are the same, reuse this node. */
- MOVE_HEAD_AFTER (old_ops, last);
- for (x = 0; x < VUSE_VECT_NUM_ELEM (last->usev); x++)
- set_virtual_use_link (VDEF_OP_PTR (last, x), stmt);
- new_i++;
- }
- else if (old_base < new_base)
+ if (VEC_length (tree, build_vdefs) == 0)
{
- /* if old is less than new, old goes to the free list. */
+ /* The statement used to have a VDEF but it doesn't
+ anymore. Unlink the operands in the RHS. */
+ gcc_assert (old_ops->next == NULL);
for (x = 0; x < VUSE_VECT_NUM_ELEM (old_ops->usev); x++)
delink_imm_use (VDEF_OP_PTR (old_ops, x));
- MOVE_HEAD_TO_FREELIST (old_ops, vdef);
+
+ /* The SSA name on the LHS is not needed anymore, release it
+ so the SSA updater can consider it a stale name and
+ ignore it when rewriting VOPS. */
+ release_ssa_name_after_update_ssa (VDEF_RESULT (old_ops));
+
+ old_ops->next = free_vdefs;
+ free_vdefs = old_ops;
+ VDEF_OPS (stmt) = NULL;
}
else
{
- /* This is a new operand. */
- add_vdef_op (stmt, act, 1, &last);
- new_i++;
+ /* Make sure all the immediate use links are still correct. */
+ for (x = 0; x < VUSE_VECT_NUM_ELEM (old_ops->usev); x++)
+ set_virtual_use_link (VDEF_OP_PTR (old_ops, x), stmt);
}
}
-
- /* If there is anything remaining in the build_vdefs list, simply emit it. */
- for ( ; new_i < VEC_length (tree, build_vdefs); new_i++)
- add_vdef_op (stmt, VEC_index (tree, build_vdefs, new_i), 1, &last);
-
- last->next = NULL;
-
- /* If there is anything in the old list, free it. */
- if (old_ops)
+ else if (VEC_length (tree, build_vdefs) > 0)
{
- for (ptr = old_ops; ptr; ptr = ptr->next)
- for (x = 0; x < VUSE_VECT_NUM_ELEM (ptr->usev); x++)
- delink_imm_use (VDEF_OP_PTR (ptr, x));
- old_ops->next = free_vdefs;
- free_vdefs = old_ops;
+ /* This is the first time the statement gets a VDEF,
+ instantiate it. */
+ tree op;
+ struct vdef_optype_d new_list;
+ vdef_optype_p last;
+
+ gcc_assert (VEC_length (tree, build_vdefs) == 1);
+
+ new_list.next = NULL;
+ last = &new_list;
+ op = VEC_index (tree, build_vdefs, 0);
+ add_vdef_op (stmt, op, 1, &last);
+ last->next = NULL;
+
+ /* Now set the stmt's operands. */
+ VDEF_OPS (stmt) = new_list.next;
}
-
- /* Now set the stmt's operands. */
- VDEF_OPS (stmt) = new_list.next;
-
-#ifdef ENABLE_CHECKING
- {
- unsigned x = 0;
- for (ptr = VDEF_OPS (stmt); ptr; ptr = ptr->next)
- x++;
-
- gcc_assert (x == VEC_length (tree, build_vdefs));
- }
-#endif
}
+
static void
finalize_ssa_vdefs (tree stmt)
{
finalize_ssa_vdef_ops (stmt);
-}
-
-
-/* Clear the in_list bits and empty the build array for VDEFs. */
-
-static inline void
-cleanup_vdefs (void)
-{
- unsigned x, num;
- num = VEC_length (tree, build_vdefs);
-
- for (x = 0; x < num; x++)
- {
- tree t = VEC_index (tree, build_vdefs, x);
- if (TREE_CODE (t) != SSA_NAME)
- {
- var_ann_t ann = var_ann (t);
- ann->in_vdef_list = 0;
- }
- }
VEC_truncate (tree, build_vdefs, 0);
-}
+}
/* Takes elements from build_vuses and turns them into vuse operands of
@@ -884,146 +916,64 @@ cleanup_vdefs (void)
static inline void
finalize_ssa_vuse_ops (tree stmt)
{
- int x;
- unsigned new_i;
- struct vuse_optype_d new_list;
- vuse_optype_p old_ops, ptr, last;
- tree act;
- unsigned old_base, new_base;
-
- new_list.next = NULL;
- last = &new_list;
+ vuse_optype_p old_ops;
old_ops = VUSE_OPS (stmt);
- new_i = 0;
- while (old_ops && new_i < VEC_length (tree, build_vuses))
+ if (old_ops)
{
- act = VEC_index (tree, build_vuses, new_i);
- new_base = get_name_decl (act);
- old_base = get_name_decl (VUSE_OP (old_ops, 0));
+ int x;
- if (old_base == new_base)
- {
- /* if variables are the same, reuse this node. */
- MOVE_HEAD_AFTER (old_ops, last);
- for (x = 0; x < VUSE_VECT_NUM_ELEM (last->usev); x++)
- set_virtual_use_link (VUSE_OP_PTR (last, x), stmt);
- new_i++;
- }
- else if (old_base < new_base)
+ if (VEC_length (tree, build_vuses) == 0)
{
- /* if old is less than new, old goes to the free list. */
+ /* The statement used to have a VUSE but it doesn't
+ anymore. Unlink the operands in the RHS. */
+ gcc_assert (old_ops->next == NULL);
for (x = 0; x < VUSE_VECT_NUM_ELEM (old_ops->usev); x++)
delink_imm_use (VUSE_OP_PTR (old_ops, x));
- MOVE_HEAD_TO_FREELIST (old_ops, vuse);
+
+ old_ops->next = free_vuses;
+ free_vuses = old_ops;
+ VUSE_OPS (stmt) = NULL;
}
else
{
- /* This is a new operand. */
- add_vuse_op (stmt, act, 1, &last);
- new_i++;
+ /* Make sure all the immediate use links are still correct. */
+ for (x = 0; x < VUSE_VECT_NUM_ELEM (old_ops->usev); x++)
+ set_virtual_use_link (VUSE_OP_PTR (old_ops, x), stmt);
}
}
-
- /* If there is anything remaining in the build_vuses list, simply emit it. */
- for ( ; new_i < VEC_length (tree, build_vuses); new_i++)
- add_vuse_op (stmt, VEC_index (tree, build_vuses, new_i), 1, &last);
-
- last->next = NULL;
-
- /* If there is anything in the old list, free it. */
- if (old_ops)
+ else if (VEC_length (tree, build_vuses) > 0)
{
- for (ptr = old_ops; ptr; ptr = ptr->next)
- for (x = 0; x < VUSE_VECT_NUM_ELEM (ptr->usev); x++)
- delink_imm_use (VUSE_OP_PTR (ptr, x));
- old_ops->next = free_vuses;
- free_vuses = old_ops;
+ /* This is the first time the statement gets a VUSE,
+ instantiate it. */
+ tree op;
+ struct vuse_optype_d new_list;
+ vuse_optype_p last;
+
+ gcc_assert (VEC_length (tree, build_vuses) == 1);
+
+ new_list.next = NULL;
+ last = &new_list;
+ op = VEC_index (tree, build_vuses, 0);
+ add_vuse_op (stmt, op, 1, &last);
+ last->next = NULL;
+
+ /* Now set STMT's operands. */
+ VUSE_OPS (stmt) = new_list.next;
}
-
- /* Now set the stmt's operands. */
- VUSE_OPS (stmt) = new_list.next;
-
-#ifdef ENABLE_CHECKING
- {
- unsigned x = 0;
- for (ptr = VUSE_OPS (stmt); ptr; ptr = ptr->next)
- x++;
-
- gcc_assert (x == VEC_length (tree, build_vuses));
- }
-#endif
}
-/* Return a new VUSE operand vector, comparing to OLD_OPS_P. */
+
+/* Finalize the VUSE operands of STMT. */
static void
finalize_ssa_vuses (tree stmt)
{
- unsigned num, num_vdefs;
- unsigned vuse_index;
-
- /* Remove superfluous VUSE operands. If the statement already has a
- VDEF operation for a variable 'a', then a VUSE for 'a' is
- not needed because VDEFs imply a VUSE of the variable. For
- instance, suppose that variable 'a' is aliased:
-
- # VUSE <a_2>
- # a_3 = VDEF <a_2>
- a = a + 1;
-
- The VUSE <a_2> is superfluous because it is implied by the
- VDEF operation. */
- num = VEC_length (tree, build_vuses);
- num_vdefs = VEC_length (tree, build_vdefs);
-
- if (num > 0 && num_vdefs > 0)
- {
- for (vuse_index = 0; vuse_index < VEC_length (tree, build_vuses); )
- {
- tree vuse;
- vuse = VEC_index (tree, build_vuses, vuse_index);
- if (TREE_CODE (vuse) != SSA_NAME)
- {
- var_ann_t ann = var_ann (vuse);
- ann->in_vuse_list = 0;
- if (ann->in_vdef_list)
- {
- VEC_ordered_remove (tree, build_vuses, vuse_index);
- continue;
- }
- }
- vuse_index++;
- }
- }
- else
- {
- /* Clear out the in_list bits. */
- for (vuse_index = 0;
- vuse_index < VEC_length (tree, build_vuses);
- vuse_index++)
- {
- tree t = VEC_index (tree, build_vuses, vuse_index);
- if (TREE_CODE (t) != SSA_NAME)
- {
- var_ann_t ann = var_ann (t);
- ann->in_vuse_list = 0;
- }
- }
- }
-
finalize_ssa_vuse_ops (stmt);
-
- /* The VDEF build vector wasn't cleaned up because we needed it. */
- cleanup_vdefs ();
-
- /* Free the VUSEs build vector. */
VEC_truncate (tree, build_vuses, 0);
-
}
-
/* Finalize all the build vectors, fill the new ones into INFO. */
static inline void
@@ -1066,41 +1016,48 @@ append_use (tree *use_p)
}
-/* Add a new virtual def for variable VAR to the build array. */
+/* Add a new VDEF operator for .MEM to the build array. */
static inline void
-append_vdef (tree var)
+append_vdef (void)
{
- if (TREE_CODE (var) != SSA_NAME)
- {
- var_ann_t ann = get_var_ann (var);
-
- /* Don't allow duplicate entries. */
- if (ann->in_vdef_list)
- return;
- ann->in_vdef_list = 1;
- }
+ /* We only ever need one VDEF operator. FIXME: build_vdefs
+ should not be a vector anymore. */
+ if (VEC_length (tree, build_vdefs) == 1)
+ return;
- VEC_safe_push (tree, heap, build_vdefs, (tree)var);
+ VEC_safe_push (tree, heap, build_vdefs, mem_var);
}
-/* Add VAR to the list of virtual uses. */
+/* Add a new VUSE operator for .MEM to the build array. */
static inline void
-append_vuse (tree var)
+append_vuse (void)
{
- /* Don't allow duplicate entries. */
- if (TREE_CODE (var) != SSA_NAME)
- {
- var_ann_t ann = get_var_ann (var);
+ /* We only ever need one VUSE operator. FIXME: build_vuses should not be
+ a vector anymore. */
+ if (VEC_length (tree, build_vuses) == 1)
+ return;
- if (ann->in_vuse_list || ann->in_vdef_list)
- return;
- ann->in_vuse_list = 1;
- }
+ VEC_safe_push (tree, heap, build_vuses, mem_var);
+}
+
+
+/* Add a virtual operator for .MEM according to value of FLAGS. */
- VEC_safe_push (tree, heap, build_vuses, (tree)var);
+static inline void
+add_virtual_operator (int flags)
+{
+ /* If we are inside an ADDR_EXPR, memory is not being referenced;
+ the expression merely computes an address. */
+ if (flags & opf_no_vops)
+ return;
+
+ if (flags & opf_def)
+ append_vdef ();
+ else
+ append_vuse ();
}
@@ -1255,373 +1212,286 @@ access_can_touch_variable (tree ref, tree alias, HOST_WIDE_INT offset,
}
-/* Add VAR to the virtual operands array. FLAGS is as in
- get_expr_operands. FULL_REF is a tree that contains the entire
- pointer dereference expression, if available, or NULL otherwise.
- OFFSET and SIZE come from the memory access expression that
- generated this virtual operand. FOR_CLOBBER is true is this is
- adding a virtual operand for a call clobber. */
+/* Add DECL to STORED_SYMS or LOADED_SYMS depending on the value of
+ FLAGS. */
-static void
-add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
- tree full_ref, HOST_WIDE_INT offset,
- HOST_WIDE_INT size, bool for_clobber)
+static inline void
+add_mem_symbol (tree decl, int flags)
{
- VEC(tree,gc) *aliases;
- tree sym;
- var_ann_t v_ann;
-
- sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
- v_ann = var_ann (sym);
-
- /* Mark statements with volatile operands. Optimizers should back
- off from statements having volatile operands. */
- if (TREE_THIS_VOLATILE (sym) && s_ann)
- s_ann->has_volatile_ops = true;
-
- /* If the variable cannot be modified and this is a VDEF change
- it into a VUSE. This happens when read-only variables are marked
- call-clobbered and/or aliased to writable variables. So we only
- check that this only happens on non-specific stores.
-
- Note that if this is a specific store, i.e. associated with a
- modify_expr, then we can't suppress the VDEF, lest we run
- into validation problems.
-
- This can happen when programs cast away const, leaving us with a
- store to read-only memory. If the statement is actually executed
- at runtime, then the program is ill formed. If the statement is
- not executed then all is well. At the very least, we cannot ICE. */
- if ((flags & opf_non_specific) && unmodifiable_var_p (var))
- flags &= ~opf_is_def;
-
- /* The variable is not a GIMPLE register. Add it (or its aliases) to
- virtual operands, unless the caller has specifically requested
- not to add virtual operands (used when adding operands inside an
- ADDR_EXPR expression). */
- if (flags & opf_no_vops)
- return;
-
- aliases = v_ann->may_aliases;
- if (aliases == NULL)
- {
- /* The variable is not aliased or it is an alias tag. */
- if (flags & opf_is_def)
- append_vdef (var);
- else
- append_vuse (var);
- }
+ unsigned uid;
+
+ /* GIMPLE registers should not be added as memory symbols. */
+ gcc_assert (!is_gimple_reg (decl));
+
+ /* If the variable cannot be modified and this is an implicit store
+ operation, change it into a load. This happens when read-only
+ variables are marked call-clobbered and/or aliased to writable
+ variables. So we only apply this change to implicit
+ (non-specific) stores.
+
+ Note that if this is an explicit store (i.e. associated with a
+ MODIFY_EXPR), then we can't suppress it, lest we run into
+ validation problems. This can happen when programs cast away
+ const, leaving us with a store to read-only memory. If the
+ statement is actually executed at runtime, then the program is
+ ill formed. If the statement is not executed then all is well.
+ At the very least, we cannot ICE. */
+ if ((flags & opf_implicit) && unmodifiable_var_p (decl))
+ flags &= ~opf_def;
+
+ uid = DECL_UID (decl);
+ if (flags & opf_def)
+ bitmap_set_bit (stored_syms, uid);
else
- {
- unsigned i;
- tree al;
-
- /* The variable is aliased. Add its aliases to the virtual
- operands. */
- gcc_assert (VEC_length (tree, aliases) != 0);
-
- if (flags & opf_is_def)
- {
-
- bool none_added = true;
+ bitmap_set_bit (loaded_syms, uid);
+}
- for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
- {
- if (!access_can_touch_variable (full_ref, al, offset, size))
- continue;
-
- none_added = false;
- append_vdef (al);
- }
- /* If the variable is also an alias tag, add a virtual
- operand for it, otherwise we will miss representing
- references to the members of the variable's alias set.
- This fixes the bug in gcc.c-torture/execute/20020503-1.c.
-
- It is also necessary to add bare defs on clobbers for
- SMT's, so that bare SMT uses caused by pruning all the
- aliases will link up properly with calls. In order to
- keep the number of these bare defs we add down to the
- minimum necessary, we keep track of which SMT's were used
- alone in statement vdefs or VUSEs. */
- if (v_ann->is_aliased
- || none_added
- || (TREE_CODE (var) == SYMBOL_MEMORY_TAG
- && for_clobber
- && SMT_USED_ALONE (var)))
- {
- /* Every bare SMT def we add should have SMT_USED_ALONE
- set on it, or else we will get the wrong answer on
- clobbers. */
- if (none_added
- && !updating_used_alone && aliases_computed_p
- && TREE_CODE (var) == SYMBOL_MEMORY_TAG)
- gcc_assert (SMT_USED_ALONE (var));
-
- append_vdef (var);
- }
- }
- else
- {
- bool none_added = true;
- for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
- {
- if (!access_can_touch_variable (full_ref, al, offset, size))
- continue;
- none_added = false;
- append_vuse (al);
- }
+/* Add memory symbols associated with variable DECL to LOADED_SYMS or
+ STORED_SYMS according to FLAGS. If DECL has sub-variables, then
+ its sub-variables are added. Otherwise, DECL is added. FLAGS is
+ as in get_expr_operands. */
- /* Similarly, append a virtual uses for VAR itself, when
- it is an alias tag. */
- if (v_ann->is_aliased || none_added)
- append_vuse (var);
- }
+static void
+add_mem_symbols_in_decl (tree decl, int flags)
+{
+ subvar_t svars;
+
+ /* GIMPLE registers should not be added as memory symbols. */
+ gcc_assert (!is_gimple_reg (decl));
+
+ /* If DECL has sub-variables, add them instead of DECL. */
+ if (var_can_have_subvars (decl) && (svars = get_subvars_for_var (decl)))
+ {
+ subvar_t sv;
+ for (sv = svars; sv; sv = sv->next)
+ add_mem_symbol (sv->var, flags);
}
+ else
+ add_mem_symbol (decl, flags);
}
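
[The call sites that choose between these helpers are not shown in
this hunk; presumably the operand scanner dispatches on the gathering
mode roughly as follows.  This is an assumption: the function name is
a placeholder and DECL stands for a memory symbol found during the
scan:

    /* Assumed dispatch for a memory reference to DECL with the given
       FLAGS.  */
    static void
    sketch_reference_memory (tree decl, int flags)
    {
      if (gathering_loads_stores ())
	add_mem_symbols_in_decl (decl, flags);	/* Record symbols only.  */
      else
	add_virtual_operator (flags);		/* Emit a .MEM VDEF/VUSE.  */
    }
]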
-/* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in
- get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
- the statement's real operands, otherwise it is added to virtual
- operands. */
+/* Get memory symbols associated with memory tag TAG. FLAGS, FULL_REF,
+ OFFSET and SIZE are as in get_mem_symbols_in_indirect_ref. */
static void
-add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
+get_mem_symbols_in_tag (tree tag, int flags, tree full_ref,
+ HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
- bool is_real_op;
- tree var, sym;
+ VEC(tree,gc) *aliases;
+ tree sym, alias;
+ unsigned i;
var_ann_t v_ann;
-
- var = *var_p;
- gcc_assert (SSA_VAR_P (var));
-
- is_real_op = is_gimple_reg (var);
-
- /* If this is a real operand, the operand is either an SSA name or a
- decl. Virtual operands may only be decls. */
- gcc_assert (is_real_op || DECL_P (var));
-
- sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
+ bool added_symbols_p;
+
+ sym = (TREE_CODE (tag) == SSA_NAME ? SSA_NAME_VAR (tag) : tag);
v_ann = var_ann (sym);
+
+ aliases = v_ann->may_aliases;
- /* Mark statements with volatile operands. Optimizers should back
- off from statements having volatile operands. */
- if (TREE_THIS_VOLATILE (sym) && s_ann)
- s_ann->has_volatile_ops = true;
-
- if (is_real_op)
+ added_symbols_p = false;
+ if (aliases)
{
- /* The variable is a GIMPLE register. Add it to real operands. */
- if (flags & opf_is_def)
- append_def (var_p);
- else
- append_use (var_p);
+ /* If the tag has aliases, add all the aliases that may be
+ affected by FULL_REF. */
+ for (i = 0; VEC_iterate (tree, aliases, i, alias); i++)
+ if (access_can_touch_variable (full_ref, alias, offset, size))
+ {
+ add_mem_symbol (alias, flags);
+ added_symbols_p = true;
+ }
}
- else
- add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
+
+ /* If no symbols were added, we need to add the tag itself to avoid
+ missing dependencies in cases where no symbols are involved in
+ aliasing (e.g., when two pointers point to the same
+ heap-allocated structure). */
+ if (!added_symbols_p)
+ add_mem_symbol (sym, flags);
}
-/* A subroutine of get_expr_operands to handle INDIRECT_REF,
- ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
+/* Get memory symbols referenced by the *INDIRECT_REF node EXPR.
- STMT is the statement being processed, EXPR is the INDIRECT_REF
- that got us here.
-
FLAGS is as in get_expr_operands.
- FULL_REF contains the full pointer dereference expression, if we
- have it, or NULL otherwise.
+ FULL_REF contains the full pointer dereference expression, if
+ available. NULL otherwise.
OFFSET and SIZE are the location of the access inside the
- dereferenced pointer, if known.
-
- RECURSE_ON_BASE should be set to true if we want to continue
- calling get_expr_operands on the base pointer, and false if
- something else will do it for us. */
+ dereferenced pointer, if known. Otherwise, OFFSET should be 0
+ and SIZE should be -1. */
static void
-get_indirect_ref_operands (tree stmt, tree expr, int flags,
- tree full_ref,
- HOST_WIDE_INT offset, HOST_WIDE_INT size,
- bool recurse_on_base)
+get_mem_symbols_in_indirect_ref (tree expr, int flags, tree full_ref,
+ HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
+ struct ptr_info_def *pi = NULL;
tree *pptr = &TREE_OPERAND (expr, 0);
tree ptr = *pptr;
- stmt_ann_t s_ann = stmt_ann (stmt);
- if (SSA_VAR_P (ptr))
- {
- struct ptr_info_def *pi = NULL;
-
- /* If PTR has flow-sensitive points-to information, use it. */
- if (TREE_CODE (ptr) == SSA_NAME
- && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
- && pi->name_mem_tag)
- {
- /* PTR has its own memory tag. Use it. */
- add_virtual_operand (pi->name_mem_tag, s_ann, flags,
- full_ref, offset, size, false);
- }
- else
- {
- /* If PTR is not an SSA_NAME or it doesn't have a name
- tag, use its symbol memory tag. */
- var_ann_t v_ann;
-
- /* If we are emitting debugging dumps, display a warning if
- PTR is an SSA_NAME with no flow-sensitive alias
- information. That means that we may need to compute
- aliasing again. */
- if (dump_file
- && TREE_CODE (ptr) == SSA_NAME
- && pi == NULL)
- {
- fprintf (dump_file,
- "NOTE: no flow-sensitive alias info for ");
- print_generic_expr (dump_file, ptr, dump_flags);
- fprintf (dump_file, " in ");
- print_generic_stmt (dump_file, stmt, dump_flags);
- }
+ /* If we don't have aliasing information, do nothing. */
+ if (!aliases_computed_p)
+ return;
- if (TREE_CODE (ptr) == SSA_NAME)
- ptr = SSA_NAME_VAR (ptr);
- v_ann = var_ann (ptr);
+ /* No symbols referenced if PTR is not a variable. */
+ if (!SSA_VAR_P (ptr))
+ return;
- if (v_ann->symbol_mem_tag)
- add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
- full_ref, offset, size, false);
- }
- }
- else if (TREE_CODE (ptr) == INTEGER_CST)
+ if (TREE_CODE (ptr) == SSA_NAME
+ && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
+ && pi->name_mem_tag)
{
- /* If a constant is used as a pointer, we can't generate a real
- operand for it but we mark the statement volatile to prevent
- optimizations from messing things up. */
- if (s_ann)
- s_ann->has_volatile_ops = true;
- return;
+ /* If PTR has flow-sensitive points-to information, use it. */
+ get_mem_symbols_in_tag (pi->name_mem_tag, flags, full_ref, offset, size);
}
else
{
- /* Ok, this isn't even is_gimple_min_invariant. Something's broke. */
- gcc_unreachable ();
+ var_ann_t ann;
+
+ /* If PTR is not an SSA_NAME or it doesn't have a name
+ tag, use its symbol memory tag. */
+ if (TREE_CODE (ptr) == SSA_NAME)
+ ptr = SSA_NAME_VAR (ptr);
+
+ /* If alias information has not been computed for PTR, it may
+ not yet have a memory tag (e.g., when folding builtins, new
+ pointers may be instantiated, which forces another alias pass). */
+ ann = var_ann (ptr);
+ if (ann->symbol_mem_tag)
+ get_mem_symbols_in_tag (ann->symbol_mem_tag, flags, full_ref, offset,
+ size);
}
-
- /* If requested, add a USE operand for the base pointer. */
- if (recurse_on_base)
- get_expr_operands (stmt, pptr, opf_none);
}
-/* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
+/* Add memory symbols referenced by the aggregate reference EXPR. */
static void
-get_tmr_operands (tree stmt, tree expr, int flags)
+get_mem_symbols_in_aggregate (tree expr, int flags)
{
- tree tag = TMR_TAG (expr), ref;
+ tree ref;
HOST_WIDE_INT offset, size, maxsize;
- subvar_t svars, sv;
- stmt_ann_t s_ann = stmt_ann (stmt);
-
- /* First record the real operands. */
- get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
- get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
- if (TMR_SYMBOL (expr))
+ ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
+ if (SSA_VAR_P (ref))
{
- stmt_ann_t ann = stmt_ann (stmt);
- add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
- }
+ bool added_symbols_p = false;
- if (!tag)
- {
- /* Something weird, so ensure that we will be careful. */
- stmt_ann (stmt)->has_volatile_ops = true;
- return;
- }
+ if (get_subvars_for_var (ref))
+ {
+ subvar_t svars, sv;
- if (DECL_P (tag))
- {
- get_expr_operands (stmt, &tag, flags);
- return;
- }
+ svars = get_subvars_for_var (ref);
+ for (sv = svars; sv; sv = sv->next)
+ if (overlap_subvar (offset, maxsize, sv->var, NULL))
+ {
+ added_symbols_p = true;
+ add_mem_symbol (sv->var, flags);
+ }
+ }
- ref = get_ref_base_and_extent (tag, &offset, &size, &maxsize);
- gcc_assert (ref != NULL_TREE);
- svars = get_subvars_for_var (ref);
- for (sv = svars; sv; sv = sv->next)
- {
- bool exact;
- if (overlap_subvar (offset, maxsize, sv->var, &exact))
- add_stmt_operand (&sv->var, s_ann, flags);
+ /* If no symbols were added, we need to add REF itself to avoid
+ missing dependencies when EXPR does not reference any of the
+ sub-variables we computed for REF. */
+ if (!added_symbols_p)
+ add_mem_symbol (ref, flags);
}
+ else if (TREE_CODE (ref) == INDIRECT_REF)
+ get_mem_symbols_in_indirect_ref (ref, flags, expr, offset, maxsize);
+}
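A worked example of the overlap test above (a sketch; it assumes a 32-bit int and sub-variables computed for both fields):

    /* Sketch.  For:

         struct S { int a; int b; } s;
         s.a = 0;

       get_ref_base_and_extent returns REF = s, OFFSET = 0 and
       MAXSIZE = 32, so only the sub-variable (SFT) for s.a overlaps
       and is added.  If no sub-variable overlapped, REF itself
       would be added so the dependency on 's' is not lost.  */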
+
+
+/* Get memory symbols accessed by TARGET_MEM_REF EXPR. FLAGS is as
+ in get_expr_operands. */
+
+static void
+get_mem_symbols_in_tmr (tree expr, int flags)
+{
+ tree tag;
+
+ tag = TMR_TAG (expr);
+ if (!tag)
+ return;
+
+ if (MTAG_P (tag))
+ get_mem_symbols_in_tag (tag, flags, NULL_TREE, 0, -1);
+ else if (DECL_P (tag))
+ add_mem_symbol (tag, flags);
+ else
+ get_mem_symbols_in_aggregate (tag, flags);
}
-/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
- clobbered variables in the function. */
+/* Helper for get_call_expr_operands. When gathering memory symbols,
+ add all the call-clobbered symbols that may be modified by a call
+ to function CALLEE to the set STORED_SYMS. CALL_FLAGS are the
+ flags for the CALL_EXPR site.
+
+ If we are scanning for regular operands, add a VDEF operator if any
+ call-clobbered symbol may be modified by CALLEE and add a VUSE
+ operator if any call-clobbered symbol may be read by CALLEE. */
static void
-add_call_clobber_ops (tree stmt, tree callee)
+add_call_clobbered_mem_symbols (tree callee, int call_flags)
{
unsigned u;
bitmap_iterator bi;
- stmt_ann_t s_ann = stmt_ann (stmt);
bitmap not_read_b, not_written_b;
+ bool added_symbols_p, add_vdef_p, add_vuse_p;
- /* Functions that are not const, pure or never return may clobber
- call-clobbered variables. */
- if (s_ann)
- s_ann->makes_clobbering_call = true;
-
- /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
- for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
- if (global_var)
- {
- add_stmt_operand (&global_var, s_ann, opf_is_def);
- return;
- }
-
/* Get info for local and module level statics. There is a bit
set for each static if the call being processed does not read
or write that variable. */
not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
not_written_b = callee ? ipa_reference_get_not_written_global (callee) : NULL;
- /* Add a VDEF operand for every call clobbered variable. */
+ /* Check which call-clobbered variables may be modified by a call to
+ CALLEE. Add those affected to STORED_SYMS. */
+ added_symbols_p = false;
+ add_vdef_p = false;
+ add_vuse_p = false;
EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
{
- tree var = referenced_var_lookup (u);
- unsigned int escape_mask = var_ann (var)->escape_mask;
- tree real_var = var;
- bool not_read;
- bool not_written;
-
+ tree real_var, var;
+ unsigned int escape_mask;
+ bool not_read, not_written;
+
+ /* If we have already decided to add both a VDEF and a VUSE,
+ there is no point in examining any more call-clobbered
+ symbols. */
+ if (add_vdef_p && add_vuse_p)
+ break;
+
+ var = referenced_var_lookup (u);
+ escape_mask = var_ann (var)->escape_mask;
+
/* Not read and not written are computed on regular vars, not
subvars, so look at the parent var if this is an SFT. */
+ real_var = var;
if (TREE_CODE (var) == STRUCT_FIELD_TAG)
real_var = SFT_PARENT_VAR (var);
- not_read = not_read_b ? bitmap_bit_p (not_read_b,
- DECL_UID (real_var)) : false;
- not_written = not_written_b ? bitmap_bit_p (not_written_b,
- DECL_UID (real_var)) : false;
gcc_assert (!unmodifiable_var_p (var));
-
+
clobber_stats.clobbered_vars++;
- /* See if this variable is really clobbered by this function. */
+ /* See if this variable is really clobbered by a call to CALLEE. */
/* Trivial case: Things escaping only to pure/const are not
clobbered by non-pure-const, and only read by pure/const. */
- if ((escape_mask & ~(ESCAPE_TO_PURE_CONST)) == 0)
+ if (escape_mask == ESCAPE_TO_PURE_CONST)
{
- tree call = get_call_expr_in (stmt);
- if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
+ if (call_flags & (ECF_CONST | ECF_PURE))
{
- add_stmt_operand (&var, s_ann, opf_none);
+ if (gathering_loads_stores ())
+ {
+ added_symbols_p = true;
+ add_mem_symbols_in_decl (var, opf_use);
+ }
+ else
+ add_vuse_p = true;
+
clobber_stats.unescapable_clobbers_avoided++;
continue;
}
@@ -1631,69 +1501,282 @@ add_call_clobber_ops (tree stmt, tree callee)
continue;
}
}
-
+
+ /* Analysis of static globals may have determined that the
+ variable is never read from or never written to by any
+ function. Use that information, if available. */
+ not_read = not_read_b
+ ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
+ : false;
+
+ not_written = not_written_b
+ ? bitmap_bit_p (not_written_b, DECL_UID (real_var))
+ : false;
+
if (not_written)
{
clobber_stats.static_write_clobbers_avoided++;
if (!not_read)
- add_stmt_operand (&var, s_ann, opf_none);
+ {
+ if (gathering_loads_stores ())
+ {
+ added_symbols_p = true;
+ add_mem_symbols_in_decl (var, opf_use);
+ }
+ else
+ add_vuse_p = true;
+ }
else
clobber_stats.static_read_clobbers_avoided++;
}
else
- add_virtual_operand (var, s_ann, opf_is_def, NULL, 0, -1, true);
+ {
+ if (gathering_loads_stores ())
+ {
+ added_symbols_p = true;
+ add_mem_symbols_in_decl (var, opf_def | opf_implicit);
+ }
+ else
+ add_vdef_p = true;
+ }
}
+
+ if (add_vdef_p)
+ add_virtual_operator (opf_def);
+
+ if (add_vuse_p)
+ add_virtual_operator (opf_use);
}
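The per-variable decision above can be summarized as follows (an informal summary, not code from the patch):

    /* For each call-clobbered VAR at a call to CALLEE:

         - VAR escapes only to pure/const and CALLEE is pure/const
             -> VAR is only read (VUSE / LOADED_SYMS);
         - IPA data says CALLEE never writes VAR
             -> VAR is only read, and skipped entirely if it is
                never read either;
         - otherwise
             -> VAR may be written (VDEF / STORED_SYMS).

       When emitting operands, at most one VDEF and one VUSE
       operator are added for the whole call.  */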
-/* Add VUSE operands for .GLOBAL_VAR or all call clobbered variables in the
- function. */
+/* Helper for get_call_expr_operands. When gathering memory symbols,
+ add all the call-clobbered symbols that may be read by a call to
+ function CALLEE to the set LOADED_SYMS.
+
+ If we are scanning for regular operands, add a VUSE operator if any
+ call-clobbered symbol may be read by CALLEE. */
static void
-add_call_read_ops (tree stmt, tree callee)
+add_call_read_mem_symbols (tree callee)
{
unsigned u;
bitmap_iterator bi;
- stmt_ann_t s_ann = stmt_ann (stmt);
bitmap not_read_b;
+ bool added_symbols_p;
- /* if the function is not pure, it may reference memory. Add
- a VUSE for .GLOBAL_VAR if it has been created. See add_referenced_var
- for the heuristic used to decide whether to create .GLOBAL_VAR. */
- if (global_var)
- {
- add_stmt_operand (&global_var, s_ann, opf_none);
- return;
- }
-
+ /* Get info for local and module level statics. There is a bit
+ set for each static if the call being processed does not read
+ or write that variable. */
not_read_b = callee ? ipa_reference_get_not_read_global (callee) : NULL;
- /* Add a VUSE for each call-clobbered variable. */
+ /* Check which call-clobbered variables may be read by a call to
+ CALLEE. Add those to LOADED_SYMS. */
+ added_symbols_p = false;
EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
{
- tree var = referenced_var (u);
- tree real_var = var;
+ tree real_var, var;
bool not_read;
-
+ var_ann_t ann;
+
clobber_stats.readonly_clobbers++;
+ var = referenced_var (u);
+ ann = var_ann (var);
+
/* Not read and not written are computed on regular vars, not
subvars, so look at the parent var if this is an SFT. */
-
+ real_var = var;
if (TREE_CODE (var) == STRUCT_FIELD_TAG)
real_var = SFT_PARENT_VAR (var);
- not_read = not_read_b ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
- : false;
-
+ not_read = not_read_b
+ ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
+ : false;
+
if (not_read)
{
clobber_stats.static_readonly_clobbers_avoided++;
continue;
}
-
- add_stmt_operand (&var, s_ann, opf_none | opf_non_specific);
+
+ if (gathering_loads_stores ())
+ {
+ added_symbols_p = true;
+ add_mem_symbols_in_decl (var, opf_use);
+ }
+ else
+ {
+ /* Add the VUSE operator and stop; once it has been added,
+ there is no point in examining more call-clobbered symbols. */
+ add_virtual_operator (opf_use);
+ return;
+ }
+ }
+
+ /* Similarly to the logic in add_call_clobbered_mem_symbols, add a
+ load of .MEM if no symbols have been added. */
+ if (gathering_loads_stores () && !added_symbols_p)
+ add_mem_symbol (mem_var, opf_use);
+}
+
+
+/* Add *VAR_P to the appropriate operand array for S_ANN. FLAGS is as in
+ get_expr_operands. If *VAR_P is a GIMPLE register, it will be added to
+ the statement's real operands, otherwise it is added to virtual
+ operands. */
+
+static void
+add_stmt_operand (tree *var_p, stmt_ann_t s_ann, int flags)
+{
+ tree var, sym;
+ var_ann_t v_ann;
+
+ gcc_assert (SSA_VAR_P (*var_p) && s_ann);
+
+ var = *var_p;
+ sym = (TREE_CODE (var) == SSA_NAME ? SSA_NAME_VAR (var) : var);
+ v_ann = var_ann (sym);
+
+ /* Mark statements with volatile operands. */
+ if (TREE_THIS_VOLATILE (sym))
+ s_ann->has_volatile_ops = true;
+
+ if (is_gimple_reg (sym))
+ {
+ /* The variable is a GIMPLE register. Add it to real operands. */
+ if (flags & opf_def)
+ append_def (var_p);
+ else
+ append_use (var_p);
+ }
+ else
+ {
+ /* .MEM should not be found in the code. */
+ gcc_assert (var != mem_var);
+
+ /* When gathering loads and stores, look for the symbols
+ associated with VAR, otherwise add the virtual operator
+ indicated by FLAGS. */
+ if (gathering_loads_stores ())
+ add_mem_symbols_in_decl (var, flags);
+ else
+ add_virtual_operator (flags);
+ }
+}
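An example of the dispatch above (hypothetical statement, not from the patch):

    /* For 'x_1 = a', where 'a' is addressable: 'x' satisfies
       is_gimple_reg and becomes a real DEF, while 'a' does not,
       so the statement gets a VUSE operator (or, when gathering
       loads and stores, 'a' is added to the loaded symbols).  */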
+
+
+/* A subroutine of get_expr_operands to handle INDIRECT_REF,
+ ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
+
+ STMT is the statement being processed, EXPR is the INDIRECT_REF
+ that got us here. FLAGS is as in get_expr_operands. */
+
+static void
+get_indirect_ref_operands (tree stmt, tree expr, int flags)
+{
+ tree *pptr = &TREE_OPERAND (expr, 0);
+ tree ptr = *pptr;
+ stmt_ann_t s_ann = stmt_ann (stmt);
+
+ /* If the dereference is volatile, mark the statement accordingly. */
+ if (TREE_THIS_VOLATILE (expr))
+ s_ann->has_volatile_ops = true;
+
+ /* When gathering memory symbols, there is no need to keep analyzing
+ the rest of the expression. */
+ if (gathering_loads_stores ())
+ {
+ get_mem_symbols_in_indirect_ref (expr, flags, NULL_TREE, 0, -1);
+ return;
+ }
+
+ if (SSA_VAR_P (ptr))
+ {
+ /* Only add a reference to .MEM if we have alias information
+ available. Otherwise, the SSA renamer will try to access the
+ aliased symbols associated with this pointer dereference and
+ find nothing. */
+ if (aliases_computed_p)
+ {
+ tree tag;
+
+ if (TREE_CODE (ptr) == SSA_NAME
+ && SSA_NAME_PTR_INFO (ptr)
+ && SSA_NAME_PTR_INFO (ptr)->name_mem_tag)
+ tag = SSA_NAME_PTR_INFO (ptr)->name_mem_tag;
+ else
+ {
+ tree sym = DECL_P (ptr) ? ptr : SSA_NAME_VAR (ptr);
+ tag = var_ann (sym)->symbol_mem_tag;
+ }
+
+ if (tag)
+ add_virtual_operator (flags);
+ }
+ }
+ else if (TREE_CODE (ptr) == INTEGER_CST)
+ {
+ /* If a constant is used as a pointer, we can't generate a real
+ operand for it but we mark the statement volatile to prevent
+ optimizations from messing things up. */
+ if (s_ann)
+ s_ann->has_volatile_ops = true;
+ return;
+ }
+ else
+ {
+ /* OK, this isn't even is_gimple_min_invariant. Something's
+ broken. */
+ gcc_unreachable ();
+ }
+
+ /* Recurse to get to the base pointer and add it as a USEd operand. */
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
+}
+
+
+/* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
+
+static void
+get_tmr_operands (tree stmt, tree expr, int flags)
+{
+ tree tag;
+
+ /* When gathering memory symbols, there is no need to keep analyzing
+ the rest of the expression. */
+ if (gathering_loads_stores ())
+ {
+ get_mem_symbols_in_tmr (expr, flags);
+ return;
+ }
+
+ /* First record the real operands. */
+ get_expr_operands (stmt, &TMR_BASE (expr), opf_use);
+ get_expr_operands (stmt, &TMR_INDEX (expr), opf_use);
+
+ if (TMR_SYMBOL (expr))
+ {
+ stmt_ann_t ann = stmt_ann (stmt);
+ add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
}
+
+ tag = TMR_TAG (expr);
+ if (!tag)
+ {
+ /* Something weird, so ensure that we will be careful. */
+ stmt_ann (stmt)->has_volatile_ops = true;
+ return;
+ }
+
+ add_virtual_operator (flags);
}
@@ -1705,37 +1788,29 @@ get_call_expr_operands (tree stmt, tree expr)
tree op;
int call_flags = call_expr_flags (expr);
- /* If aliases have been computed already, add VDEF or V_USE
- operands for all the symbols that have been found to be
- call-clobbered.
-
- Note that if aliases have not been computed, the global effects
- of calls will not be included in the SSA web. This is fine
- because no optimizer should run before aliases have been
- computed. By not bothering with virtual operands for CALL_EXPRs
- we avoid adding superfluous virtual operands, which can be a
- significant compile time sink (See PR 15855). */
- if (aliases_computed_p
- && !bitmap_empty_p (call_clobbered_vars)
- && !(call_flags & ECF_NOVOPS))
+ if (!bitmap_empty_p (call_clobbered_vars) && !(call_flags & ECF_NOVOPS))
{
/* A 'pure' or a 'const' function never call-clobbers anything.
A 'noreturn' function might, but since we don't return anyway
there is no point in recording that. */
if (TREE_SIDE_EFFECTS (expr)
&& !(call_flags & (ECF_PURE | ECF_CONST | ECF_NORETURN)))
- add_call_clobber_ops (stmt, get_callee_fndecl (expr));
+ add_call_clobbered_mem_symbols (get_callee_fndecl (expr), call_flags);
else if (!(call_flags & ECF_CONST))
- add_call_read_ops (stmt, get_callee_fndecl (expr));
+ add_call_read_mem_symbols (get_callee_fndecl (expr));
}
/* Find uses in the called function. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
+ if (!gathering_loads_stores ())
+ {
+ /* These operands can contain no memory symbols, so we only
+ traverse them when scanning for real operands. */
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
+ }
for (op = TREE_OPERAND (expr, 1); op; op = TREE_CHAIN (op))
- get_expr_operands (stmt, &TREE_VALUE (op), opf_none);
-
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
+ get_expr_operands (stmt, &TREE_VALUE (op), opf_use);
}
@@ -1744,16 +1819,19 @@ get_call_expr_operands (tree stmt, tree expr)
static void
get_asm_expr_operands (tree stmt)
{
- stmt_ann_t s_ann = stmt_ann (stmt);
- int noutputs = list_length (ASM_OUTPUTS (stmt));
- const char **oconstraints
- = (const char **) alloca ((noutputs) * sizeof (const char *));
- int i;
- tree link;
+ stmt_ann_t s_ann;
+ int i, noutputs;
+ const char **oconstraints;
const char *constraint;
- bool allows_mem, allows_reg, is_inout;
+ bool allows_mem, allows_reg, is_inout, add_vdef_p, add_vuse_p;
+ tree link;
+
+ s_ann = stmt_ann (stmt);
+ noutputs = list_length (ASM_OUTPUTS (stmt));
+ oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
- for (i=0, link = ASM_OUTPUTS (stmt); link; ++i, link = TREE_CHAIN (link))
+ /* Gather all output operands. */
+ for (i = 0, link = ASM_OUTPUTS (stmt); link; i++, link = TREE_CHAIN (link))
{
constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
oconstraints[i] = constraint;
@@ -1772,14 +1850,15 @@ get_asm_expr_operands (tree stmt)
add_to_addressable_set (t, &s_ann->addresses_taken);
}
- get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
+ get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
}
+ /* Gather all input operands. */
for (link = ASM_INPUTS (stmt); link; link = TREE_CHAIN (link))
{
constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
- parse_input_constraint (&constraint, 0, 0, noutputs, 0,
- oconstraints, &allows_mem, &allows_reg);
+ parse_input_constraint (&constraint, 0, 0, noutputs, 0, oconstraints,
+ &allows_mem, &allows_reg);
/* Memory operands are addressable. Note that STMT needs the
address of this operand. */
@@ -1793,45 +1872,69 @@ get_asm_expr_operands (tree stmt)
get_expr_operands (stmt, &TREE_VALUE (link), 0);
}
-
- /* Clobber memory for asm ("" : : : "memory"); */
+ /* Clobber all memory and addressable symbols for asm ("" : : : "memory"); */
+ add_vdef_p = false;
+ add_vuse_p = false;
for (link = ASM_CLOBBERS (stmt); link; link = TREE_CHAIN (link))
if (strcmp (TREE_STRING_POINTER (TREE_VALUE (link)), "memory") == 0)
{
unsigned i;
bitmap_iterator bi;
- /* Clobber all call-clobbered variables (or .GLOBAL_VAR if we
- decided to group them). */
- if (global_var)
- add_stmt_operand (&global_var, s_ann, opf_is_def);
- else
- EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
- {
- tree var = referenced_var (i);
- add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
- }
+ EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
+ {
+ tree var;
- /* Now clobber all addressables. */
- EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
- {
- tree var = referenced_var (i);
-
- /* Subvars are explicitly represented in this list, so
- we don't need the original to be added to the clobber
- ops, but the original *will* be in this list because
- we keep the addressability of the original
- variable up-to-date so we don't screw up the rest of
- the backend. */
- if (var_can_have_subvars (var)
- && get_subvars_for_var (var) != NULL)
- continue;
-
- add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
- }
+ /* Don't bother checking any more variables if we have
+ decided to add both a VDEF and a VUSE operator already. */
+ if (add_vdef_p && add_vuse_p)
+ break;
+
+ var = referenced_var (i);
+ if (gathering_loads_stores ())
+ add_mem_symbols_in_decl (var, opf_def | opf_implicit);
+ else if (unmodifiable_var_p (var))
+ add_vuse_p = true;
+ else
+ add_vdef_p = true;
+ }
+
+ EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
+ {
+ tree var;
+
+ /* Don't bother checking any more variables if we have
+ decided to add both a VDEF and a VUSE operator already. */
+ if (add_vdef_p && add_vuse_p)
+ break;
+
+ var = referenced_var (i);
+
+ /* Subvars are explicitly represented in this list, so we
+ don't need the original to be added to the clobber ops,
+ but the original *will* be in this list because we keep
+ the addressability of the original variable up-to-date
+ to avoid confusing the back-end. */
+ if (var_can_have_subvars (var)
+ && get_subvars_for_var (var) != NULL)
+ continue;
+
+ if (gathering_loads_stores ())
+ add_mem_symbols_in_decl (var, opf_def | opf_implicit);
+ else if (unmodifiable_var_p (var))
+ add_vuse_p = true;
+ else
+ add_vdef_p = true;
+ }
break;
}
+
+ if (add_vdef_p)
+ add_virtual_operator (opf_def);
+
+ if (add_vuse_p)
+ add_virtual_operator (opf_use);
}
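For example (illustrative only), a statement such as

    __asm__ __volatile__ ("" : : : "memory");

now produces at most one VDEF operator, plus one VUSE for unmodifiable variables, covering all call-clobbered and addressable symbols, instead of one virtual operand per symbol.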
@@ -1841,9 +1944,9 @@ static void
get_modify_expr_operands (tree stmt, tree expr)
{
/* First get operands from the RHS. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
- /* For the LHS, use a regular definition (OPF_IS_DEF) for GIMPLE
+ /* For the LHS, use a regular definition (opf_def) for GIMPLE
registers. If the LHS is a store to memory, we will need
a preserving definition (VDEF).
@@ -1854,7 +1957,49 @@ get_modify_expr_operands (tree stmt, tree expr)
We used to distinguish between preserving and killing definitions.
We always emit preserving definitions now. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_is_def);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_def);
+}
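An illustration of a preserving definition (the SSA names are hypothetical):

    /* A store to part of an aggregate preserves the rest of it:

         # a_3 = VDEF <a_2>
         a.b = x_1;

       the VDEF links a_3 to a_2 because the bytes of 'a' outside
       a.b survive the store.  */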
+
+
+/* Scan operands in EXPR, a reference to an aggregate type (arrays,
+ structures and complex values). STMT is the statement holding EXPR.
+ FLAGS is as in get_expr_operands. */
+
+static void
+get_aggregate_operands (tree stmt, tree expr, int flags)
+{
+ enum tree_code code = TREE_CODE (expr);
+ stmt_ann_t s_ann = stmt_ann (stmt);
+
+ /* If the reference is volatile, mark the statement accordingly. */
+ if (TREE_THIS_VOLATILE (expr))
+ s_ann->has_volatile_ops = true;
+
+ /* When gathering memory symbols, there is no need to keep analyzing
+ the rest of the expression. */
+ if (gathering_loads_stores ())
+ {
+ get_mem_symbols_in_aggregate (expr, flags);
+ return;
+ }
+
+ /* References to aggregates are references to the virtual variable
+ representing the aggregate. If the aggregate is a
+ non-addressable stack variable, the virtual variable will be the
+ base symbol for the reference. Otherwise, addressable and global
+ variables are represented with the artificial symbol .MEM. */
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
+
+ if (code == COMPONENT_REF)
+ {
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
+ }
+ else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
+ {
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_use);
+ }
}
@@ -1895,38 +2040,19 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
not really accessed. The only operands that we should find
here are ARRAY_REF indices which will always be real operands
(GIMPLE does not allow non-registers as array indices). */
+ if (gathering_loads_stores ())
+ return;
+
flags |= opf_no_vops;
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
return;
case SSA_NAME:
- case STRUCT_FIELD_TAG:
- case SYMBOL_MEMORY_TAG:
- case NAME_MEMORY_TAG:
- add_stmt_operand (expr_p, s_ann, flags);
- return;
-
case VAR_DECL:
case PARM_DECL:
case RESULT_DECL:
- {
- subvar_t svars;
-
- /* Add the subvars for a variable, if it has subvars, to DEFS
- or USES. Otherwise, add the variable itself. Whether it
- goes to USES or DEFS depends on the operand flags. */
- if (var_can_have_subvars (expr)
- && (svars = get_subvars_for_var (expr)))
- {
- subvar_t sv;
- for (sv = svars; sv; sv = sv->next)
- add_stmt_operand (&sv->var, s_ann, flags);
- }
- else
- add_stmt_operand (expr_p, s_ann, flags);
-
- return;
- }
+ add_stmt_operand (expr_p, s_ann, flags);
+ return;
case MISALIGNED_INDIRECT_REF:
get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
@@ -1934,7 +2060,7 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
case ALIGN_INDIRECT_REF:
case INDIRECT_REF:
- get_indirect_ref_operands (stmt, expr, flags, NULL_TREE, 0, -1, true);
+ get_indirect_ref_operands (stmt, expr, flags);
return;
case TARGET_MEM_REF:
@@ -1946,69 +2072,13 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
case COMPONENT_REF:
case REALPART_EXPR:
case IMAGPART_EXPR:
- {
- tree ref;
- HOST_WIDE_INT offset, size, maxsize;
- bool none = true;
-
- /* This component reference becomes an access to all of the
- subvariables it can touch, if we can determine that, but
- *NOT* the real one. If we can't determine which fields we
- could touch, the recursion will eventually get to a
- variable and add *all* of its subvars, or whatever is the
- minimum correct subset. */
- ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
- if (SSA_VAR_P (ref) && get_subvars_for_var (ref))
- {
- subvar_t sv;
- subvar_t svars = get_subvars_for_var (ref);
-
- for (sv = svars; sv; sv = sv->next)
- {
- bool exact;
-
- if (overlap_subvar (offset, maxsize, sv->var, &exact))
- {
- none = false;
- add_stmt_operand (&sv->var, s_ann, flags);
- }
- }
-
- if (!none)
- flags |= opf_no_vops;
- }
- else if (TREE_CODE (ref) == INDIRECT_REF)
- {
- get_indirect_ref_operands (stmt, ref, flags, expr, offset,
- maxsize, false);
- flags |= opf_no_vops;
- }
-
- /* Even if we found subvars above we need to ensure to see
- immediate uses for d in s.a[d]. In case of s.a having
- a subvar or we would miss it otherwise. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
-
- if (code == COMPONENT_REF)
- {
- if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
- s_ann->has_volatile_ops = true;
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
- }
- else if (code == ARRAY_REF || code == ARRAY_RANGE_REF)
- {
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 3), opf_none);
- }
-
- return;
- }
+ get_aggregate_operands (stmt, expr, flags);
+ return;
case WITH_SIZE_EXPR:
/* WITH_SIZE_EXPR is a pass-through reference to its first argument,
and an rvalue reference to its second argument. */
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
return;
@@ -2018,9 +2088,9 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
case COND_EXPR:
case VEC_COND_EXPR:
- get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_use);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_use);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_use);
return;
case MODIFY_EXPR:
@@ -2037,7 +2107,7 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
for (idx = 0;
VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (expr), idx, ce);
idx++)
- get_expr_operands (stmt, &ce->value, opf_none);
+ get_expr_operands (stmt, &ce->value, opf_use);
return;
}
@@ -2124,11 +2194,11 @@ parse_ssa_operands (tree stmt)
break;
case COND_EXPR:
- get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_none);
+ get_expr_operands (stmt, &COND_EXPR_COND (stmt), opf_use);
break;
case SWITCH_EXPR:
- get_expr_operands (stmt, &SWITCH_COND (stmt), opf_none);
+ get_expr_operands (stmt, &SWITCH_COND (stmt), opf_use);
break;
case ASM_EXPR:
@@ -2136,15 +2206,15 @@ parse_ssa_operands (tree stmt)
break;
case RETURN_EXPR:
- get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (stmt, 0), opf_use);
break;
case GOTO_EXPR:
- get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_none);
+ get_expr_operands (stmt, &GOTO_DESTINATION (stmt), opf_use);
break;
case LABEL_EXPR:
- get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_none);
+ get_expr_operands (stmt, &LABEL_EXPR_LABEL (stmt), opf_use);
break;
case BIND_EXPR:
@@ -2163,7 +2233,7 @@ parse_ssa_operands (tree stmt)
will fail in add_stmt_operand. This default will handle
statements like empty statements, or CALL_EXPRs that may
appear on the RHS of a statement or as statements themselves. */
- get_expr_operands (stmt, &stmt, opf_none);
+ get_expr_operands (stmt, &stmt, opf_use);
break;
}
}
@@ -2220,6 +2290,9 @@ update_stmt_operands (tree stmt)
gcc_assert (ann->modified);
+ /* This cannot be called from get_loads_and_stores. */
+ gcc_assert (!gathering_loads_stores ());
+
timevar_push (TV_TREE_OPS);
build_ssa_operands (stmt);
@@ -2237,49 +2310,39 @@ void
copy_virtual_operands (tree dest, tree src)
{
tree t;
- ssa_op_iter iter, old_iter;
- use_operand_p use_p;
- vuse_vec_p u3, u4;
- def_operand_p def_p, d2;
-
- build_ssa_operands (dest);
-
- /* Copy all the virtual fields. */
- FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
- append_vuse (t);
- FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VDEF)
- append_vdef (t);
-
- if (VEC_length (tree, build_vuses) == 0
- && VEC_length (tree, build_vdefs) == 0)
- return;
-
- /* Now commit the virtual operands to this stmt. */
- finalize_ssa_vdefs (dest);
- finalize_ssa_vuses (dest);
+ unsigned int n;
+ ssa_op_iter iter;
- /* Finally, set the field to the same values as then originals. */
- t = op_iter_init_tree (&old_iter, src, SSA_OP_VUSE);
- FOR_EACH_SSA_USE_OPERAND (use_p, dest, iter, SSA_OP_VUSE)
+ n = NUM_SSA_OPERANDS (src, SSA_OP_VUSE);
+ if (n > 0)
{
- gcc_assert (!op_iter_done (&old_iter));
- SET_USE (use_p, t);
- t = op_iter_next_tree (&old_iter);
+ int x = 0;
+ struct vuse_optype_d vuse;
+ vuse_optype_p vuse_p = &vuse;
+ vuse_p = add_vuse_op (dest, NULL, n, &vuse_p);
+ vuse_p->next = NULL;
+ FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VUSE)
+ SET_USE (VUSE_OP_PTR (vuse_p, x++), t);
+ VUSE_OPS (dest) = vuse.next;
}
- gcc_assert (op_iter_done (&old_iter));
+ else
+ VUSE_OPS (dest) = NULL;
- op_iter_init_vdef (&old_iter, src, &u3, &d2);
- FOR_EACH_SSA_VDEF_OPERAND (def_p, u4, dest, iter)
+ n = NUM_SSA_OPERANDS (src, SSA_OP_VMAYUSE);
+ if (n > 0)
{
- gcc_assert (!op_iter_done (&old_iter));
- gcc_assert (VUSE_VECT_NUM_ELEM (*u3) == 1);
- gcc_assert (VUSE_VECT_NUM_ELEM (*u4) == 1);
- SET_USE (VUSE_ELEMENT_PTR_NC (*u4, 0), VUSE_ELEMENT_VAR (*u3, 0));
- SET_DEF (def_p, DEF_FROM_PTR (d2));
- op_iter_next_vdef (&u3, &d2, &old_iter);
+ int x = 0;
+ struct vdef_optype_d vdef;
+ vdef_optype_p vdef_p = &vdef;
+ vdef_p = add_vdef_op (dest, NULL, n, &vdef_p);
+ vdef_p->next = NULL;
+ FOR_EACH_SSA_TREE_OPERAND (t, src, iter, SSA_OP_VMAYUSE)
+ SET_USE (VDEF_OP_PTR (vdef_p, x++), t);
+ SET_DEF (VDEF_RESULT_PTR (vdef_p), VDEF_RESULT (VDEF_OPS (src)));
+ VDEF_OPS (dest) = vdef.next;
}
- gcc_assert (op_iter_done (&old_iter));
-
+ else
+ VDEF_OPS (dest) = NULL;
}
@@ -2292,49 +2355,34 @@ copy_virtual_operands (tree dest, tree src)
void
create_ssa_artficial_load_stmt (tree new_stmt, tree old_stmt)
{
- stmt_ann_t ann;
tree op;
ssa_op_iter iter;
use_operand_p use_p;
- unsigned x;
+ vdef_optype_p vdefs;
+ vuse_optype_p vuses;
- ann = get_stmt_ann (new_stmt);
+ get_stmt_ann (new_stmt);
- /* Process the stmt looking for operands. */
+ /* Process NEW_STMT looking for operands. */
start_ssa_stmt_operands ();
parse_ssa_operands (new_stmt);
- for (x = 0; x < VEC_length (tree, build_vuses); x++)
- {
- tree t = VEC_index (tree, build_vuses, x);
- if (TREE_CODE (t) != SSA_NAME)
- {
- var_ann_t ann = var_ann (t);
- ann->in_vuse_list = 0;
- }
- }
-
- for (x = 0; x < VEC_length (tree, build_vdefs); x++)
- {
- tree t = VEC_index (tree, build_vdefs, x);
- if (TREE_CODE (t) != SSA_NAME)
- {
- var_ann_t ann = var_ann (t);
- ann->in_vdef_list = 0;
- }
- }
-
/* Remove any virtual operands that were found. */
VEC_truncate (tree, build_vdefs, 0);
VEC_truncate (tree, build_vuses, 0);
- /* For each VDEF on the original statement, we want to create a
- VUSE of the VDEF result on the new statement. */
- FOR_EACH_SSA_TREE_OPERAND (op, old_stmt, iter, SSA_OP_VDEF)
- append_vuse (op);
-
- /* Now build the operands for this new stmt. */
+ /* Create a VUSE for the LHS of the VDEF operator in the original
+ statement (OLD_STMT). */
+ vdefs = VDEF_OPS (old_stmt);
+ gcc_assert (vdefs && vdefs->next == NULL);
+
+ /* Add a VUSE operator for NEW_STMT and set its operand to the LHS
+ of the old VDEF. */
+ append_vuse ();
finalize_ssa_stmt_operands (new_stmt);
+ op = VDEF_RESULT (vdefs);
+ vuses = VUSE_OPS (new_stmt);
+ SET_USE (VUSE_OP_PTR (vuses, 0), op);
/* All uses in this fake stmt must not be in the immediate use lists. */
FOR_EACH_SSA_USE_OPERAND (use_p, new_stmt, iter, SSA_OP_ALL_USES)
@@ -2578,4 +2626,291 @@ debug_immediate_uses_for (tree var)
dump_immediate_uses_for (stderr, var);
}
+void debug_loads_and_stores (tree);
+void dump_loads_and_stores (FILE *, tree, bitmap, bitmap);
+
+
+/* Dump symbols loaded and stored by STMT to FILE. If LOADS and
+ STORES are given, the statement is not scanned. */
+
+void
+dump_loads_and_stores (FILE *file, tree stmt, bitmap loads, bitmap stores)
+{
+ unsigned i;
+ bitmap_iterator bi;
+ bool free_memory_p = false;
+
+ if (loads == NULL && stores == NULL)
+ {
+ loads = BITMAP_ALLOC (NULL);
+ stores = BITMAP_ALLOC (NULL);
+ get_loads_and_stores (stmt, loads, stores);
+ free_memory_p = true;
+ }
+
+ fprintf (file, "\n\n");
+ print_generic_stmt (file, stmt, TDF_VOPS);
+ fprintf (file, "\tLOADS: { ");
+ EXECUTE_IF_SET_IN_BITMAP (loads, 0, i, bi)
+ {
+ print_generic_expr (file, referenced_var (i), 0);
+ fprintf (file, " ");
+ }
+ fprintf (file, "}\n");
+
+ fprintf (file, "\tSTORES: { ");
+ EXECUTE_IF_SET_IN_BITMAP (stores, 0, i, bi)
+ {
+ print_generic_expr (file, referenced_var (i), 0);
+ fprintf (file, " ");
+ }
+ fprintf (file, "}\n");
+
+ if (free_memory_p)
+ {
+ BITMAP_FREE (loads);
+ BITMAP_FREE (stores);
+ }
+}
+
+
+/* Dump symbols loaded and stored by STMT to stderr. */
+
+void
+debug_loads_and_stores (tree stmt)
+{
+ dump_loads_and_stores (stderr, stmt, NULL, NULL);
+}
+
+
+/* Collect all the symbols loaded and stored by the arguments of PHI
+ node PHI. Store the sets in LOADS and STORES respectively. */
+
+static void
+get_loads_and_stores_for_phi (tree phi, bitmap loads, bitmap stores)
+{
+ int i;
+ tree lhs = PHI_RESULT (phi);
+
+ gcc_assert (!is_gimple_reg (lhs));
+
+ for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+ {
+ tree sym, arg;
+
+ arg = PHI_ARG_DEF (phi, i);
+
+ /* Avoid infinite recursion. */
+ if (arg == lhs)
+ continue;
+
+ sym = SSA_NAME_VAR (arg);
+
+ if (sym == mem_var)
+ {
+ /* Recurse for a memory-factored SSA name. */
+ get_loads_and_stores (SSA_NAME_DEF_STMT (arg), loads, stores);
+ }
+ else
+ {
+ /* Otherwise, this PHI node will both load and store the
+ underlying symbol for ARG. */
+ bitmap_set_bit (loads, DECL_UID (sym));
+ bitmap_set_bit (stores, DECL_UID (sym));
+ }
+ }
+}
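An illustration of the recursion above (hypothetical IL):

    /* For a factored PHI node:

         .MEM_5 = PHI <.MEM_3, a_4>

       the .MEM_3 argument recurses into its defining statement to
       collect the symbols it factors, while the a_4 argument
       simply adds 'a' to both LOADS and STORES.  */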
+
+
+/* Add to LOADS the set of symbols loaded by STMT and add to STORES
+ the set of symbols stored by STMT. Both sets must be allocated by
+ the caller. FIXME, should allow for either set to be NULL. */
+
+void
+get_loads_and_stores (tree stmt, bitmap loads, bitmap stores)
+{
+ gcc_assert (loads && stores);
+ memset (&clobber_stats, 0, sizeof (clobber_stats));
+
+ if (TREE_CODE (stmt) != PHI_NODE)
+ {
+ gcc_assert (stmt_references_memory_p (stmt));
+
+ /* Point the internal loaded/stored sets to the ones provided. */
+ loaded_syms = loads;
+ stored_syms = stores;
+
+ /* Parse the statement. We don't really care about its operands, so
+ there's no need to initialize anything. If any operand was added
+ to the cache, it is discarded. */
+ parse_ssa_operands (stmt);
+ truncate_ssa_stmt_operands ();
+
+ /* We don't need the symbol sets anymore. */
+ loaded_syms = NULL;
+ stored_syms = NULL;
+ }
+ else
+ {
+ /* PHI nodes need special treatment. A PHI node loads/stores
+ all the symbols loaded/stored by its arguments. */
+ get_loads_and_stores_for_phi (stmt, loads, stores);
+ }
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ dump_loads_and_stores (dump_file, stmt, loads, stores);
+
+ if (dump_file && (dump_flags & TDF_STATS))
+ {
+ fprintf (dump_file, "Original clobbered vars: %d\n",
+ clobber_stats.clobbered_vars);
+ fprintf (dump_file, "Static write clobbers avoided: %d\n",
+ clobber_stats.static_write_clobbers_avoided);
+ fprintf (dump_file, "Static read clobbers avoided: %d\n",
+ clobber_stats.static_read_clobbers_avoided);
+ fprintf (dump_file, "Unescapable clobbers avoided: %d\n",
+ clobber_stats.unescapable_clobbers_avoided);
+ fprintf (dump_file, "Original read-only clobbers: %d\n",
+ clobber_stats.readonly_clobbers);
+ fprintf (dump_file, "Static read-only clobbers avoided: %d\n",
+ clobber_stats.static_readonly_clobbers_avoided);
+ }
+}
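Typical caller-side usage mirrors the converted call sites later in this patch (tree-ssa-structalias.c, tree-stdarg.c); a sketch, with VAR standing for some symbol of interest:

    bitmap loads = BITMAP_ALLOC (NULL);
    bitmap stores = BITMAP_ALLOC (NULL);

    if (stmt_references_memory_p (stmt))
      {
        get_loads_and_stores (stmt, loads, stores);
        if (bitmap_bit_p (stores, DECL_UID (var)))
          ; /* STMT may modify VAR.  */
      }

    BITMAP_FREE (loads);
    BITMAP_FREE (stores);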
+
+
+/* Create a new change buffer for the statement pointed to by STMT_P
+ and push the buffer into SCB_STACK. Each change buffer
+ records state information needed to determine what changed in the
+ statement. Mainly, this keeps track of symbols that may need to be
+ put into SSA form, SSA name replacements and other information
+ needed to keep the SSA form up to date. */
+
+void
+push_stmt_changes (tree *stmt_p)
+{
+ tree stmt;
+ scb_t buf;
+
+ stmt = *stmt_p;
+
+ /* It makes no sense to keep track of PHI nodes. */
+ if (TREE_CODE (stmt) == PHI_NODE)
+ return;
+
+ buf = xmalloc (sizeof *buf);
+ memset (buf, 0, sizeof *buf);
+
+ buf->stmt_p = stmt_p;
+
+ if (stmt_references_memory_p (stmt))
+ {
+ buf->loads = BITMAP_ALLOC (NULL);
+ buf->stores = BITMAP_ALLOC (NULL);
+ get_loads_and_stores (stmt, buf->loads, buf->stores);
+ }
+
+ VEC_safe_push (scb_t, heap, scb_stack, buf);
+}
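The intended usage pattern brackets every in-place statement modification, as wired into substitute_and_fold further down (a sketch):

    push_stmt_changes (bsi_stmt_ptr (i));  /* Snapshot loads/stores.  */
    /* ... fold or propagate into the statement ...  */
    pop_stmt_changes (bsi_stmt_ptr (i));   /* Diff the snapshots and
                                              mark symbols for renaming.  */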
+
+
+/* Given two sets S1 and S2, mark the symbols in S1 U S2 for renaming
+ if S1 != S2. Sets S1 and S2 may be overwritten by this function. */
+
+static void
+mark_difference_for_renaming (bitmap s1, bitmap s2)
+{
+ bitmap tmp = NULL;
+
+ if (s1 == NULL && s2 == NULL)
+ return;
+
+ if (s1 && s2 == NULL)
+ tmp = s1;
+ else if (s1 == NULL && s2)
+ tmp = s2;
+ else if (!bitmap_equal_p (s1, s2))
+ {
+ tmp = s1;
+ bitmap_ior_into (tmp, s2);
+ }
+
+ if (tmp)
+ mark_set_for_renaming (tmp);
+}
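For example, if S1 = { a, b } and S2 = { a, c }, the sets differ, so the union { a, b, c } is marked for renaming; if exactly one of the sets is NULL, the other is marked in its entirety.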
+
+
+/* Pop the top SCB from SCB_STACK and act on the differences between
+ what was recorded by push_stmt_changes and the current state of
+ the statement. */
+
+void
+pop_stmt_changes (tree *stmt_p)
+{
+ tree op, stmt;
+ ssa_op_iter iter;
+ bitmap loads, stores;
+ scb_t buf;
+
+ stmt = *stmt_p;
+
+ /* It makes no sense to keep track of PHI nodes. */
+ if (TREE_CODE (stmt) == PHI_NODE)
+ return;
+
+ buf = VEC_pop (scb_t, scb_stack);
+ gcc_assert (stmt_p == buf->stmt_p);
+
+ /* Force an operand re-scan on the statement and mark any newly
+ exposed variables. */
+ update_stmt (stmt);
+
+ /* Determine whether any memory symbols need to be renamed. If the
+ sets of loads and stores are different after the statement is
+ modified, then the affected symbols need to be renamed. */
+ loads = stores = NULL;
+ if (stmt_references_memory_p (stmt))
+ {
+ loads = BITMAP_ALLOC (NULL);
+ stores = BITMAP_ALLOC (NULL);
+ get_loads_and_stores (stmt, loads, stores);
+ }
+
+ /* If LOADS is different from BUF->LOADS, the affected
+ symbols need to be marked for renaming. */
+ mark_difference_for_renaming (loads, buf->loads);
+
+ /* Similarly for STORES and BUF->STORES. */
+ mark_difference_for_renaming (stores, buf->stores);
+
+ /* Mark all the GIMPLE register operands for renaming. */
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF|SSA_OP_USE)
+ if (DECL_P (op))
+ mark_sym_for_renaming (op);
+
+ /* FIXME, need to add more finalizers here. Cleanup EH info,
+ recompute invariants for address expressions, add
+ SSA replacement mappings, etc. For instance, given
+ testsuite/gcc.c-torture/compile/pr16808.c, we fold a statement of
+ the form:
+
+ # SMT.4_20 = VDEF <SMT.4_16>
+ D.1576_11 = 1.0e+0;
+
+ So, the VDEF will disappear, but instead of marking SMT.4 for
+ renaming it would be far more efficient to establish a
+ replacement mapping that would replace every reference of
+ SMT.4_20 with SMT.4_16. */
+
+ /* Free memory used by the buffer. */
+ BITMAP_FREE (buf->loads);
+ BITMAP_FREE (buf->stores);
+ buf->stmt_p = NULL;
+ free (buf);
+
+ BITMAP_FREE (loads);
+ BITMAP_FREE (stores);
+}
+
#include "gt-tree-ssa-operands.h"
diff --git a/gcc/tree-ssa-operands.h b/gcc/tree-ssa-operands.h
index 1f739951e33..9deee154f4e 100644
--- a/gcc/tree-ssa-operands.h
+++ b/gcc/tree-ssa-operands.h
@@ -104,7 +104,7 @@ struct vuse_optype_d
typedef struct vuse_optype_d *vuse_optype_p;
-#define SSA_OPERAND_MEMORY_SIZE (2048 - sizeof (void *))
+#define SSA_OPERAND_MEMORY_SIZE (102400 - sizeof (void *))
struct ssa_operand_memory_d GTY((chain_next("%h.next")))
{
@@ -192,6 +192,9 @@ extern void debug_immediate_uses_for (tree var);
extern bool ssa_operands_active (void);
extern void add_to_addressable_set (tree, bitmap *);
+extern void get_loads_and_stores (tree, bitmap, bitmap);
+extern void push_stmt_changes (tree *);
+extern void pop_stmt_changes (tree *);
enum ssa_op_iter_type {
ssa_op_iter_none = 0,
@@ -200,6 +203,7 @@ enum ssa_op_iter_type {
ssa_op_iter_def,
ssa_op_iter_vdef
};
+
/* This structure is used in the operand iterator loops. It contains the
items required to determine which operand is retrieved next. During
optimization, this structure is scalarized, and any unused fields are
diff --git a/gcc/tree-ssa-pre.c b/gcc/tree-ssa-pre.c
index 6a82acbe22f..47007032cc5 100644
--- a/gcc/tree-ssa-pre.c
+++ b/gcc/tree-ssa-pre.c
@@ -1863,8 +1863,14 @@ compute_vuse_representatives (void)
}
FOR_EACH_PHI_ARG (usep, phi, iter, SSA_OP_ALL_USES)
{
+ bitmap usebitmap;
+
tree use = USE_FROM_PTR (usep);
- bitmap usebitmap = get_representative (vuse_names,
+
+ if (is_gimple_min_invariant (use))
+ continue;
+
+ usebitmap = get_representative (vuse_names,
SSA_NAME_VERSION (use));
if (usebitmap != NULL)
{
@@ -2405,7 +2411,6 @@ create_expression_by_pieces (basic_block block, tree expr, tree stmts)
vn_add (forcedname, val);
bitmap_value_replace_in_set (NEW_SETS (block), forcedname);
bitmap_value_replace_in_set (AVAIL_OUT (block), forcedname);
- mark_new_vars_to_rename (stmt);
}
tsi = tsi_last (stmts);
tsi_link_after (&tsi, forced_stmts, TSI_CONTINUE_LINKING);
@@ -2433,7 +2438,9 @@ create_expression_by_pieces (basic_block block, tree expr, tree stmts)
tsi = tsi_last (stmts);
tsi_link_after (&tsi, newexpr, TSI_CONTINUE_LINKING);
VEC_safe_push (tree, heap, inserted_exprs, newexpr);
- mark_new_vars_to_rename (newexpr);
+
+ /* All the symbols in NEWEXPR should be put into SSA form. */
+ mark_symbols_for_renaming (newexpr);
/* Add a value handle to the temporary.
The value may already exist in either NEW_SETS, or AVAIL_OUT, because
@@ -2515,29 +2522,6 @@ insert_into_preds_of_block (basic_block block, value_set_node_t node,
if (can_PRE_operation (eprime))
{
-#ifdef ENABLE_CHECKING
- tree vh;
-
- /* eprime may be an invariant. */
- vh = TREE_CODE (eprime) == VALUE_HANDLE
- ? eprime
- : get_value_handle (eprime);
-
- /* ensure that the virtual uses we need reach our block. */
- if (TREE_CODE (vh) == VALUE_HANDLE)
- {
- int i;
- tree vuse;
- for (i = 0;
- VEC_iterate (tree, VALUE_HANDLE_VUSES (vh), i, vuse);
- i++)
- {
- size_t id = SSA_NAME_VERSION (vuse);
- gcc_assert (bitmap_bit_p (RVUSE_OUT (bprime), id)
- || IS_EMPTY_STMT (SSA_NAME_DEF_STMT (vuse)));
- }
- }
-#endif
builtexpr = create_expression_by_pieces (bprime,
eprime,
stmts);
@@ -3936,7 +3920,8 @@ do_pre (void)
static bool
gate_pre (void)
{
- return flag_tree_pre != 0;
+ return false;
+ /*return flag_tree_pre != 0;*/
}
struct tree_opt_pass pass_pre =
diff --git a/gcc/tree-ssa-propagate.c b/gcc/tree-ssa-propagate.c
index 2e3089f8da9..5801f52900b 100644
--- a/gcc/tree-ssa-propagate.c
+++ b/gcc/tree-ssa-propagate.c
@@ -1115,6 +1115,9 @@ substitute_and_fold (prop_value_t *prop_value, bool use_ranges_p)
&& TREE_CODE (TREE_OPERAND (stmt, 1)) == ASSERT_EXPR)
continue;
+ /* Record the state of the statement before replacements. */
+ push_stmt_changes (bsi_stmt_ptr (i));
+
/* Replace the statement with its folded version and mark it
folded. */
did_replace = false;
@@ -1150,10 +1153,6 @@ substitute_and_fold (prop_value_t *prop_value, bool use_ranges_p)
fold_stmt (bsi_stmt_ptr (i));
stmt = bsi_stmt (i);
- /* If we folded a builtin function, we'll likely
- need to rename VDEFs. */
- mark_new_vars_to_rename (stmt);
-
/* If we cleaned up EH information from the statement,
remove EH edges. */
if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
@@ -1182,6 +1181,8 @@ substitute_and_fold (prop_value_t *prop_value, bool use_ranges_p)
if (use_ranges_p)
simplify_stmt_using_ranges (stmt);
+ /* Determine what needs to be done to update the SSA form. */
+ pop_stmt_changes (bsi_stmt_ptr (i));
}
}
diff --git a/gcc/tree-ssa-structalias.c b/gcc/tree-ssa-structalias.c
index 9bc0369e69f..d9b90d5fb52 100644
--- a/gcc/tree-ssa-structalias.c
+++ b/gcc/tree-ssa-structalias.c
@@ -3141,7 +3141,8 @@ update_alias_info (tree stmt, struct alias_info *ai)
/* Update reference counter for definitions to any
potentially aliased variable. This is used in the alias
- grouping heuristics. */
+ grouping heuristics. FIXME, not needed anymore? There are no
+ more alias grouping heuristics. */
FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_DEF)
{
tree var = SSA_NAME_VAR (op);
@@ -3153,10 +3154,14 @@ update_alias_info (tree stmt, struct alias_info *ai)
}
/* Mark variables in VDEF operands as being written to. */
- FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_VIRTUAL_DEFS)
+ if (stmt_references_memory_p (stmt))
{
- tree var = DECL_P (op) ? op : SSA_NAME_VAR (op);
- bitmap_set_bit (ai->written_vars, DECL_UID (var));
+ bitmap loads = BITMAP_ALLOC (NULL);
+ bitmap stores = BITMAP_ALLOC (NULL);
+ get_loads_and_stores (stmt, loads, stores);
+ bitmap_ior_into (ai->written_vars, stores);
+ BITMAP_FREE (loads);
+ BITMAP_FREE (stores);
}
}
diff --git a/gcc/tree-ssa.c b/gcc/tree-ssa.c
index d8b10b42a5b..b20f60032a6 100644
--- a/gcc/tree-ssa.c
+++ b/gcc/tree-ssa.c
@@ -742,7 +742,7 @@ verify_ssa (bool check_modified_stmt)
if (check_modified_stmt && stmt_modified_p (stmt))
{
- error ("stmt (%p) marked modified after optimization pass : ",
+ error ("stmt (%p) marked modified after optimization pass: ",
(void *)stmt);
print_generic_stmt (stderr, stmt, TDF_VOPS);
goto err;
@@ -757,7 +757,9 @@ verify_ssa (bool check_modified_stmt)
base_address = get_base_address (lhs);
if (base_address
+ && aliases_computed_p
&& SSA_VAR_P (base_address)
+ && !stmt_ann (stmt)->has_volatile_ops
&& ZERO_SSA_OPERANDS (stmt, SSA_OP_VDEF))
{
error ("statement makes a memory store, but has no VDEFS");
@@ -821,6 +823,27 @@ int_tree_map_hash (const void *item)
}
+/* Create .MEM, an artificial variable to represent load and store
+ operations to "memory". This includes: global variables, pointer
+ dereferences and aliased variables (i.e., anything for which
+ is_gimple_reg returns false). */
+
+static void
+create_mem_var (void)
+{
+ mem_var = build_decl (VAR_DECL, get_identifier (".MEM"), void_type_node);
+ DECL_ARTIFICIAL (mem_var) = 1;
+ TREE_READONLY (mem_var) = 0;
+ DECL_EXTERNAL (mem_var) = 1;
+ TREE_STATIC (mem_var) = 1;
+ TREE_USED (mem_var) = 1;
+ DECL_CONTEXT (mem_var) = NULL_TREE;
+ TREE_THIS_VOLATILE (mem_var) = 0;
+ TREE_ADDRESSABLE (mem_var) = 0;
+ create_var_ann (mem_var);
+}
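An illustration of what .MEM looks like in the IL (hypothetical names):

    /* A store through a pointer now factors the memory state into
       a single name chain:

         # .MEM_4 = VDEF <.MEM_3>
         *p_1 = x_2;

       instead of one virtual operand chain per affected symbol.  */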
+
+
/* Initialize global DFA and SSA structures. */
void
@@ -834,7 +857,8 @@ init_tree_ssa (void)
init_alias_heapvars ();
init_ssanames ();
init_phinodes ();
- global_var = NULL_TREE;
+ if (mem_var == NULL_TREE)
+ create_mem_var ();
aliases_computed_p = false;
}
@@ -883,21 +907,22 @@ delete_tree_ssa (void)
ggc_free (var->common.ann);
var->common.ann = NULL;
}
+
htab_delete (referenced_vars);
referenced_vars = NULL;
fini_ssanames ();
fini_phinodes ();
- global_var = NULL_TREE;
-
htab_delete (default_defs);
+ default_defs = NULL;
+
BITMAP_FREE (call_clobbered_vars);
- call_clobbered_vars = NULL;
BITMAP_FREE (addressable_vars);
- addressable_vars = NULL;
+
modified_noreturn_calls = NULL;
aliases_computed_p = false;
+
delete_alias_heapvars ();
gcc_assert (!need_ssa_update_p ());
}
@@ -1006,7 +1031,8 @@ tree_ssa_useless_type_conversion (tree expr)
return false;
}
-/* Returns true if statement STMT may read memory. */
+
+/* Returns true if statement STMT may access memory. */
bool
stmt_references_memory_p (tree stmt)
@@ -1019,6 +1045,7 @@ stmt_references_memory_p (tree stmt)
return (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS));
}
+
/* Internal helper for walk_use_def_chains. VAR, FN and DATA are as
described in walk_use_def_chains.
@@ -1305,4 +1332,3 @@ struct tree_opt_pass pass_late_warn_uninitialized =
0, /* todo_flags_finish */
0 /* letter */
};
-
diff --git a/gcc/tree-ssanames.c b/gcc/tree-ssanames.c
index 5077808947b..8ef4c44c94e 100644
--- a/gcc/tree-ssanames.c
+++ b/gcc/tree-ssanames.c
@@ -304,6 +304,7 @@ release_defs (tree stmt)
void
replace_ssa_name_symbol (tree ssa_name, tree sym)
{
+ gcc_assert (sym != mem_var && SSA_NAME_VAR (ssa_name) != mem_var);
SSA_NAME_VAR (ssa_name) = sym;
TREE_TYPE (ssa_name) = TREE_TYPE (sym);
}
diff --git a/gcc/tree-stdarg.c b/gcc/tree-stdarg.c
index 4a67bc850f6..110c9f164b4 100644
--- a/gcc/tree-stdarg.c
+++ b/gcc/tree-stdarg.c
@@ -506,7 +506,9 @@ static bool
check_all_va_list_escapes (struct stdarg_info *si)
{
basic_block bb;
+ bitmap loads, stores;
+ loads = stores = NULL;
FOR_EACH_BB (bb)
{
block_stmt_iterator i;
@@ -518,8 +520,26 @@ check_all_va_list_escapes (struct stdarg_info *si)
FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_USES)
{
- if (! bitmap_bit_p (si->va_list_escape_vars,
- DECL_UID (SSA_NAME_VAR (use))))
+ tree sym = SSA_NAME_VAR (use);
+
+ if (sym == mem_var)
+ {
+ if (loads == NULL)
+ {
+ loads = BITMAP_ALLOC (NULL);
+ stores = BITMAP_ALLOC (NULL);
+ }
+ else
+ {
+ bitmap_clear (loads);
+ bitmap_clear (stores);
+ }
+
+ get_loads_and_stores (stmt, loads, stores);
+ if (!bitmap_intersect_p (si->va_list_escape_vars, loads))
+ continue;
+ }
+ else if (!bitmap_bit_p (si->va_list_escape_vars, DECL_UID (sym)))
continue;
if (TREE_CODE (stmt) == MODIFY_EXPR)
@@ -581,11 +601,16 @@ check_all_va_list_escapes (struct stdarg_info *si)
print_generic_expr (dump_file, stmt, dump_flags);
fputc ('\n', dump_file);
}
+
+ BITMAP_FREE (loads);
+ BITMAP_FREE (stores);
return true;
}
}
}
+ BITMAP_FREE (loads);
+ BITMAP_FREE (stores);
return false;
}
diff --git a/gcc/tree-vect-transform.c b/gcc/tree-vect-transform.c
index d0d57c8d4b2..1184aa5da2e 100644
--- a/gcc/tree-vect-transform.c
+++ b/gcc/tree-vect-transform.c
@@ -1669,6 +1669,7 @@ vectorizable_store (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
ssa_op_iter iter;
tree def, def_stmt;
enum vect_def_type dt;
+ bitmap loads, stores;
/* Is vectorizable store? */
@@ -1733,14 +1734,15 @@ vectorizable_store (tree stmt, block_stmt_iterator *bsi, tree *vec_stmt)
copy_virtual_operands (*vec_stmt, stmt);
FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_VDEF)
- {
- SSA_NAME_DEF_STMT (def) = *vec_stmt;
-
- /* If this virtual def has a use outside the loop and a loop peel is
- performed then the def may be renamed by the peel. Mark it for
- renaming so the later use will also be renamed. */
- mark_sym_for_renaming (SSA_NAME_VAR (def));
- }
+ SSA_NAME_DEF_STMT (def) = *vec_stmt;
+
+ loads = BITMAP_ALLOC (NULL);
+ stores = BITMAP_ALLOC (NULL);
+ get_loads_and_stores (stmt, loads, stores);
+ mark_set_for_renaming (loads);
+ mark_set_for_renaming (stores);
+ BITMAP_FREE (loads);
+ BITMAP_FREE (stores);
return true;
}
diff --git a/gcc/tree-vrp.c b/gcc/tree-vrp.c
index 3ac01fb70f3..5d00de9c77e 100644
--- a/gcc/tree-vrp.c
+++ b/gcc/tree-vrp.c
@@ -4698,13 +4698,13 @@ struct tree_opt_pass pass_vrp =
TV_TREE_VRP, /* tv_id */
PROP_ssa | PROP_alias, /* properties_required */
0, /* properties_provided */
- PROP_smt_usage, /* properties_destroyed */
+ 0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_cleanup_cfg
| TODO_ggc_collect
| TODO_verify_ssa
| TODO_dump_func
| TODO_update_ssa
- | TODO_update_smt_usage, /* todo_flags_finish */
+ | TODO_update_smt_usage, /* todo_flags_finish */
0 /* letter */
};
diff --git a/gcc/tree.c b/gcc/tree.c
index c369d604e47..89c9d7fd5b3 100644
--- a/gcc/tree.c
+++ b/gcc/tree.c
@@ -6878,8 +6878,10 @@ is_global_var (tree t)
bool
needs_to_live_in_memory (tree t)
{
- return (TREE_ADDRESSABLE (t)
- || is_global_var (t)
+ if (TREE_CODE (t) == SSA_NAME)
+ t = SSA_NAME_VAR (t);
+
+ return (may_be_aliased (t)
|| (TREE_CODE (t) == RESULT_DECL
&& aggregate_value_p (t, current_function_decl)));
}
diff --git a/gcc/tree.h b/gcc/tree.h
index aab46a20e90..76777f0f750 100644
--- a/gcc/tree.h
+++ b/gcc/tree.h
@@ -2328,17 +2328,11 @@ struct tree_memory_tag GTY(())
{
struct tree_decl_minimal common;
unsigned int is_global:1;
- unsigned int is_used_alone:1;
unsigned int old_used_alone:1;
};
#define MTAG_GLOBAL(NODE) (TREE_MEMORY_TAG_CHECK (NODE)->mtag.is_global)
-/* This flag is true if a SMT is used as the VDEF or VUSE operand
- directly, because the access had all of the SMT's aliases pruned
- from it. */
-#define SMT_USED_ALONE(NODE) (SYMBOL_MEMORY_TAG_CHECK (NODE)->mtag.is_used_alone)
-
/* This flag is used to temporarily store the old value of the used alone
flag when updating so we know whether to mark the symbol for
renaming. */