-rw-r--r--  gcc/ChangeLog.parloop | 109
-rw-r--r--  gcc/fortran/trans-openmp.c | 2
-rw-r--r--  gcc/gimple-low.c | 1
-rw-r--r--  gcc/gimplify.c | 59
-rw-r--r--  gcc/omp-low.c | 868
-rw-r--r--  gcc/passes.c | 3
-rw-r--r--  gcc/testsuite/g++.dg/gomp/for-10.C | 2
-rw-r--r--  gcc/testsuite/g++.dg/gomp/for-4.C | 2
-rw-r--r--  gcc/testsuite/g++.dg/gomp/for-5.C | 2
-rw-r--r--  gcc/testsuite/g++.dg/gomp/for-6.C | 2
-rw-r--r--  gcc/testsuite/g++.dg/gomp/for-7.C | 2
-rw-r--r--  gcc/testsuite/g++.dg/gomp/for-8.C | 2
-rw-r--r--  gcc/testsuite/g++.dg/gomp/for-9.C | 2
-rw-r--r--  gcc/testsuite/gcc.dg/gomp/critical-1.c | 2
-rw-r--r--  gcc/testsuite/gcc.dg/gomp/critical-3.c | 2
-rw-r--r--  gcc/testsuite/gcc.dg/gomp/for-10.c | 2
-rw-r--r--  gcc/testsuite/gcc.dg/gomp/for-13.c | 2
-rw-r--r--  gcc/testsuite/gcc.dg/gomp/for-4.c | 2
-rw-r--r--  gcc/testsuite/gcc.dg/gomp/for-5.c | 2
-rw-r--r--  gcc/testsuite/gcc.dg/gomp/for-6.c | 2
-rw-r--r--  gcc/testsuite/gcc.dg/gomp/for-7.c | 2
-rw-r--r--  gcc/testsuite/gcc.dg/gomp/for-8.c | 2
-rw-r--r--  gcc/testsuite/gcc.dg/gomp/for-9.c | 2
-rw-r--r--  gcc/testsuite/gcc.dg/gomp/master-3.c | 2
-rw-r--r--  gcc/testsuite/gcc.dg/gomp/ordered-1.c | 2
-rw-r--r--  gcc/tree-cfg.c | 255
-rw-r--r--  gcc/tree-flow-inline.h | 10
-rw-r--r--  gcc/tree-flow.h | 24
-rw-r--r--  gcc/tree-gimple.c | 1
-rw-r--r--  gcc/tree-inline.c | 1
-rw-r--r--  gcc/tree-outof-ssa.c | 73
-rw-r--r--  gcc/tree-parloops.c | 55
-rw-r--r--  gcc/tree-pass.h | 1
-rw-r--r--  gcc/tree-pretty-print.c | 18
-rw-r--r--  gcc/tree-profile.c | 8
-rw-r--r--  gcc/tree-scalar-evolution.c | 3
-rw-r--r--  gcc/tree-ssa-address.c | 19
-rw-r--r--  gcc/tree-ssa-alias.c | 59
-rw-r--r--  gcc/tree-ssa-live.c | 146
-rw-r--r--  gcc/tree-ssa-live.h | 26
-rw-r--r--  gcc/tree-ssa-loop-prefetch.c | 6
-rw-r--r--  gcc/tree-ssa-operands.c | 59
-rw-r--r--  gcc/tree-ssa-reassoc.c | 2
-rw-r--r--  gcc/tree.def | 15
-rw-r--r--  gcc/tree.h | 2
45 files changed, 1262 insertions(+), 601 deletions(-)
diff --git a/gcc/ChangeLog.parloop b/gcc/ChangeLog.parloop
index ad531d1d1d2..45a13b36027 100644
--- a/gcc/ChangeLog.parloop
+++ b/gcc/ChangeLog.parloop
@@ -1,3 +1,112 @@
+2006-10-16 Zdenek Dvorak <dvorakz@suse.cz>
+
+ * tree-pretty-print.c (dump_generic_node): Handle OMP_SECTIONS_SWITCH
+ and extra arguments of OMP_SECTIONS and OMP_CONTINUE.
+ * tree.h (OMP_DIRECTIVE_P): Add OMP_SECTIONS_SWITCH.
+ (OMP_SECTIONS_CONTROL): New macro.
+ * tree-pass.h (pass_expand_omp_O0): Declare.
+ * omp-low.c (find_omp_clause): Export.
+ (copy_var_decl): New function.
+ (omp_copy_decl_2): Use copy_var_decl.
+ (build_omp_barrier): Do not gimplify the result.
+ (lower_rec_input_clauses): Gimplify result of build_omp_barrier.
+ (extract_omp_for_data, expand_parallel_call, expand_omp_parallel,
+ expand_omp_for_generic, expand_omp_for_static_nochunk,
+ expand_omp_for_static_chunk, expand_omp_for, expand_omp_sections,
+ expand_omp_single): Work on ssa form.
+ (is_combined_parallel): Adjust for the cfg changes.
+ (build_omp_regions_1): Handle OMP_SECTIONS_SWITCH.
+ (gate_expand_omp_O0, pass_expand_omp_O0): New pass structure.
+ (lower_omp_sections): Emit OMP_SECTIONS_SWITCH. Add arguments to
+ OMP_CONTINUE and OMP_SECTIONS.
+ (lower_omp_for): Regimplify OMP_CLAUSE_SCHEDULE_CHUNK_EXPR.
+ Add arguments to OMP_CONTINUE.
+ * tree-gimple.c (is_gimple_stmt): Add OMP_SECTIONS_SWITCH.
+ * tree-ssa-alias.c (is_escape_site): Handle OMP_PARALLEL.
+ (remove_phis_for): New function.
+ (setup_pointers_and_addressables): Call remove_phis_for.
+ * gimple-low.c (lower_stmt): Handle OMP_SECTIONS_SWITCH.
+ * tree-parloops.c (mark_virtual_ops_for_renaming): Moved to tree-cfg.c.
+ (mark_call_virtual_operands): Exported.
+ (extract_loop_to_function): Move some parts to go_out_of_ssa.
+ * tree.def (OMP_SECTIONS): Change number of operands to 3.
+ (OMP_SECTIONS_SWITCH): New code.
+ (OMP_CONTINUE): Change number of operands to 2.
+ * tree-inline.c (estimate_num_insns_1): Handle OMP_SECTIONS_SWITCH.
+ * tree-outof-ssa.c (go_out_of_ssa): Take new function as an argument.
+ Remove references to variables from the old function, and clear
+ their annotations.
+ (create_temp): Do not record the variable as referenced.
+ * tree-flow.h (copy_var_decl, find_omp_clause,
+ mark_virtual_ops_for_renaming, mark_call_virtual_operands): Declare.
+ (go_out_of_ssa, force_gimple_operand_bsi): Declaration changed.
+ * tree-cfg.c (make_edges): Generate more precise cfg for OMP
+ constructs.
+ (tree_redirect_edge_and_branch): Handle OMP codes.
+ (replace_by_duplicate_decl, mark_virtual_ops_in_region): New functions.
+ (new_label_mapper): Use XNEW.
+ (move_stmt_r, move_block_to_fn, move_sese_region_to_fn): Copy the
+ region to clear decls.
+ (mark_virtual_ops_for_renaming): Moved from tree-parloops.c.
+ * passes.c (init_optimization_passes): Move pass_expand_omp to
+ pass_all_optimizations. Use pass_expand_omp_O0 instead of it.
+ * tree-ssa-operands.c (get_expr_operands): Record operands of
+ OMP constructs.
+ * fortran/trans-openmp.c (gfc_trans_omp_sections): Use build3 for
+ OMP_SECTIONS.
+ * gimplify.c (gimplify_omp_for): Ensure that the control variable
+ is gimple reg.
+
+ * gimplify.c (force_gimple_operand_bsi): Add arguments to specify
+ direction of insertion.
+ * tree-scalar-evolution.c (scev_const_prop): Change arguments to
+ force_gimple_operand_bsi.
+ * tree-ssa-address.c (gimplify_mem_ref_parts, create_mem_ref): Ditto.
+ * tree-ssa-loop-prefetch.c (issue_prefetch_ref): Ditto.
+ * tree-profile.c (prepare_instrumented_value,
+ tree_gen_interval_profiler, tree_gen_pow2_profiler,
+ tree_gen_one_value_profiler): Ditto.
+ * tree-ssa-reassoc.c (negate_value): Ditto.
+
+ * tree-flow-inline.h (set_is_used): Removed.
+ * tree-ssa-live.c (var_partition_map, out_of_ssa): New variables.
+ (int_int_map_eq, int_int_map_hash, delete_var_partition_map,
+ init_var_partition_map, int_int_map_find_or_insert): New functions.
+ (change_partition_var, mark_all_vars_used, mark_all_vars_used_1,
+ remove_unused_locals, create_ssa_var_map, root_var_init): Do not
+ use variable annotations.
+ * tree-ssa-live.h (VAR_ANN_PARTITION, VAR_ANN_ROOT_INDEX): Removed.
+ (struct int_int_map, var_partition_map, out_of_ssa,
+ int_int_map_find_or_insert, init_var_partition_map,
+ delete_var_partition_map): Declare.
+ (var_to_partition): Do not use variable annotations.
+ * tree-outof-ssa.c (insert_copy_on_edge, coalesce_ssa_name,
+ assign_vars, replace_use_variable, replace_def_variable,
+ rewrite_out_of_ssa): Do not use variable annotations.
+ * tree-flow.h (struct var_ann_d): Removed out_of_ssa_tag,
+ root_var_processed, partition and root_index.
+ (set_is_used): Declaration removed.
+
+ * testsuite/gcc.dg/gomp/for-13.c,
+ testsuite/gcc.dg/gomp/critical-1.c,
+ testsuite/gcc.dg/gomp/critical-3.c,
+ testsuite/gcc.dg/gomp/ordered-1.c,
+ testsuite/gcc.dg/gomp/for-4.c,
+ testsuite/gcc.dg/gomp/for-6.c,
+ testsuite/gcc.dg/gomp/master-3.c,
+ testsuite/gcc.dg/gomp/for-8.c,
+ testsuite/gcc.dg/gomp/for-10.c,
+ testsuite/gcc.dg/gomp/for-5.c,
+ testsuite/gcc.dg/gomp/for-7.c,
+ testsuite/gcc.dg/gomp/for-9.c,
+ testsuite/g++.dg/gomp/for-4.C,
+ testsuite/g++.dg/gomp/for-5.C,
+ testsuite/g++.dg/gomp/for-6.C,
+ testsuite/g++.dg/gomp/for-7.C,
+ testsuite/g++.dg/gomp/for-8.C,
+ testsuite/g++.dg/gomp/for-9.C,
+ testsuite/g++.dg/gomp/for-10.C): Add -O1 flag.
+
2006-10-01 Zdenek Dvorak <dvorakz@suse.cz>
* tree-parloops.c (eliminate_local_variables_1): Handle addresses of
diff --git a/gcc/fortran/trans-openmp.c b/gcc/fortran/trans-openmp.c
index e817196abb7..3aae122408d 100644
--- a/gcc/fortran/trans-openmp.c
+++ b/gcc/fortran/trans-openmp.c
@@ -1200,7 +1200,7 @@ gfc_trans_omp_sections (gfc_code *code, gfc_omp_clauses *clauses)
}
stmt = gfc_finish_block (&body);
- stmt = build2_v (OMP_SECTIONS, stmt, omp_clauses);
+ stmt = build3_v (OMP_SECTIONS, stmt, omp_clauses, NULL_TREE);
gfc_add_expr_to_block (&block, stmt);
return gfc_finish_block (&block);
diff --git a/gcc/gimple-low.c b/gcc/gimple-low.c
index ff6b8b27f30..cc8ff9f34bf 100644
--- a/gcc/gimple-low.c
+++ b/gcc/gimple-low.c
@@ -214,6 +214,7 @@ lower_stmt (tree_stmt_iterator *tsi, struct lower_data *data)
case SWITCH_EXPR:
case OMP_FOR:
case OMP_SECTIONS:
+ case OMP_SECTIONS_SWITCH:
case OMP_SECTION:
case OMP_SINGLE:
case OMP_MASTER:
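For reference, a minimal user-level C example (illustrative only, not taken from this patch) of the construct these tree codes describe: a sections region that the front end represents as OMP_SECTIONS with one OMP_SECTION per branch, and whose dispatch the lowering pass now drives through the new OMP_SECTIONS_SWITCH and a two-operand OMP_CONTINUE.

/* Illustrative OpenMP input; each section body becomes an OMP_SECTION,
   and the dispatch among them is what OMP_SECTIONS_SWITCH models.  */
#include <stdio.h>

void
do_sections (void)
{
#pragma omp parallel sections
  {
#pragma omp section
    printf ("section 1\n");
#pragma omp section
    printf ("section 2\n");
#pragma omp section
    printf ("section 3\n");
  }
}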
diff --git a/gcc/gimplify.c b/gcc/gimplify.c
index 0408816d4d7..eabf6e54d7e 100644
--- a/gcc/gimplify.c
+++ b/gcc/gimplify.c
@@ -4866,8 +4866,9 @@ gimplify_omp_parallel (tree *expr_p, tree *pre_p)
static enum gimplify_status
gimplify_omp_for (tree *expr_p, tree *pre_p)
{
- tree for_stmt, decl, t;
+ tree for_stmt, decl, var, t;
enum gimplify_status ret = 0;
+ tree body, init_decl = NULL_TREE;
for_stmt = *expr_p;
@@ -4885,12 +4886,27 @@ gimplify_omp_for (tree *expr_p, tree *pre_p)
else
omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
+ /* If DECL is not a gimple register, create a temporary variable to act as an
+ iteration counter. This is valid, since DECL cannot be modified in the
+ body of the loop. */
+ if (!is_gimple_reg (decl))
+ {
+ var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
+ TREE_OPERAND (t, 0) = var;
+
+ init_decl = build2 (MODIFY_EXPR, void_type_node, decl, var);
+ omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
+ }
+ else
+ var = decl;
+
ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt),
NULL, is_gimple_val, fb_rvalue);
t = OMP_FOR_COND (for_stmt);
gcc_assert (COMPARISON_CLASS_P (t));
gcc_assert (TREE_OPERAND (t, 0) == decl);
+ TREE_OPERAND (t, 0) = var;
ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt),
NULL, is_gimple_val, fb_rvalue);
@@ -4907,13 +4923,15 @@ gimplify_omp_for (tree *expr_p, tree *pre_p)
t = build_int_cst (TREE_TYPE (decl), -1);
goto build_modify;
build_modify:
- t = build2 (PLUS_EXPR, TREE_TYPE (decl), decl, t);
- t = build2 (MODIFY_EXPR, void_type_node, decl, t);
+ t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
+ t = build2 (MODIFY_EXPR, void_type_node, var, t);
OMP_FOR_INCR (for_stmt) = t;
break;
case MODIFY_EXPR:
gcc_assert (TREE_OPERAND (t, 0) == decl);
+ TREE_OPERAND (t, 0) = var;
+
t = TREE_OPERAND (t, 1);
switch (TREE_CODE (t))
{
@@ -4921,11 +4939,12 @@ gimplify_omp_for (tree *expr_p, tree *pre_p)
if (TREE_OPERAND (t, 1) == decl)
{
TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
- TREE_OPERAND (t, 0) = decl;
+ TREE_OPERAND (t, 0) = var;
break;
}
case MINUS_EXPR:
gcc_assert (TREE_OPERAND (t, 0) == decl);
+ TREE_OPERAND (t, 0) = var;
break;
default:
gcc_unreachable ();
@@ -4939,7 +4958,14 @@ gimplify_omp_for (tree *expr_p, tree *pre_p)
gcc_unreachable ();
}
- gimplify_to_stmt_list (&OMP_FOR_BODY (for_stmt));
+ body = OMP_FOR_BODY (for_stmt);
+ gimplify_to_stmt_list (&body);
+ t = alloc_stmt_list ();
+ if (init_decl)
+ append_to_statement_list (init_decl, &t);
+ append_to_statement_list (body, &t);
+ OMP_FOR_BODY (for_stmt) = t;
+
gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
@@ -6372,17 +6398,34 @@ force_gimple_operand (tree expr, tree *stmts, bool simple, tree var)
}
/* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR. If
- some statements are produced, emits them before BSI. */
+ some statements are produced, emits them before BSI. If BEFORE is true,
+ the statements are appended before BSI, otherwise they are appended after
+ it. M specifies the way BSI moves after insertion (BSI_SAME_STMT or
+ BSI_CONTINUE_LINKING are the usual values). */
tree
force_gimple_operand_bsi (block_stmt_iterator *bsi, tree expr,
- bool simple_p, tree var)
+ bool simple_p, tree var, bool before,
+ enum bsi_iterator_update m)
{
tree stmts;
expr = force_gimple_operand (expr, &stmts, simple_p, var);
if (stmts)
- bsi_insert_before (bsi, stmts, BSI_SAME_STMT);
+ {
+ tree_stmt_iterator tsi;
+
+ if (in_ssa_p)
+ {
+ for (tsi = tsi_start (stmts); !tsi_end_p (tsi); tsi_next (&tsi))
+ mark_new_vars_to_rename (tsi_stmt (tsi));
+ }
+
+ if (before)
+ bsi_insert_before (bsi, stmts, m);
+ else
+ bsi_insert_after (bsi, stmts, m);
+ }
return expr;
}
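The omp-low.c changes below rely on this extended interface. A typical call, paraphrasing the uses in expand_omp_for_static_nochunk (the surrounding variable names here are only placeholders), gimplifies a loop bound and inserts the generated statements before the statement at SI without moving the iterator:

/* Sketch of the new calling convention only; everything except
   force_gimple_operand_bsi itself is a placeholder.  */
block_stmt_iterator si = bsi_last (entry_bb);
tree n1 = force_gimple_operand_bsi (&si,
                                    fold_convert (type, fd->n1),
                                    true, NULL_TREE,
                                    /* before = */ true, BSI_SAME_STMT);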
diff --git a/gcc/omp-low.c b/gcc/omp-low.c
index 6ae37745b4e..6dde9eacc89 100644
--- a/gcc/omp-low.c
+++ b/gcc/omp-low.c
@@ -117,7 +117,7 @@ static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
/* Find an OpenMP clause of type KIND within CLAUSES. */
-static tree
+tree
find_omp_clause (tree clauses, enum tree_code kind)
{
for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
@@ -151,7 +151,7 @@ is_combined_parallel (struct omp_region *region)
static void
extract_omp_for_data (tree for_stmt, struct omp_for_data *fd)
{
- tree t;
+ tree t, var;
fd->for_stmt = for_stmt;
fd->pre = NULL;
@@ -159,13 +159,14 @@ extract_omp_for_data (tree for_stmt, struct omp_for_data *fd)
t = OMP_FOR_INIT (for_stmt);
gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
fd->v = TREE_OPERAND (t, 0);
- gcc_assert (DECL_P (fd->v));
+ gcc_assert (SSA_VAR_P (fd->v));
+ var = TREE_CODE (fd->v) == SSA_NAME ? SSA_NAME_VAR (fd->v) : fd->v;
gcc_assert (TREE_CODE (TREE_TYPE (fd->v)) == INTEGER_TYPE);
fd->n1 = TREE_OPERAND (t, 1);
t = OMP_FOR_COND (for_stmt);
fd->cond_code = TREE_CODE (t);
- gcc_assert (TREE_OPERAND (t, 0) == fd->v);
+ gcc_assert (TREE_OPERAND (t, 0) == var);
fd->n2 = TREE_OPERAND (t, 1);
switch (fd->cond_code)
{
@@ -188,9 +189,9 @@ extract_omp_for_data (tree for_stmt, struct omp_for_data *fd)
t = OMP_FOR_INCR (fd->for_stmt);
gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
- gcc_assert (TREE_OPERAND (t, 0) == fd->v);
+ gcc_assert (TREE_OPERAND (t, 0) == var);
t = TREE_OPERAND (t, 1);
- gcc_assert (TREE_OPERAND (t, 0) == fd->v);
+ gcc_assert (TREE_OPERAND (t, 0) == var);
switch (TREE_CODE (t))
{
case PLUS_EXPR:
@@ -347,8 +348,11 @@ get_ws_args_for (tree ws_stmt)
}
else if (TREE_CODE (ws_stmt) == OMP_SECTIONS)
{
- basic_block bb = bb_for_stmt (ws_stmt);
- t = build_int_cst (unsigned_type_node, EDGE_COUNT (bb->succs));
+ /* Number of sections is equal to the number of edges from the
+ OMP_SECTIONS_SWITCH statement, except for the one to the exit
+ of the sections region. */
+ basic_block bb = single_succ (bb_for_stmt (ws_stmt));
+ t = build_int_cst (unsigned_type_node, EDGE_COUNT (bb->succs) - 1);
t = tree_cons (NULL, t, NULL);
return t;
}
@@ -505,10 +509,10 @@ use_pointer_for_field (tree decl, bool shared_p)
return false;
}
-/* Construct a new automatic decl similar to VAR. */
+/* Create a new VAR_DECL and copy information from VAR to it. */
-static tree
-omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
+tree
+copy_var_decl (tree var, tree name, tree type)
{
tree copy = build_decl (VAR_DECL, name, type);
@@ -516,10 +520,21 @@ omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
DECL_COMPLEX_GIMPLE_REG_P (copy) = DECL_COMPLEX_GIMPLE_REG_P (var);
DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (var);
DECL_IGNORED_P (copy) = DECL_IGNORED_P (var);
+ DECL_CONTEXT (copy) = DECL_CONTEXT (var);
TREE_USED (copy) = 1;
- DECL_CONTEXT (copy) = current_function_decl;
DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
+ return copy;
+}
+
+/* Construct a new automatic decl similar to VAR. */
+
+static tree
+omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
+{
+ tree copy = copy_var_decl (var, name, type);
+
+ DECL_CONTEXT (copy) = current_function_decl;
TREE_CHAIN (copy) = ctx->block_vars;
ctx->block_vars = copy;
@@ -1423,14 +1438,14 @@ scan_omp (tree *stmt_p, omp_context *ctx)
/* Build a call to GOMP_barrier. */
-static void
-build_omp_barrier (tree *stmt_list)
+static tree
+build_omp_barrier (void)
{
tree t;
t = built_in_decls[BUILT_IN_GOMP_BARRIER];
t = build_function_call_expr (t, NULL);
- gimplify_and_add (t, stmt_list);
+ return t;
}
/* If a context was created for STMT when it was scanned, return it. */
@@ -1828,7 +1843,7 @@ lower_rec_input_clauses (tree clauses, tree *ilist, tree *dlist,
lastprivate clauses we need to ensure the lastprivate copying
happens after firstprivate copying in all threads. */
if (copyin_by_ref || lastprivate_firstprivate)
- build_omp_barrier (ilist);
+ gimplify_and_add (build_omp_barrier (), ilist);
}
@@ -2154,12 +2169,11 @@ static void
expand_parallel_call (struct omp_region *region, basic_block bb,
tree entry_stmt, tree ws_args)
{
- tree t, args, val, cond, c, list, clauses;
+ tree t, args, val, cond, c, clauses;
block_stmt_iterator si;
int start_ix;
clauses = OMP_PARALLEL_CLAUSES (entry_stmt);
- push_gimplify_context ();
/* Determine what flavor of GOMP_parallel_start we will be
emitting. */
@@ -2205,15 +2219,28 @@ expand_parallel_call (struct omp_region *region, basic_block bb,
cond = gimple_boolify (cond);
if (integer_zerop (val))
- val = build2 (EQ_EXPR, unsigned_type_node, cond,
- build_int_cst (TREE_TYPE (cond), 0));
+ val = fold_build2 (EQ_EXPR, unsigned_type_node, cond,
+ build_int_cst (TREE_TYPE (cond), 0));
else
{
basic_block cond_bb, then_bb, else_bb;
- edge e;
- tree t, then_lab, else_lab, tmp;
+ edge e, e_then, e_else;
+ tree t, then_lab, else_lab, tmp_then, tmp_else, tmp_join, tmp_var;
+
+ tmp_var = create_tmp_var (TREE_TYPE (val), NULL);
+ if (in_ssa_p)
+ {
+ tmp_then = make_ssa_name (tmp_var, NULL_TREE);
+ tmp_else = make_ssa_name (tmp_var, NULL_TREE);
+ tmp_join = make_ssa_name (tmp_var, NULL_TREE);
+ }
+ else
+ {
+ tmp_then = tmp_var;
+ tmp_else = tmp_var;
+ tmp_join = tmp_var;
+ }
- tmp = create_tmp_var (TREE_TYPE (val), NULL);
e = split_block (bb, NULL);
cond_bb = e->src;
bb = e->dest;
@@ -2237,31 +2264,43 @@ expand_parallel_call (struct omp_region *region, basic_block bb,
si = bsi_start (then_bb);
t = build1 (LABEL_EXPR, void_type_node, then_lab);
bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
- t = build2 (MODIFY_EXPR, void_type_node, tmp, val);
+ t = build2 (MODIFY_EXPR, void_type_node, tmp_then, val);
bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
+ if (in_ssa_p)
+ SSA_NAME_DEF_STMT (tmp_then) = t;
si = bsi_start (else_bb);
t = build1 (LABEL_EXPR, void_type_node, else_lab);
bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
- t = build2 (MODIFY_EXPR, void_type_node, tmp,
+ t = build2 (MODIFY_EXPR, void_type_node, tmp_else,
build_int_cst (unsigned_type_node, 1));
bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
+ if (in_ssa_p)
+ SSA_NAME_DEF_STMT (tmp_else) = t;
make_edge (cond_bb, then_bb, EDGE_TRUE_VALUE);
make_edge (cond_bb, else_bb, EDGE_FALSE_VALUE);
- make_edge (then_bb, bb, EDGE_FALLTHRU);
- make_edge (else_bb, bb, EDGE_FALLTHRU);
+ e_then = make_edge (then_bb, bb, EDGE_FALLTHRU);
+ e_else = make_edge (else_bb, bb, EDGE_FALLTHRU);
- val = tmp;
+ if (in_ssa_p)
+ {
+ tree phi = create_phi_node (tmp_join, bb);
+ SSA_NAME_DEF_STMT (tmp_join) = phi;
+ add_phi_arg (phi, tmp_then, e_then);
+ add_phi_arg (phi, tmp_else, e_else);
+ }
+
+ val = tmp_join;
}
- list = NULL_TREE;
- val = get_formal_tmp_var (val, &list);
si = bsi_start (bb);
- bsi_insert_after (&si, list, BSI_CONTINUE_LINKING);
+ val = force_gimple_operand_bsi (&si, val, true, NULL_TREE,
+ false, BSI_CONTINUE_LINKING);
}
- list = NULL_TREE;
+ si = bsi_last (bb);
+
args = tree_cons (NULL, val, NULL);
t = OMP_PARALLEL_DATA_ARG (entry_stmt);
if (t == NULL)
@@ -2277,25 +2316,24 @@ expand_parallel_call (struct omp_region *region, basic_block bb,
t = built_in_decls[start_ix];
t = build_function_call_expr (t, args);
- gimplify_and_add (t, &list);
+ force_gimple_operand_bsi (&si, t, true, NULL_TREE,
+ false, BSI_CONTINUE_LINKING);
t = OMP_PARALLEL_DATA_ARG (entry_stmt);
if (t == NULL)
t = null_pointer_node;
else
t = build_fold_addr_expr (t);
+
args = tree_cons (NULL, t, NULL);
t = build_function_call_expr (OMP_PARALLEL_FN (entry_stmt), args);
- gimplify_and_add (t, &list);
+ force_gimple_operand_bsi (&si, t, true, NULL_TREE,
+ false, BSI_CONTINUE_LINKING);
t = built_in_decls[BUILT_IN_GOMP_PARALLEL_END];
t = build_function_call_expr (t, NULL);
- gimplify_and_add (t, &list);
-
- si = bsi_last (bb);
- bsi_insert_after (&si, list, BSI_CONTINUE_LINKING);
-
- pop_gimplify_context (NULL_TREE);
+ force_gimple_operand_bsi (&si, t, true, NULL_TREE,
+ false, BSI_CONTINUE_LINKING);
}
@@ -2474,6 +2512,7 @@ expand_omp_parallel (struct omp_region *region)
{
basic_block entry_succ_bb = single_succ (entry_bb);
block_stmt_iterator si;
+ tree parcopy_stmt = NULL_TREE, arg, narg;
for (si = bsi_start (entry_succ_bb); ; bsi_next (&si))
{
@@ -2490,13 +2529,32 @@ expand_omp_parallel (struct omp_region *region)
&& TREE_OPERAND (arg, 0)
== OMP_PARALLEL_DATA_ARG (entry_stmt))
{
- if (TREE_OPERAND (stmt, 0) == DECL_ARGUMENTS (child_fn))
- bsi_remove (&si, true);
- else
- TREE_OPERAND (stmt, 1) = DECL_ARGUMENTS (child_fn);
+ parcopy_stmt = stmt;
break;
}
}
+
+ gcc_assert (parcopy_stmt != NULL_TREE);
+ arg = DECL_ARGUMENTS (child_fn);
+
+ if (!in_ssa_p)
+ {
+ if (TREE_OPERAND (parcopy_stmt, 0) == arg)
+ bsi_remove (&si, true);
+ else
+ TREE_OPERAND (parcopy_stmt, 1) = arg;
+ }
+ else
+ {
+ /* If we are in ssa form, we must load the value from the default
+ definition of the argument. That should not be defined now,
+ since the argument is not used uninitialized. */
+ gcc_assert (default_def (arg) == NULL);
+ narg = make_ssa_name (arg, build_empty_stmt ());
+ set_default_def (arg, narg);
+ TREE_OPERAND (parcopy_stmt, 1) = narg;
+ update_stmt (parcopy_stmt);
+ }
}
/* Declare local variables needed in CHILD_CFUN. */
@@ -2504,10 +2562,7 @@ expand_omp_parallel (struct omp_region *region)
BLOCK_VARS (block) = list2chain (child_cfun->unexpanded_var_list);
DECL_SAVED_TREE (child_fn) = single_succ (entry_bb)->stmt_list;
- /* Reset DECL_CONTEXT on locals and function arguments. */
- for (t = BLOCK_VARS (block); t; t = TREE_CHAIN (t))
- DECL_CONTEXT (t) = child_fn;
-
+ /* Reset DECL_CONTEXT on function arguments. */
for (t = DECL_ARGUMENTS (child_fn); t; t = TREE_CHAIN (t))
DECL_CONTEXT (t) = child_fn;
@@ -2521,12 +2576,6 @@ expand_omp_parallel (struct omp_region *region)
entry_bb = e->dest;
single_succ_edge (entry_bb)->flags = EDGE_FALLTHRU;
- /* Move the parallel region into CHILD_CFUN. */
- new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb);
- if (exit_bb)
- single_succ_edge (new_bb)->flags = EDGE_FALLTHRU;
- cgraph_add_new_function (child_fn);
-
/* Convert OMP_RETURN into a RETURN_EXPR. */
if (exit_bb)
{
@@ -2537,12 +2586,25 @@ expand_omp_parallel (struct omp_region *region)
bsi_insert_after (&si, t, TSI_SAME_STMT);
bsi_remove (&si, true);
}
+
+ /* Move the parallel region into CHILD_CFUN. */
+ new_bb = move_sese_region_to_fn (child_cfun, entry_bb, exit_bb);
+ if (exit_bb)
+ single_succ_edge (new_bb)->flags = EDGE_FALLTHRU;
+
+ /* Rewrite the child function out of ssa, and inform the callgraph about
+ the new function. */
+ if (in_ssa_p)
+ go_out_of_ssa (child_fn);
+ cgraph_add_new_function (child_fn);
}
/* Emit a library call to launch the children threads. */
expand_parallel_call (region, new_bb, entry_stmt, ws_args);
-}
+ /* Fix up the ssa form. */
+ update_ssa (TODO_update_ssa_only_virtuals);
+}
/* A subroutine of expand_omp_for. Generate code for a parallel
loop with any schedule. Given parameters:
@@ -2565,7 +2627,7 @@ expand_omp_parallel (struct omp_region *region)
L3:
If this is a combined omp parallel loop, instead of the call to
- GOMP_loop_foo_start, we emit 'goto L3'. */
+ GOMP_loop_foo_start, we emit 'goto L2'. */
static void
expand_omp_for_generic (struct omp_region *region,
@@ -2575,8 +2637,8 @@ expand_omp_for_generic (struct omp_region *region,
{
tree l0, l1, l2, l3;
tree type, istart0, iend0, iend;
- tree t, args, list;
- basic_block entry_bb, cont_bb, exit_bb, l0_bb, l1_bb, l2_bb, l3_bb;
+ tree t, args, vmain, vback;
+ basic_block entry_bb, cont_bb, l0_bb, l1_bb, l2_bb, l3_bb, exit_bb;
block_stmt_iterator si;
bool in_combined_parallel = is_combined_parallel (region);
@@ -2584,16 +2646,26 @@ expand_omp_for_generic (struct omp_region *region,
istart0 = create_tmp_var (long_integer_type_node, ".istart0");
iend0 = create_tmp_var (long_integer_type_node, ".iend0");
- iend = create_tmp_var (type, NULL);
TREE_ADDRESSABLE (istart0) = 1;
TREE_ADDRESSABLE (iend0) = 1;
+ if (in_ssa_p)
+ {
+ add_referenced_var (istart0);
+ add_referenced_var (iend0);
+ mark_call_clobbered (istart0, ESCAPE_TO_CALL);
+ mark_call_clobbered (iend0, ESCAPE_TO_CALL);
+ }
entry_bb = region->entry;
- l0_bb = create_empty_bb (entry_bb);
- l1_bb = single_succ (entry_bb);
cont_bb = region->cont;
+ gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
+ gcc_assert (BRANCH_EDGE (entry_bb)->dest == FALLTHRU_EDGE (cont_bb)->dest);
+ l0_bb = split_edge (FALLTHRU_EDGE (entry_bb));
+ l1_bb = single_succ (l0_bb);
l2_bb = create_empty_bb (cont_bb);
- l3_bb = single_succ (cont_bb);
+ gcc_assert (BRANCH_EDGE (cont_bb)->dest == l1_bb);
+ gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
+ l3_bb = FALLTHRU_EDGE (cont_bb)->dest;
exit_bb = region->exit;
l0 = tree_block_label (l0_bb);
@@ -2607,7 +2679,7 @@ expand_omp_for_generic (struct omp_region *region,
{
/* If this is not a combined parallel loop, emit a call to
GOMP_loop_foo_start in ENTRY_BB. */
- list = alloc_stmt_list ();
+
t = build_fold_addr_expr (iend0);
args = tree_cons (NULL, t, NULL);
t = build_fold_addr_expr (istart0);
@@ -2624,63 +2696,70 @@ expand_omp_for_generic (struct omp_region *region,
t = fold_convert (long_integer_type_node, fd->n1);
args = tree_cons (NULL, t, args);
t = build_function_call_expr (built_in_decls[start_fn], args);
- t = get_formal_tmp_var (t, &list);
+ t = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
+ true, BSI_SAME_STMT);
+
t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&l0),
build_and_jump (&l3));
- append_to_statement_list (t, &list);
- bsi_insert_after (&si, list, BSI_SAME_STMT);
+ bsi_insert_after (&si, t, BSI_SAME_STMT);
}
bsi_remove (&si, true);
/* Iteration setup for sequential loop goes in L0_BB. */
- list = alloc_stmt_list ();
+ si = bsi_start (l0_bb);
t = fold_convert (type, istart0);
+ t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
+ false, BSI_CONTINUE_LINKING);
t = build2 (MODIFY_EXPR, void_type_node, fd->v, t);
- gimplify_and_add (t, &list);
+ bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
+ if (in_ssa_p)
+ {
+ SSA_NAME_DEF_STMT (fd->v) = t;
+ mark_new_vars_to_rename (t);
+ }
t = fold_convert (type, iend0);
- t = build2 (MODIFY_EXPR, void_type_node, iend, t);
- gimplify_and_add (t, &list);
-
- si = bsi_start (l0_bb);
- bsi_insert_after (&si, list, BSI_CONTINUE_LINKING);
+ iend = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
+ false, BSI_CONTINUE_LINKING);
/* Code to control the increment and predicate for the sequential
loop goes in the first half of EXIT_BB (we split EXIT_BB so
that we can inherit all the edges going out of the loop
body). */
- list = alloc_stmt_list ();
-
- t = build2 (PLUS_EXPR, type, fd->v, fd->step);
- t = build2 (MODIFY_EXPR, void_type_node, fd->v, t);
- gimplify_and_add (t, &list);
-
- t = build2 (fd->cond_code, boolean_type_node, fd->v, iend);
- t = get_formal_tmp_var (t, &list);
+ si = bsi_last (cont_bb);
+ t = bsi_stmt (si);
+ gcc_assert (TREE_CODE (t) == OMP_CONTINUE);
+ vmain = TREE_OPERAND (t, 1);
+ vback = TREE_OPERAND (t, 0);
+
+ t = fold_build2 (PLUS_EXPR, type, vmain, fd->step);
+ t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
+ true, BSI_SAME_STMT);
+ t = build2 (MODIFY_EXPR, void_type_node, vback, t);
+ bsi_insert_before (&si, t, BSI_SAME_STMT);
+ if (in_ssa_p)
+ SSA_NAME_DEF_STMT (vback) = t;
+
+ t = build2 (fd->cond_code, boolean_type_node, vback, iend);
t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&l1),
build_and_jump (&l2));
- append_to_statement_list (t, &list);
+ bsi_insert_before (&si, t, BSI_SAME_STMT);
- si = bsi_last (cont_bb);
- bsi_insert_after (&si, list, BSI_SAME_STMT);
- gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_CONTINUE);
+ /* Remove OMP_CONTINUE. */
bsi_remove (&si, true);
/* Emit code to get the next parallel iteration in L2_BB. */
- list = alloc_stmt_list ();
-
+ si = bsi_start (l2_bb);
t = build_fold_addr_expr (iend0);
args = tree_cons (NULL, t, NULL);
t = build_fold_addr_expr (istart0);
args = tree_cons (NULL, t, args);
t = build_function_call_expr (built_in_decls[next_fn], args);
- t = get_formal_tmp_var (t, &list);
+ t = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
+ false, BSI_CONTINUE_LINKING);
t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&l0),
build_and_jump (&l3));
- append_to_statement_list (t, &list);
-
- si = bsi_start (l2_bb);
- bsi_insert_after (&si, list, BSI_CONTINUE_LINKING);
+ bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
/* Add the loop cleanup function. */
si = bsi_last (exit_bb);
@@ -2690,22 +2769,24 @@ expand_omp_for_generic (struct omp_region *region,
t = built_in_decls[BUILT_IN_GOMP_LOOP_END];
t = build_function_call_expr (t, NULL);
bsi_insert_after (&si, t, BSI_SAME_STMT);
+ if (in_ssa_p)
+ mark_new_vars_to_rename (t);
bsi_remove (&si, true);
/* Connect the new blocks. */
- remove_edge (single_succ_edge (entry_bb));
if (in_combined_parallel)
- make_edge (entry_bb, l2_bb, EDGE_FALLTHRU);
+ {
+ remove_edge (BRANCH_EDGE (entry_bb));
+ redirect_edge_and_branch (single_succ_edge (entry_bb), l2_bb);
+ }
else
{
- make_edge (entry_bb, l0_bb, EDGE_TRUE_VALUE);
- make_edge (entry_bb, l3_bb, EDGE_FALSE_VALUE);
+ find_edge (entry_bb, l0_bb)->flags = EDGE_TRUE_VALUE;
+ find_edge (entry_bb, l3_bb)->flags = EDGE_FALSE_VALUE;
}
- make_edge (l0_bb, l1_bb, EDGE_FALLTHRU);
-
- remove_edge (single_succ_edge (cont_bb));
- make_edge (cont_bb, l1_bb, EDGE_TRUE_VALUE);
+ find_edge (cont_bb, l1_bb)->flags = EDGE_TRUE_VALUE;
+ remove_edge (find_edge (cont_bb, l3_bb));
make_edge (cont_bb, l2_bb, EDGE_FALSE_VALUE);
make_edge (l2_bb, l0_bb, EDGE_TRUE_VALUE);
@@ -2719,6 +2800,17 @@ expand_omp_for_generic (struct omp_region *region,
recount_dominator (CDI_DOMINATORS, l0_bb));
set_immediate_dominator (CDI_DOMINATORS, l1_bb,
recount_dominator (CDI_DOMINATORS, l1_bb));
+
+ /* FIXME: We marked ISTART0 and IEND0 as call clobbered. The problem
+ is that this causes them to appear on each call, making it
+ impossible to get virtual ssa form correct for them without calling
+ mark_new_vars_to_rename for each call in the function. This is annoying
+ and potentially slow. Also, in case OMP_PARALLEL is in this function
+ around the OMP_FOR construct, the references to ISTART0 and IEND0
+ will remain in the virtual operands even after the body of the
+ OMP_PARALLEL construct is split into new function. This does not
+ seem to cause any problem at the moment, but it looks dangerous. */
+ mark_call_virtual_operands ();
}
@@ -2739,9 +2831,9 @@ expand_omp_for_generic (struct omp_region *region,
q += (q * nthreads != n);
s0 = q * threadid;
e0 = min(s0 + q, n);
+ V = s0 * STEP + N1;
if (s0 >= e0) goto L2; else goto L0;
L0:
- V = s0 * STEP + N1;
e = e0 * STEP + N1;
L1:
BODY;
@@ -2755,19 +2847,24 @@ expand_omp_for_static_nochunk (struct omp_region *region,
struct omp_for_data *fd)
{
tree l0, l1, l2, n, q, s0, e0, e, t, nthreads, threadid;
- tree type, utype, list;
- basic_block entry_bb, exit_bb, seq_start_bb, body_bb, cont_bb;
+ tree type, utype;
+ basic_block entry_bb, seq_start_bb, body_bb, cont_bb, exit_bb;
basic_block fin_bb;
block_stmt_iterator si;
+ tree vmain, vback;
type = TREE_TYPE (fd->v);
utype = lang_hooks.types.unsigned_type (type);
entry_bb = region->entry;
- seq_start_bb = create_empty_bb (entry_bb);
- body_bb = single_succ (entry_bb);
cont_bb = region->cont;
- fin_bb = single_succ (cont_bb);
+ gcc_assert (EDGE_COUNT (entry_bb->succs) == 2);
+ gcc_assert (BRANCH_EDGE (entry_bb)->dest == FALLTHRU_EDGE (cont_bb)->dest);
+ seq_start_bb = split_edge (FALLTHRU_EDGE (entry_bb));
+ body_bb = single_succ (seq_start_bb);
+ gcc_assert (BRANCH_EDGE (cont_bb)->dest == body_bb);
+ gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
+ fin_bb = FALLTHRU_EDGE (cont_bb)->dest;
exit_bb = region->exit;
l0 = tree_block_label (seq_start_bb);
@@ -2775,29 +2872,35 @@ expand_omp_for_static_nochunk (struct omp_region *region,
l2 = tree_block_label (fin_bb);
/* Iteration space partitioning goes in ENTRY_BB. */
- list = alloc_stmt_list ();
+ si = bsi_last (entry_bb);
+ gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_FOR);
t = built_in_decls[BUILT_IN_OMP_GET_NUM_THREADS];
t = build_function_call_expr (t, NULL);
t = fold_convert (utype, t);
- nthreads = get_formal_tmp_var (t, &list);
+ nthreads = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
+ true, BSI_SAME_STMT);
t = built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM];
t = build_function_call_expr (t, NULL);
t = fold_convert (utype, t);
- threadid = get_formal_tmp_var (t, &list);
+ threadid = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
+ true, BSI_SAME_STMT);
- fd->n1 = fold_convert (type, fd->n1);
- if (!is_gimple_val (fd->n1))
- fd->n1 = get_formal_tmp_var (fd->n1, &list);
+ fd->n1 = force_gimple_operand_bsi (&si,
+ fold_convert (type, fd->n1),
+ true, NULL_TREE,
+ true, BSI_SAME_STMT);
- fd->n2 = fold_convert (type, fd->n2);
- if (!is_gimple_val (fd->n2))
- fd->n2 = get_formal_tmp_var (fd->n2, &list);
+ fd->n2 = force_gimple_operand_bsi (&si,
+ fold_convert (type, fd->n2),
+ true, NULL_TREE,
+ true, BSI_SAME_STMT);
- fd->step = fold_convert (type, fd->step);
- if (!is_gimple_val (fd->step))
- fd->step = get_formal_tmp_var (fd->step, &list);
+ fd->step = force_gimple_operand_bsi (&si,
+ fold_convert (type, fd->step),
+ true, NULL_TREE,
+ true, BSI_SAME_STMT);
t = build_int_cst (type, (fd->cond_code == LT_EXPR ? -1 : 1));
t = fold_build2 (PLUS_EXPR, type, fd->step, t);
@@ -2805,89 +2908,85 @@ expand_omp_for_static_nochunk (struct omp_region *region,
t = fold_build2 (MINUS_EXPR, type, t, fd->n1);
t = fold_build2 (TRUNC_DIV_EXPR, type, t, fd->step);
t = fold_convert (utype, t);
- if (is_gimple_val (t))
- n = t;
- else
- n = get_formal_tmp_var (t, &list);
+ n = force_gimple_operand_bsi (&si, t, true, NULL_TREE, true, BSI_SAME_STMT);
- t = build2 (TRUNC_DIV_EXPR, utype, n, nthreads);
- q = get_formal_tmp_var (t, &list);
+ t = fold_build2 (TRUNC_DIV_EXPR, utype, n, nthreads);
+ q = force_gimple_operand_bsi (&si, t, true, NULL_TREE, true, BSI_SAME_STMT);
- t = build2 (MULT_EXPR, utype, q, nthreads);
- t = build2 (NE_EXPR, utype, t, n);
- t = build2 (PLUS_EXPR, utype, q, t);
- q = get_formal_tmp_var (t, &list);
+ t = fold_build2 (MULT_EXPR, utype, q, nthreads);
+ t = fold_build2 (NE_EXPR, utype, t, n);
+ t = fold_build2 (PLUS_EXPR, utype, q, t);
+ q = force_gimple_operand_bsi (&si, t, true, NULL_TREE, true, BSI_SAME_STMT);
t = build2 (MULT_EXPR, utype, q, threadid);
- s0 = get_formal_tmp_var (t, &list);
+ s0 = force_gimple_operand_bsi (&si, t, true, NULL_TREE, true, BSI_SAME_STMT);
- t = build2 (PLUS_EXPR, utype, s0, q);
- t = build2 (MIN_EXPR, utype, t, n);
- e0 = get_formal_tmp_var (t, &list);
+ t = fold_build2 (PLUS_EXPR, utype, s0, q);
+ t = fold_build2 (MIN_EXPR, utype, t, n);
+ e0 = force_gimple_operand_bsi (&si, t, true, NULL_TREE, true, BSI_SAME_STMT);
+
+ t = fold_convert (type, s0);
+ t = fold_build2 (MULT_EXPR, type, t, fd->step);
+ t = fold_build2 (PLUS_EXPR, type, t, fd->n1);
+ t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
+ true, BSI_SAME_STMT);
+ t = build2 (MODIFY_EXPR, void_type_node, fd->v, t);
+ bsi_insert_before (&si, t, BSI_SAME_STMT);
+ if (in_ssa_p)
+ SSA_NAME_DEF_STMT (fd->v) = t;
t = build2 (GE_EXPR, boolean_type_node, s0, e0);
t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&l2),
build_and_jump (&l0));
- append_to_statement_list (t, &list);
+ bsi_insert_before (&si, t, BSI_SAME_STMT);
- si = bsi_last (entry_bb);
- gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_FOR);
- bsi_insert_after (&si, list, BSI_SAME_STMT);
+ /* Remove the OMP_FOR statement. */
bsi_remove (&si, true);
/* Setup code for sequential iteration goes in SEQ_START_BB. */
- list = alloc_stmt_list ();
-
- t = fold_convert (type, s0);
- t = build2 (MULT_EXPR, type, t, fd->step);
- t = build2 (PLUS_EXPR, type, t, fd->n1);
- t = build2 (MODIFY_EXPR, void_type_node, fd->v, t);
- gimplify_and_add (t, &list);
+ si = bsi_start (seq_start_bb);
t = fold_convert (type, e0);
- t = build2 (MULT_EXPR, type, t, fd->step);
- t = build2 (PLUS_EXPR, type, t, fd->n1);
- e = get_formal_tmp_var (t, &list);
-
- si = bsi_start (seq_start_bb);
- bsi_insert_after (&si, list, BSI_CONTINUE_LINKING);
+ t = fold_build2 (MULT_EXPR, type, t, fd->step);
+ t = fold_build2 (PLUS_EXPR, type, t, fd->n1);
+ e = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
+ false, BSI_CONTINUE_LINKING);
/* The code controlling the sequential loop replaces the OMP_CONTINUE. */
- list = alloc_stmt_list ();
-
- t = build2 (PLUS_EXPR, type, fd->v, fd->step);
- t = build2 (MODIFY_EXPR, void_type_node, fd->v, t);
- gimplify_and_add (t, &list);
-
- t = build2 (fd->cond_code, boolean_type_node, fd->v, e);
- t = get_formal_tmp_var (t, &list);
+ si = bsi_last (cont_bb);
+ t = bsi_stmt (si);
+ gcc_assert (TREE_CODE (t) == OMP_CONTINUE);
+ vmain = TREE_OPERAND (t, 1);
+ vback = TREE_OPERAND (t, 0);
+
+ t = fold_build2 (PLUS_EXPR, type, vmain, fd->step);
+ t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
+ true, BSI_SAME_STMT);
+ t = build2 (MODIFY_EXPR, void_type_node, vback, t);
+ bsi_insert_before (&si, t, BSI_SAME_STMT);
+ if (in_ssa_p)
+ SSA_NAME_DEF_STMT (vback) = t;
+
+ t = build2 (fd->cond_code, boolean_type_node, vback, e);
t = build3 (COND_EXPR, void_type_node, t, build_and_jump (&l1),
build_and_jump (&l2));
- append_to_statement_list (t, &list);
+ bsi_insert_before (&si, t, BSI_SAME_STMT);
- si = bsi_last (cont_bb);
- gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_CONTINUE);
- bsi_insert_after (&si, list, BSI_SAME_STMT);
+ /* Remove the OMP_CONTINUE statement. */
bsi_remove (&si, true);
/* Replace the OMP_RETURN with a barrier, or nothing. */
si = bsi_last (exit_bb);
if (!OMP_RETURN_NOWAIT (bsi_stmt (si)))
- {
- list = alloc_stmt_list ();
- build_omp_barrier (&list);
- bsi_insert_after (&si, list, BSI_SAME_STMT);
- }
+ force_gimple_operand_bsi (&si, build_omp_barrier (), false, NULL_TREE,
+ false, BSI_SAME_STMT);
bsi_remove (&si, true);
/* Connect all the blocks. */
- make_edge (seq_start_bb, body_bb, EDGE_FALLTHRU);
-
- remove_edge (single_succ_edge (entry_bb));
- make_edge (entry_bb, fin_bb, EDGE_TRUE_VALUE);
- make_edge (entry_bb, seq_start_bb, EDGE_FALSE_VALUE);
+ find_edge (entry_bb, seq_start_bb)->flags = EDGE_FALSE_VALUE;
+ find_edge (entry_bb, fin_bb)->flags = EDGE_TRUE_VALUE;
- make_edge (cont_bb, body_bb, EDGE_TRUE_VALUE);
+ find_edge (cont_bb, body_bb)->flags = EDGE_TRUE_VALUE;
find_edge (cont_bb, fin_bb)->flags = EDGE_FALSE_VALUE;
set_immediate_dominator (CDI_DOMINATORS, seq_start_bb, entry_bb);
@@ -2912,6 +3011,9 @@ expand_omp_for_static_nochunk (struct omp_region *region,
adj = STEP + 1;
n = (adj + N2 - N1) / STEP;
trip = 0;
+ V = threadid * CHUNK * STEP + N1; -- this extra definition of V is
+ here so that V is defined
+ if the loop is not entered
L0:
s0 = (trip * nthreads + threadid) * CHUNK;
e0 = min(s0 + CHUNK, n);
@@ -2932,26 +3034,34 @@ expand_omp_for_static_nochunk (struct omp_region *region,
static void
expand_omp_for_static_chunk (struct omp_region *region, struct omp_for_data *fd)
{
- tree l0, l1, l2, l3, l4, n, s0, e0, e, t;
- tree trip, nthreads, threadid;
- tree type, utype;
- basic_block entry_bb, exit_bb, body_bb, seq_start_bb, iter_part_bb;
- basic_block trip_update_bb, cont_bb, fin_bb;
- tree list;
+ tree l0, l1, l2, l3, l4, n, s0, e0, e, t, phi, nphi, args;
+ tree trip_var, trip_init, trip_main, trip_back, nthreads, threadid;
+ tree type, utype, cont, v_main, v_back, v_extra;
+ basic_block entry_bb, body_bb, seq_start_bb, iter_part_bb;
+ basic_block trip_update_bb, cont_bb, fin_bb, exit_bb;
block_stmt_iterator si;
+ edge se, re, ene;
type = TREE_TYPE (fd->v);
utype = lang_hooks.types.unsigned_type (type);
entry_bb = region->entry;
- iter_part_bb = create_empty_bb (entry_bb);
- seq_start_bb = create_empty_bb (iter_part_bb);
- body_bb = single_succ (entry_bb);
+ se = split_block (entry_bb, last_stmt (entry_bb));
+ entry_bb = se->src;
+ iter_part_bb = se->dest;
cont_bb = region->cont;
- trip_update_bb = create_empty_bb (cont_bb);
- fin_bb = single_succ (cont_bb);
+ gcc_assert (EDGE_COUNT (iter_part_bb->succs) == 2);
+ gcc_assert (BRANCH_EDGE (iter_part_bb)->dest
+ == FALLTHRU_EDGE (cont_bb)->dest);
+ seq_start_bb = split_edge (FALLTHRU_EDGE (iter_part_bb));
+ body_bb = single_succ (seq_start_bb);
+ gcc_assert (BRANCH_EDGE (cont_bb)->dest == body_bb);
+ gcc_assert (EDGE_COUNT (cont_bb->succs) == 2);
+ fin_bb = FALLTHRU_EDGE (cont_bb)->dest;
+ trip_update_bb = split_edge (FALLTHRU_EDGE (cont_bb));
exit_bb = region->exit;
+ tree_block_label (entry_bb);
l0 = tree_block_label (iter_part_bb);
l1 = tree_block_label (seq_start_bb);
l2 = tree_block_label (body_bb);
@@ -2959,33 +3069,35 @@ expand_omp_for_static_chunk (struct omp_region *region, struct omp_for_data *fd)
l4 = tree_block_label (fin_bb);
/* Trip and adjustment setup goes in ENTRY_BB. */
- list = alloc_stmt_list ();
+ si = bsi_last (entry_bb);
+ gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_FOR);
t = built_in_decls[BUILT_IN_OMP_GET_NUM_THREADS];
t = build_function_call_expr (t, NULL);
t = fold_convert (utype, t);
- nthreads = get_formal_tmp_var (t, &list);
+ nthreads = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
+ true, BSI_SAME_STMT);
t = built_in_decls[BUILT_IN_OMP_GET_THREAD_NUM];
t = build_function_call_expr (t, NULL);
t = fold_convert (utype, t);
- threadid = get_formal_tmp_var (t, &list);
-
- fd->n1 = fold_convert (type, fd->n1);
- if (!is_gimple_val (fd->n1))
- fd->n1 = get_formal_tmp_var (fd->n1, &list);
-
- fd->n2 = fold_convert (type, fd->n2);
- if (!is_gimple_val (fd->n2))
- fd->n2 = get_formal_tmp_var (fd->n2, &list);
-
- fd->step = fold_convert (type, fd->step);
- if (!is_gimple_val (fd->step))
- fd->step = get_formal_tmp_var (fd->step, &list);
-
- fd->chunk_size = fold_convert (utype, fd->chunk_size);
- if (!is_gimple_val (fd->chunk_size))
- fd->chunk_size = get_formal_tmp_var (fd->chunk_size, &list);
+ threadid = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
+ true, BSI_SAME_STMT);
+
+ fd->n1 = force_gimple_operand_bsi (&si, fold_convert (type, fd->n1),
+ true, NULL_TREE,
+ true, BSI_SAME_STMT);
+ fd->n2 = force_gimple_operand_bsi (&si, fold_convert (type, fd->n2),
+ true, NULL_TREE,
+ true, BSI_SAME_STMT);
+ fd->step = force_gimple_operand_bsi (&si, fold_convert (type, fd->step),
+ true, NULL_TREE,
+ true, BSI_SAME_STMT);
+ fd->chunk_size
+ = force_gimple_operand_bsi (&si, fold_convert (utype,
+ fd->chunk_size),
+ true, NULL_TREE,
+ true, BSI_SAME_STMT);
t = build_int_cst (type, (fd->cond_code == LT_EXPR ? -1 : 1));
t = fold_build2 (PLUS_EXPR, type, fd->step, t);
@@ -2993,110 +3105,166 @@ expand_omp_for_static_chunk (struct omp_region *region, struct omp_for_data *fd)
t = fold_build2 (MINUS_EXPR, type, t, fd->n1);
t = fold_build2 (TRUNC_DIV_EXPR, type, t, fd->step);
t = fold_convert (utype, t);
- if (is_gimple_val (t))
- n = t;
- else
- n = get_formal_tmp_var (t, &list);
-
- t = build_int_cst (utype, 0);
- trip = get_initialized_tmp_var (t, &list, NULL);
+ n = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
+ true, BSI_SAME_STMT);
- si = bsi_last (entry_bb);
- gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_FOR);
- bsi_insert_after (&si, list, BSI_SAME_STMT);
+ trip_var = create_tmp_var (utype, ".trip");
+ add_referenced_var (trip_var);
+ if (in_ssa_p)
+ {
+ trip_init = make_ssa_name (trip_var, NULL_TREE);
+ trip_main = make_ssa_name (trip_var, NULL_TREE);
+ trip_back = make_ssa_name (trip_var, NULL_TREE);
+ }
+ else
+ {
+ trip_init = trip_var;
+ trip_main = trip_var;
+ trip_back = trip_var;
+ }
+ t = build2 (MODIFY_EXPR, void_type_node, trip_init,
+ build_int_cst (utype, 0));
+ bsi_insert_before (&si, t, BSI_SAME_STMT);
+ if (in_ssa_p)
+ SSA_NAME_DEF_STMT (trip_init) = t;
+
+ t = fold_build2 (MULT_EXPR, utype, threadid, fd->chunk_size);
+ t = fold_convert (type, t);
+ t = fold_build2 (MULT_EXPR, type, t, fd->step);
+ t = fold_build2 (PLUS_EXPR, type, t, fd->n1);
+ v_extra = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
+ true, BSI_SAME_STMT);
+
+ /* Remove the OMP_FOR. */
bsi_remove (&si, true);
/* Iteration space partitioning goes in ITER_PART_BB. */
- list = alloc_stmt_list ();
+ si = bsi_last (iter_part_bb);
- t = build2 (MULT_EXPR, utype, trip, nthreads);
- t = build2 (PLUS_EXPR, utype, t, threadid);
- t = build2 (MULT_EXPR, utype, t, fd->chunk_size);
- s0 = get_formal_tmp_var (t, &list);
+ t = fold_build2 (MULT_EXPR, utype, trip_main, nthreads);
+ t = fold_build2 (PLUS_EXPR, utype, t, threadid);
+ t = fold_build2 (MULT_EXPR, utype, t, fd->chunk_size);
+ s0 = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
+ false, BSI_CONTINUE_LINKING);
- t = build2 (PLUS_EXPR, utype, s0, fd->chunk_size);
- t = build2 (MIN_EXPR, utype, t, n);
- e0 = get_formal_tmp_var (t, &list);
+ t = fold_build2 (PLUS_EXPR, utype, s0, fd->chunk_size);
+ t = fold_build2 (MIN_EXPR, utype, t, n);
+ e0 = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
+ false, BSI_CONTINUE_LINKING);
t = build2 (LT_EXPR, boolean_type_node, s0, n);
t = build3 (COND_EXPR, void_type_node, t,
build_and_jump (&l1), build_and_jump (&l4));
- append_to_statement_list (t, &list);
-
- si = bsi_start (iter_part_bb);
- bsi_insert_after (&si, list, BSI_CONTINUE_LINKING);
+ bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
/* Setup code for sequential iteration goes in SEQ_START_BB. */
- list = alloc_stmt_list ();
+ si = bsi_start (seq_start_bb);
t = fold_convert (type, s0);
- t = build2 (MULT_EXPR, type, t, fd->step);
- t = build2 (PLUS_EXPR, type, t, fd->n1);
+ t = fold_build2 (MULT_EXPR, type, t, fd->step);
+ t = fold_build2 (PLUS_EXPR, type, t, fd->n1);
+ t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
+ false, BSI_CONTINUE_LINKING);
t = build2 (MODIFY_EXPR, void_type_node, fd->v, t);
- gimplify_and_add (t, &list);
+ bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
+ if (in_ssa_p)
+ SSA_NAME_DEF_STMT (fd->v) = t;
t = fold_convert (type, e0);
- t = build2 (MULT_EXPR, type, t, fd->step);
- t = build2 (PLUS_EXPR, type, t, fd->n1);
- e = get_formal_tmp_var (t, &list);
-
- si = bsi_start (seq_start_bb);
- bsi_insert_after (&si, list, BSI_CONTINUE_LINKING);
+ t = fold_build2 (MULT_EXPR, type, t, fd->step);
+ t = fold_build2 (PLUS_EXPR, type, t, fd->n1);
+ e = force_gimple_operand_bsi (&si, t, true, NULL_TREE,
+ false, BSI_CONTINUE_LINKING);
/* The code controlling the sequential loop goes in CONT_BB,
replacing the OMP_CONTINUE. */
- list = alloc_stmt_list ();
-
- t = build2 (PLUS_EXPR, type, fd->v, fd->step);
- t = build2 (MODIFY_EXPR, void_type_node, fd->v, t);
- gimplify_and_add (t, &list);
-
- t = build2 (fd->cond_code, boolean_type_node, fd->v, e);
- t = get_formal_tmp_var (t, &list);
+ si = bsi_last (cont_bb);
+ cont = bsi_stmt (si);
+ gcc_assert (TREE_CODE (cont) == OMP_CONTINUE);
+ v_main = TREE_OPERAND (cont, 1);
+ v_back = TREE_OPERAND (cont, 0);
+
+ t = build2 (PLUS_EXPR, type, v_main, fd->step);
+ t = build2 (MODIFY_EXPR, void_type_node, v_back, t);
+ bsi_insert_before (&si, t, BSI_SAME_STMT);
+ if (in_ssa_p)
+ SSA_NAME_DEF_STMT (v_back) = t;
+
+ t = build2 (fd->cond_code, boolean_type_node, v_back, e);
t = build3 (COND_EXPR, void_type_node, t,
build_and_jump (&l2), build_and_jump (&l3));
- append_to_statement_list (t, &list);
-
- si = bsi_last (cont_bb);
- gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_CONTINUE);
- bsi_insert_after (&si, list, BSI_SAME_STMT);
+ bsi_insert_before (&si, t, BSI_SAME_STMT);
+
+ /* Remove OMP_CONTINUE. */
bsi_remove (&si, true);
/* Trip update code goes into TRIP_UPDATE_BB. */
- list = alloc_stmt_list ();
+ si = bsi_start (trip_update_bb);
t = build_int_cst (utype, 1);
- t = build2 (PLUS_EXPR, utype, trip, t);
- t = build2 (MODIFY_EXPR, void_type_node, trip, t);
- gimplify_and_add (t, &list);
-
- si = bsi_start (trip_update_bb);
- bsi_insert_after (&si, list, BSI_CONTINUE_LINKING);
+ t = build2 (PLUS_EXPR, utype, trip_main, t);
+ t = build2 (MODIFY_EXPR, void_type_node, trip_back, t);
+ bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
+ if (in_ssa_p)
+ SSA_NAME_DEF_STMT (trip_back) = t;
/* Replace the OMP_RETURN with a barrier, or nothing. */
si = bsi_last (exit_bb);
if (!OMP_RETURN_NOWAIT (bsi_stmt (si)))
- {
- list = alloc_stmt_list ();
- build_omp_barrier (&list);
- bsi_insert_after (&si, list, BSI_SAME_STMT);
- }
+ force_gimple_operand_bsi (&si, build_omp_barrier (), false, NULL_TREE,
+ false, BSI_SAME_STMT);
bsi_remove (&si, true);
/* Connect the new blocks. */
- remove_edge (single_succ_edge (entry_bb));
- make_edge (entry_bb, iter_part_bb, EDGE_FALLTHRU);
+ find_edge (iter_part_bb, seq_start_bb)->flags = EDGE_TRUE_VALUE;
+ find_edge (iter_part_bb, fin_bb)->flags = EDGE_FALSE_VALUE;
- make_edge (iter_part_bb, seq_start_bb, EDGE_TRUE_VALUE);
- make_edge (iter_part_bb, fin_bb, EDGE_FALSE_VALUE);
+ find_edge (cont_bb, body_bb)->flags = EDGE_TRUE_VALUE;
+ find_edge (cont_bb, trip_update_bb)->flags = EDGE_FALSE_VALUE;
- make_edge (seq_start_bb, body_bb, EDGE_FALLTHRU);
+ redirect_edge_and_branch (single_succ_edge (trip_update_bb), iter_part_bb);
- remove_edge (single_succ_edge (cont_bb));
- make_edge (cont_bb, body_bb, EDGE_TRUE_VALUE);
- make_edge (cont_bb, trip_update_bb, EDGE_FALSE_VALUE);
+ if (in_ssa_p)
+ {
+ /* When we redirect the edge from trip_update_bb to iter_part_bb, we
+ remove arguments of the phi nodes in fin_bb. We need to create
+ appropriate phi nodes in iter_part_bb instead. */
+ se = single_pred_edge (fin_bb);
+ re = single_succ_edge (trip_update_bb);
+ ene = single_succ_edge (entry_bb);
+
+ args = PENDING_STMT (re);
+ PENDING_STMT (re) = NULL_TREE;
+ for (phi = phi_nodes (fin_bb);
+ phi && args;
+ phi = PHI_CHAIN (phi), args = TREE_CHAIN (args))
+ {
+ t = PHI_RESULT (phi);
+ gcc_assert (t == TREE_PURPOSE (args));
+ nphi = create_phi_node (t, iter_part_bb);
+ SSA_NAME_DEF_STMT (t) = nphi;
+
+ t = PHI_ARG_DEF_FROM_EDGE (phi, se);
+ /* A special case -- fd->v is not yet computed in iter_part_bb, we
+ need to use v_extra instead. */
+ if (t == fd->v)
+ t = v_extra;
+ add_phi_arg (nphi, t, ene);
+ add_phi_arg (nphi, TREE_VALUE (args), re);
+ }
+ gcc_assert (!phi && !args);
+ while ((phi = phi_nodes (fin_bb)) != NULL_TREE)
+ {
+ SET_PHI_RESULT (phi, NULL_TREE);
+ remove_phi_node (phi, NULL_TREE);
+ }
- make_edge (trip_update_bb, iter_part_bb, EDGE_FALLTHRU);
+ /* Make phi node for trip. */
+ phi = create_phi_node (trip_main, iter_part_bb);
+ SSA_NAME_DEF_STMT (trip_main) = phi;
+ add_phi_arg (phi, trip_back, single_succ_edge (trip_update_bb));
+ add_phi_arg (phi, trip_init, single_succ_edge (entry_bb));
+ }
set_immediate_dominator (CDI_DOMINATORS, trip_update_bb, cont_bb);
set_immediate_dominator (CDI_DOMINATORS, iter_part_bb,
@@ -3117,8 +3285,6 @@ expand_omp_for (struct omp_region *region)
{
struct omp_for_data fd;
- push_gimplify_context ();
-
extract_omp_for_data (last_stmt (region->entry), &fd);
region->sched_kind = fd.sched_kind;
@@ -3136,8 +3302,8 @@ expand_omp_for (struct omp_region *region)
int next_ix = BUILT_IN_GOMP_LOOP_STATIC_NEXT + fn_index;
expand_omp_for_generic (region, &fd, start_ix, next_ix);
}
-
- pop_gimplify_context (NULL);
+
+ update_ssa (TODO_update_ssa_only_virtuals);
}
@@ -3166,42 +3332,39 @@ expand_omp_for (struct omp_region *region)
reduction;
If this is a combined parallel sections, replace the call to
- GOMP_sections_start with 'goto L1'. */
+ GOMP_sections_start with call to GOMP_sections_next. */
static void
expand_omp_sections (struct omp_region *region)
{
- tree label_vec, l0, l1, l2, t, u, v, sections_stmt;
+ tree label_vec, l0, l1, l2, t, u, sections_stmt, vin, vmain, vnext, cont;
unsigned i, len;
- basic_block entry_bb, exit_bb, l0_bb, l1_bb, l2_bb, default_bb;
+ basic_block entry_bb, l0_bb, l1_bb, l2_bb, default_bb;
block_stmt_iterator si;
struct omp_region *inner;
- edge e;
entry_bb = region->entry;
- l0_bb = create_empty_bb (entry_bb);
+ l0_bb = single_succ (entry_bb);
l1_bb = region->cont;
- l2_bb = single_succ (l1_bb);
+ l2_bb = region->exit;
+ gcc_assert (single_pred (l2_bb) == l0_bb);
default_bb = create_empty_bb (l1_bb->prev_bb);
- exit_bb = region->exit;
l0 = tree_block_label (l0_bb);
l1 = tree_block_label (l1_bb);
l2 = tree_block_label (l2_bb);
- v = create_tmp_var (unsigned_type_node, ".section");
-
/* We will build a switch() with enough cases for all the
OMP_SECTION regions, a '0' case to handle the end of more work
and a default case to abort if something goes wrong. */
- len = EDGE_COUNT (entry_bb->succs);
+ len = EDGE_COUNT (l0_bb->succs) - 1;
label_vec = make_tree_vec (len + 2);
/* The call to GOMP_sections_start goes in ENTRY_BB, replacing the
OMP_SECTIONS statement. */
si = bsi_last (entry_bb);
sections_stmt = bsi_stmt (si);
- gcc_assert (TREE_CODE (sections_stmt) == OMP_SECTIONS);
+ vin = OMP_SECTIONS_CONTROL (sections_stmt);
if (!is_combined_parallel (region))
{
/* If we are not inside a combined parallel+sections region,
@@ -3210,21 +3373,42 @@ expand_omp_sections (struct omp_region *region)
t = tree_cons (NULL, t, NULL);
u = built_in_decls[BUILT_IN_GOMP_SECTIONS_START];
t = build_function_call_expr (u, t);
- t = build2 (MODIFY_EXPR, void_type_node, v, t);
- bsi_insert_after (&si, t, BSI_SAME_STMT);
+ t = force_gimple_operand_bsi (&si, t, false, NULL_TREE,
+ true, BSI_SAME_STMT);
+ t = build2 (MODIFY_EXPR, void_type_node, vin, t);
}
+ else
+ {
+ /* Otherwise, call GOMP_sections_next. */
+ t = built_in_decls[BUILT_IN_GOMP_SECTIONS_NEXT];
+ t = build_function_call_expr (t, NULL);
+ t = build2 (MODIFY_EXPR, void_type_node, vin, t);
+ }
+ bsi_insert_after (&si, t, BSI_SAME_STMT);
+ if (in_ssa_p)
+ {
+ SSA_NAME_DEF_STMT (vin) = t;
+ mark_new_vars_to_rename (t);
+ }
+
bsi_remove (&si, true);
- /* The switch() statement replacing OMP_SECTIONS goes in L0_BB. */
- si = bsi_start (l0_bb);
+ /* The switch() statement replacing OMP_SECTIONS_SWITCH goes in L0_BB. */
+ si = bsi_last (l0_bb);
+ gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_SECTIONS_SWITCH);
+ cont = last_stmt (l1_bb);
+ gcc_assert (TREE_CODE (cont) == OMP_CONTINUE);
+ vmain = TREE_OPERAND (cont, 1);
+ vnext = TREE_OPERAND (cont, 0);
- t = build3 (SWITCH_EXPR, void_type_node, v, NULL, label_vec);
- bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
+ t = build3 (SWITCH_EXPR, void_type_node, vmain, NULL, label_vec);
+ bsi_insert_after (&si, t, BSI_SAME_STMT);
t = build3 (CASE_LABEL_EXPR, void_type_node,
build_int_cst (unsigned_type_node, 0), NULL, l2);
TREE_VEC_ELT (label_vec, 0) = t;
- make_edge (l0_bb, l2_bb, 0);
+
+ bsi_remove (&si, true);
/* Convert each OMP_SECTION into a CASE_LABEL_EXPR. */
for (inner = region->inner, i = 1; inner; inner = inner->next, ++i)
@@ -3248,10 +3432,6 @@ expand_omp_sections (struct omp_region *region)
gcc_assert (TREE_CODE (bsi_stmt (si)) == OMP_RETURN);
bsi_remove (&si, true);
- e = single_pred_edge (s_entry_bb);
- e->flags = 0;
- redirect_edge_pred (e, l0_bb);
-
single_succ_edge (s_entry_bb)->flags = EDGE_FALLTHRU;
single_succ_edge (s_exit_bb)->flags = EDGE_FALLTHRU;
}
@@ -3265,6 +3445,8 @@ expand_omp_sections (struct omp_region *region)
si = bsi_start (default_bb);
t = built_in_decls[BUILT_IN_TRAP];
t = build_function_call_expr (t, NULL);
+ if (in_ssa_p)
+ mark_new_vars_to_rename (t);
bsi_insert_after (&si, t, BSI_CONTINUE_LINKING);
/* Code to get the next section goes in L1_BB. */
@@ -3273,46 +3455,31 @@ expand_omp_sections (struct omp_region *region)
t = built_in_decls[BUILT_IN_GOMP_SECTIONS_NEXT];
t = build_function_call_expr (t, NULL);
- t = build2 (MODIFY_EXPR, void_type_node, v, t);
+ t = build2 (MODIFY_EXPR, void_type_node, vnext, t);
bsi_insert_after (&si, t, BSI_SAME_STMT);
+ if (in_ssa_p)
+ {
+ SSA_NAME_DEF_STMT (vnext) = t;
+ mark_new_vars_to_rename (t);
+ }
bsi_remove (&si, true);
/* Cleanup function replaces OMP_RETURN in EXIT_BB. */
- si = bsi_last (exit_bb);
+ si = bsi_last (l2_bb);
if (OMP_RETURN_NOWAIT (bsi_stmt (si)))
t = built_in_decls[BUILT_IN_GOMP_SECTIONS_END_NOWAIT];
else
t = built_in_decls[BUILT_IN_GOMP_SECTIONS_END];
t = build_function_call_expr (t, NULL);
bsi_insert_after (&si, t, BSI_SAME_STMT);
+ if (in_ssa_p)
+ mark_new_vars_to_rename (t);
bsi_remove (&si, true);
- /* Connect the new blocks. */
- if (is_combined_parallel (region))
- {
- /* If this was a combined parallel+sections region, we did not
- emit a GOMP_sections_start in the entry block, so we just
- need to jump to L1_BB to get the next section. */
- make_edge (entry_bb, l1_bb, EDGE_FALLTHRU);
- }
- else
- make_edge (entry_bb, l0_bb, EDGE_FALLTHRU);
-
- e = single_succ_edge (l1_bb);
- redirect_edge_succ (e, l0_bb);
- e->flags = EDGE_FALLTHRU;
+ single_succ_edge (l1_bb)->flags = EDGE_FALLTHRU;
- set_immediate_dominator (CDI_DOMINATORS, l1_bb,
- recount_dominator (CDI_DOMINATORS, l1_bb));
- set_immediate_dominator (CDI_DOMINATORS, l0_bb,
- recount_dominator (CDI_DOMINATORS, l0_bb));
- set_immediate_dominator (CDI_DOMINATORS, default_bb,
- recount_dominator (CDI_DOMINATORS, default_bb));
- set_immediate_dominator (CDI_DOMINATORS, l2_bb,
- recount_dominator (CDI_DOMINATORS, l2_bb));
- for (inner = region->inner; inner; inner = inner->next)
- set_immediate_dominator (CDI_DOMINATORS, inner->entry,
- recount_dominator (CDI_DOMINATORS, inner->entry));
+ set_immediate_dominator (CDI_DOMINATORS, default_bb, l0_bb);
+ update_ssa (TODO_update_ssa_only_virtuals);
}
@@ -3342,13 +3509,11 @@ expand_omp_single (struct omp_region *region)
si = bsi_last (exit_bb);
if (!OMP_RETURN_NOWAIT (bsi_stmt (si)) || need_barrier)
- {
- tree t = alloc_stmt_list ();
- build_omp_barrier (&t);
- bsi_insert_after (&si, t, BSI_SAME_STMT);
- }
+ force_gimple_operand_bsi (&si, build_omp_barrier (), false, NULL_TREE,
+ false, BSI_SAME_STMT);
bsi_remove (&si, true);
single_succ_edge (exit_bb)->flags = EDGE_FALLTHRU;
+ update_ssa (TODO_update_ssa_only_virtuals);
}
@@ -3471,6 +3636,11 @@ build_omp_regions_1 (basic_block bb, struct omp_region *parent)
gcc_assert (parent);
parent->cont = bb;
}
+ else if (code == OMP_SECTIONS_SWITCH)
+ {
+ /* OMP_SECTIONS_SWITCH is part of OMP_SECTIONS, and we do nothing for
+ it. */
+ }
else
{
/* Otherwise, this directive becomes the parent for a new
@@ -3498,7 +3668,6 @@ build_omp_regions (void)
build_omp_regions_1 (ENTRY_BLOCK_PTR, NULL);
}
-
/* Main entry point for expanding OMP-GIMPLE into runtime calls. */
static unsigned int
@@ -3550,6 +3719,32 @@ struct tree_opt_pass pass_expand_omp =
TODO_dump_func, /* todo_flags_finish */
0 /* letter */
};
+
+/* This expansion pass is run when optimizations are disabled.
+ Execute_expand_omp detects this automatically by querying in_ssa_p. */
+
+static bool
+gate_expand_omp_O0 (void)
+{
+ return !optimize && flag_openmp != 0 && errorcount == 0;
+}
+
+struct tree_opt_pass pass_expand_omp_O0 =
+{
+ "ompexpO0", /* name */
+ gate_expand_omp_O0, /* gate */
+ execute_expand_omp, /* execute */
+ NULL, /* sub */
+ NULL, /* next */
+ 0, /* static_pass_number */
+ 0, /* tv_id */
+ PROP_gimple_any, /* properties_required */
+ PROP_gimple_lomp, /* properties_provided */
+ 0, /* properties_destroyed */
+ 0, /* todo_flags_start */
+ TODO_dump_func, /* todo_flags_finish */
+ 0 /* letter */
+};
/* Routines to lower OpenMP directives into OMP-GIMPLE. */
@@ -3562,6 +3757,7 @@ lower_omp_sections (tree *stmt_p, omp_context *ctx)
tree t, dlist;
tree_stmt_iterator tsi;
unsigned i, len;
+ tree control;
stmt = *stmt_p;
@@ -3620,9 +3816,12 @@ lower_omp_sections (tree *stmt_p, omp_context *ctx)
new_body = alloc_stmt_list ();
append_to_statement_list (ilist, &new_body);
append_to_statement_list (stmt, &new_body);
+ append_to_statement_list (make_node (OMP_SECTIONS_SWITCH), &new_body);
append_to_statement_list (bind, &new_body);
- t = make_node (OMP_CONTINUE);
+ control = create_tmp_var (unsigned_type_node, ".section");
+ t = build2 (OMP_CONTINUE, void_type_node, control, control);
+ OMP_SECTIONS_CONTROL (stmt) = control;
append_to_statement_list (t, &new_body);
append_to_statement_list (olist, &new_body);
@@ -3997,14 +4196,22 @@ lower_omp_for_lastprivate (struct omp_for_data *fd, tree *body_p,
static void
lower_omp_for (tree *stmt_p, omp_context *ctx)
{
- tree t, stmt, ilist, dlist, new_stmt, *body_p, *rhs_p;
+ tree t, stmt, ilist, clist = NULL, dlist, new_stmt, *body_p, *rhs_p, c, *tp;
struct omp_for_data fd;
+ enum gimplify_status gs;
stmt = *stmt_p;
push_gimplify_context ();
lower_omp (&OMP_FOR_PRE_BODY (stmt), ctx);
+ c = find_omp_clause (OMP_FOR_CLAUSES (stmt), OMP_CLAUSE_SCHEDULE);
+ if (c)
+ {
+ tp = &OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c);
+ gs = gimplify_expr (tp, &clist, NULL, is_gimple_val, fb_rvalue);
+ gcc_assert (gs == GS_ALL_DONE);
+ }
lower_omp (&OMP_FOR_BODY (stmt), ctx);
/* Move declaration of temporaries in the loop body before we make
@@ -4020,6 +4227,7 @@ lower_omp_for (tree *stmt_p, omp_context *ctx)
ilist = NULL;
dlist = NULL;
append_to_statement_list (OMP_FOR_PRE_BODY (stmt), body_p);
+ append_to_statement_list (clist, body_p);
lower_rec_input_clauses (OMP_FOR_CLAUSES (stmt), body_p, &dlist, ctx);
/* Lower the header expressions. At this point, we can assume that
@@ -4048,7 +4256,7 @@ lower_omp_for (tree *stmt_p, omp_context *ctx)
append_to_statement_list (OMP_FOR_BODY (stmt), body_p);
- t = make_node (OMP_CONTINUE);
+ t = build2 (OMP_CONTINUE, void_type_node, fd.v, fd.v);
append_to_statement_list (t, body_p);
/* After the loop, add exit clauses. */
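
For orientation, the control flow that expand_omp_sections now builds around OMP_SECTIONS_SWITCH and the OMP_CONTINUE control variable can be pictured in plain C. The sketch below only illustrates that dispatch structure; next_section () is a hypothetical stand-in for the GOMP section-scheduling runtime calls, not the real libgomp interface.

/* Illustrative sketch only: models the switch-based dispatch that
   expand_omp_sections generates.  next_section () is a hypothetical
   stand-in for the libgomp section-scheduling calls.  */
#include <stdio.h>

static unsigned
next_section (unsigned nsections)
{
  static unsigned s = 0;          /* hand out sections 1..nsections, then 0 */
  return s < nsections ? ++s : 0;
}

static void
run_sections (void)
{
  unsigned v = next_section (2);  /* the ".section" control variable (vmain) */

  for (;;)
    switch (v)                    /* the OMP_SECTIONS_SWITCH in L0_BB */
      {
      case 0:
        return;                   /* L2_BB: all sections done */
      case 1:
        puts ("section 1");
        v = next_section (2);     /* OMP_CONTINUE/L1_BB: fetch the next section */
        break;
      case 2:
        puts ("section 2");
        v = next_section (2);
        break;
      default:
        __builtin_trap ();        /* default_bb */
      }
}

int
main (void)
{
  run_sections ();
  return 0;
}
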
diff --git a/gcc/passes.c b/gcc/passes.c
index 92781e19616..02a28cebeeb 100644
--- a/gcc/passes.c
+++ b/gcc/passes.c
@@ -474,7 +474,7 @@ init_optimization_passes (void)
p = &all_passes;
NEXT_PASS (pass_fixup_cfg);
NEXT_PASS (pass_init_datastructures);
- NEXT_PASS (pass_expand_omp);
+ NEXT_PASS (pass_expand_omp_O0);
NEXT_PASS (pass_all_optimizations);
NEXT_PASS (pass_warn_function_noreturn);
NEXT_PASS (pass_mudflap_2);
@@ -491,6 +491,7 @@ init_optimization_passes (void)
NEXT_PASS (pass_create_structure_vars);
NEXT_PASS (pass_build_ssa);
NEXT_PASS (pass_may_alias);
+ NEXT_PASS (pass_expand_omp);
NEXT_PASS (pass_return_slot);
NEXT_PASS (pass_rename_ssa_copies);
NEXT_PASS (pass_early_warn_uninitialized);
diff --git a/gcc/testsuite/g++.dg/gomp/for-10.C b/gcc/testsuite/g++.dg/gomp/for-10.C
index f21404249c7..4b8eb8176e9 100644
--- a/gcc/testsuite/g++.dg/gomp/for-10.C
+++ b/gcc/testsuite/g++.dg/gomp/for-10.C
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-fopenmp -fdump-tree-ompexp" } */
+/* { dg-options "-fopenmp -O1 -fdump-tree-ompexp" } */
extern void bar(int);
diff --git a/gcc/testsuite/g++.dg/gomp/for-4.C b/gcc/testsuite/g++.dg/gomp/for-4.C
index fb6994ea20b..5a9d5927133 100644
--- a/gcc/testsuite/g++.dg/gomp/for-4.C
+++ b/gcc/testsuite/g++.dg/gomp/for-4.C
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-fopenmp -fdump-tree-ompexp" } */
+/* { dg-options "-fopenmp -O1 -fdump-tree-ompexp" } */
extern void bar(int);
diff --git a/gcc/testsuite/g++.dg/gomp/for-5.C b/gcc/testsuite/g++.dg/gomp/for-5.C
index 5912a4e5561..36179615aac 100644
--- a/gcc/testsuite/g++.dg/gomp/for-5.C
+++ b/gcc/testsuite/g++.dg/gomp/for-5.C
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-fopenmp -fdump-tree-ompexp" } */
+/* { dg-options "-fopenmp -O1 -fdump-tree-ompexp" } */
extern void bar(int);
diff --git a/gcc/testsuite/g++.dg/gomp/for-6.C b/gcc/testsuite/g++.dg/gomp/for-6.C
index 100ee2c8c21..ee9d19a05f1 100644
--- a/gcc/testsuite/g++.dg/gomp/for-6.C
+++ b/gcc/testsuite/g++.dg/gomp/for-6.C
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-fopenmp -fdump-tree-ompexp" } */
+/* { dg-options "-fopenmp -O1 -fdump-tree-ompexp" } */
extern void bar(int);
diff --git a/gcc/testsuite/g++.dg/gomp/for-7.C b/gcc/testsuite/g++.dg/gomp/for-7.C
index 10763dc596c..551973337d6 100644
--- a/gcc/testsuite/g++.dg/gomp/for-7.C
+++ b/gcc/testsuite/g++.dg/gomp/for-7.C
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-fopenmp -fdump-tree-ompexp" } */
+/* { dg-options "-fopenmp -O1 -fdump-tree-ompexp" } */
extern void bar(int);
diff --git a/gcc/testsuite/g++.dg/gomp/for-8.C b/gcc/testsuite/g++.dg/gomp/for-8.C
index 1bc66c49a0d..859bd635f21 100644
--- a/gcc/testsuite/g++.dg/gomp/for-8.C
+++ b/gcc/testsuite/g++.dg/gomp/for-8.C
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-fopenmp -fdump-tree-ompexp" } */
+/* { dg-options "-fopenmp -O1 -fdump-tree-ompexp" } */
extern void bar(int);
diff --git a/gcc/testsuite/g++.dg/gomp/for-9.C b/gcc/testsuite/g++.dg/gomp/for-9.C
index af99e216e79..a80f1ccccfd 100644
--- a/gcc/testsuite/g++.dg/gomp/for-9.C
+++ b/gcc/testsuite/g++.dg/gomp/for-9.C
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-fopenmp -fdump-tree-ompexp" } */
+/* { dg-options "-fopenmp -O1 -fdump-tree-ompexp" } */
extern void bar(int);
diff --git a/gcc/testsuite/gcc.dg/gomp/critical-1.c b/gcc/testsuite/gcc.dg/gomp/critical-1.c
index 6f3348c8884..037cce8abad 100644
--- a/gcc/testsuite/gcc.dg/gomp/critical-1.c
+++ b/gcc/testsuite/gcc.dg/gomp/critical-1.c
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-fopenmp -fdump-tree-ompexp" } */
+/* { dg-options "-fopenmp -O1 -fdump-tree-ompexp" } */
extern void bar(int);
diff --git a/gcc/testsuite/gcc.dg/gomp/critical-3.c b/gcc/testsuite/gcc.dg/gomp/critical-3.c
index c0046f57139..79654ffbc20 100644
--- a/gcc/testsuite/gcc.dg/gomp/critical-3.c
+++ b/gcc/testsuite/gcc.dg/gomp/critical-3.c
@@ -1,5 +1,5 @@
// { dg-do compile }
-// { dg-options "-fopenmp -fdump-tree-ompexp" }
+// { dg-options "-fopenmp -O1 -fdump-tree-ompexp" }
void bar(void);
void foo(void)
diff --git a/gcc/testsuite/gcc.dg/gomp/for-10.c b/gcc/testsuite/gcc.dg/gomp/for-10.c
index f21404249c7..4b8eb8176e9 100644
--- a/gcc/testsuite/gcc.dg/gomp/for-10.c
+++ b/gcc/testsuite/gcc.dg/gomp/for-10.c
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-fopenmp -fdump-tree-ompexp" } */
+/* { dg-options "-fopenmp -O1 -fdump-tree-ompexp" } */
extern void bar(int);
diff --git a/gcc/testsuite/gcc.dg/gomp/for-13.c b/gcc/testsuite/gcc.dg/gomp/for-13.c
index 607de49c749..cd41ee6e9be 100644
--- a/gcc/testsuite/gcc.dg/gomp/for-13.c
+++ b/gcc/testsuite/gcc.dg/gomp/for-13.c
@@ -2,7 +2,7 @@
// for iteration variable as private.
// { dg-do compile }
-// { dg-options "-fopenmp -fdump-tree-ompexp" }
+// { dg-options "-fopenmp -O1 -fdump-tree-ompexp" }
extern void bar(int);
void foo(void)
diff --git a/gcc/testsuite/gcc.dg/gomp/for-4.c b/gcc/testsuite/gcc.dg/gomp/for-4.c
index fb6994ea20b..5a9d5927133 100644
--- a/gcc/testsuite/gcc.dg/gomp/for-4.c
+++ b/gcc/testsuite/gcc.dg/gomp/for-4.c
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-fopenmp -fdump-tree-ompexp" } */
+/* { dg-options "-fopenmp -O1 -fdump-tree-ompexp" } */
extern void bar(int);
diff --git a/gcc/testsuite/gcc.dg/gomp/for-5.c b/gcc/testsuite/gcc.dg/gomp/for-5.c
index 5912a4e5561..36179615aac 100644
--- a/gcc/testsuite/gcc.dg/gomp/for-5.c
+++ b/gcc/testsuite/gcc.dg/gomp/for-5.c
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-fopenmp -fdump-tree-ompexp" } */
+/* { dg-options "-fopenmp -O1 -fdump-tree-ompexp" } */
extern void bar(int);
diff --git a/gcc/testsuite/gcc.dg/gomp/for-6.c b/gcc/testsuite/gcc.dg/gomp/for-6.c
index 100ee2c8c21..ee9d19a05f1 100644
--- a/gcc/testsuite/gcc.dg/gomp/for-6.c
+++ b/gcc/testsuite/gcc.dg/gomp/for-6.c
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-fopenmp -fdump-tree-ompexp" } */
+/* { dg-options "-fopenmp -O1 -fdump-tree-ompexp" } */
extern void bar(int);
diff --git a/gcc/testsuite/gcc.dg/gomp/for-7.c b/gcc/testsuite/gcc.dg/gomp/for-7.c
index 10763dc596c..551973337d6 100644
--- a/gcc/testsuite/gcc.dg/gomp/for-7.c
+++ b/gcc/testsuite/gcc.dg/gomp/for-7.c
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-fopenmp -fdump-tree-ompexp" } */
+/* { dg-options "-fopenmp -O1 -fdump-tree-ompexp" } */
extern void bar(int);
diff --git a/gcc/testsuite/gcc.dg/gomp/for-8.c b/gcc/testsuite/gcc.dg/gomp/for-8.c
index 1bc66c49a0d..859bd635f21 100644
--- a/gcc/testsuite/gcc.dg/gomp/for-8.c
+++ b/gcc/testsuite/gcc.dg/gomp/for-8.c
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-fopenmp -fdump-tree-ompexp" } */
+/* { dg-options "-fopenmp -O1 -fdump-tree-ompexp" } */
extern void bar(int);
diff --git a/gcc/testsuite/gcc.dg/gomp/for-9.c b/gcc/testsuite/gcc.dg/gomp/for-9.c
index af99e216e79..a80f1ccccfd 100644
--- a/gcc/testsuite/gcc.dg/gomp/for-9.c
+++ b/gcc/testsuite/gcc.dg/gomp/for-9.c
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-fopenmp -fdump-tree-ompexp" } */
+/* { dg-options "-fopenmp -O1 -fdump-tree-ompexp" } */
extern void bar(int);
diff --git a/gcc/testsuite/gcc.dg/gomp/master-3.c b/gcc/testsuite/gcc.dg/gomp/master-3.c
index fee09ddd798..da90cf968f6 100644
--- a/gcc/testsuite/gcc.dg/gomp/master-3.c
+++ b/gcc/testsuite/gcc.dg/gomp/master-3.c
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-fopenmp -fdump-tree-ompexp" } */
+/* { dg-options "-fopenmp -O1 -fdump-tree-ompexp" } */
extern void bar(int);
diff --git a/gcc/testsuite/gcc.dg/gomp/ordered-1.c b/gcc/testsuite/gcc.dg/gomp/ordered-1.c
index de5e116ebd2..1bc80a5bf09 100644
--- a/gcc/testsuite/gcc.dg/gomp/ordered-1.c
+++ b/gcc/testsuite/gcc.dg/gomp/ordered-1.c
@@ -1,5 +1,5 @@
/* { dg-do compile } */
-/* { dg-options "-fopenmp -fdump-tree-ompexp" } */
+/* { dg-options "-fopenmp -O1 -fdump-tree-ompexp" } */
extern void bar(int);
diff --git a/gcc/tree-cfg.c b/gcc/tree-cfg.c
index e785af8d170..0ac813ae725 100644
--- a/gcc/tree-cfg.c
+++ b/gcc/tree-cfg.c
@@ -535,6 +535,10 @@ make_edges (void)
case OMP_SECTIONS:
cur_region = new_omp_region (bb, code, cur_region);
+ fallthru = true;
+ break;
+
+ case OMP_SECTIONS_SWITCH:
fallthru = false;
break;
@@ -551,31 +555,42 @@ make_edges (void)
switch (cur_region->type)
{
case OMP_FOR:
- /* ??? Technically there should be a some sort of loopback
- edge here, but it goes to a block that doesn't exist yet,
- and without it, updating the ssa form would be a real
- bear. Fortunately, we don't yet do ssa before expanding
- these nodes. */
+ /* Make the loopback edge. */
+ make_edge (bb, single_succ (cur_region->entry), 0);
+
+ /* Create an edge from OMP_FOR to exit, which corresponds to
+ the case that the body of the loop is not executed at
+ all. */
+ make_edge (cur_region->entry, bb->next_bb, 0);
+ fallthru = true;
break;
case OMP_SECTIONS:
/* Wire up the edges into and out of the nested sections. */
- /* ??? Similarly wrt loopback. */
{
+ basic_block switch_bb = single_succ (cur_region->entry);
+
struct omp_region *i;
for (i = cur_region->inner; i ; i = i->next)
{
gcc_assert (i->type == OMP_SECTION);
- make_edge (cur_region->entry, i->entry, 0);
+ make_edge (switch_bb, i->entry, 0);
make_edge (i->exit, bb, EDGE_FALLTHRU);
}
+
+ /* Make the loopback edge to the block with
+ OMP_SECTIONS_SWITCH. */
+ make_edge (bb, switch_bb, 0);
+
+ /* Make the edge from the switch to exit. */
+ make_edge (switch_bb, bb->next_bb, 0);
+ fallthru = false;
}
break;
default:
gcc_unreachable ();
}
- fallthru = true;
break;
default:
@@ -4157,6 +4172,13 @@ tree_redirect_edge_and_branch (edge e, basic_block dest)
e->flags |= EDGE_FALLTHRU;
break;
+ case OMP_RETURN:
+ case OMP_CONTINUE:
+ case OMP_SECTIONS_SWITCH:
+ case OMP_FOR:
+ /* The edges from OMP constructs can be simply redirected. */
+ break;
+
default:
/* Otherwise it must be a fallthru edge, and we don't need to
do anything besides redirecting it. */
@@ -4561,13 +4583,69 @@ gather_blocks_in_sese_region (basic_block entry, basic_block exit,
}
}
+/* Replaces *TP with a duplicate (belonging to function TO_CONTEXT).
+ The duplicates are recorded in VARS_MAP. */
+
+static void
+replace_by_duplicate_decl (tree *tp, htab_t vars_map, tree to_context)
+{
+ tree t = *tp, new_t, ddef;
+ struct function *f = DECL_STRUCT_FUNCTION (to_context);
+ struct tree_map in, *out;
+ void **loc;
+
+ in.from = t;
+ loc = htab_find_slot_with_hash (vars_map, &in, DECL_UID (t), INSERT);
+
+ if (!*loc)
+ {
+ if (SSA_VAR_P (t))
+ {
+ new_t = copy_var_decl (t, DECL_NAME (t), TREE_TYPE (t));
+ f->unexpanded_var_list
+ = tree_cons (NULL_TREE, new_t, f->unexpanded_var_list);
+
+ ddef = default_def (t);
+ if (ddef)
+ set_default_def (new_t, ddef);
+ }
+ else
+ {
+ gcc_assert (TREE_CODE (t) == CONST_DECL);
+ new_t = copy_node (t);
+ }
+ DECL_CONTEXT (new_t) = to_context;
+
+ out = XNEW (struct tree_map);
+ out->hash = DECL_UID (t);
+ out->from = t;
+ out->to = new_t;
+ *loc = out;
+
+ /* Enter the duplicate as a key to the hashtable as well, since
+ SSA names are shared, and we want to avoid replacing their
+ variables repeatedly. */
+ in.from = new_t;
+ loc = htab_find_slot_with_hash (vars_map, &in, DECL_UID (new_t), INSERT);
+ gcc_assert (!*loc);
+ out = XNEW (struct tree_map);
+ out->hash = DECL_UID (new_t);
+ out->from = new_t;
+ out->to = new_t;
+ *loc = out;
+ }
+ else
+ new_t = ((struct tree_map *) *loc)->to;
+
+ *tp = new_t;
+}
struct move_stmt_d
{
tree block;
tree from_context;
tree to_context;
- bitmap vars_to_remove;
+ htab_t vars_map;
htab_t new_label_map;
bool remap_decls_p;
};
@@ -4580,7 +4658,7 @@ static tree
move_stmt_r (tree *tp, int *walk_subtrees, void *data)
{
struct move_stmt_d *p = (struct move_stmt_d *) data;
- tree t = *tp;
+ tree t = *tp, *dp;
if (p->block && IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (TREE_CODE (t))))
TREE_BLOCK (t) = p->block;
@@ -4605,7 +4683,12 @@ move_stmt_r (tree *tp, int *walk_subtrees, void *data)
|| TREE_CODE (t) == SSA_NAME)
{
if (TREE_CODE (t) == SSA_NAME)
- t = SSA_NAME_VAR (t);
+ {
+ dp = &SSA_NAME_VAR (t);
+ t = SSA_NAME_VAR (t);
+ }
+ else
+ dp = tp;
if (TREE_CODE (t) == LABEL_DECL)
{
@@ -4622,23 +4705,16 @@ move_stmt_r (tree *tp, int *walk_subtrees, void *data)
}
else if (p->remap_decls_p)
{
- DECL_CONTEXT (t) = p->to_context;
-
- if (TREE_CODE (t) == VAR_DECL)
- {
- struct function *f = DECL_STRUCT_FUNCTION (p->to_context);
-
- if (!bitmap_bit_p (p->vars_to_remove, DECL_UID (t)))
- {
- f->unexpanded_var_list
- = tree_cons (0, t, f->unexpanded_var_list);
-
- /* Mark T to be removed from the original function,
- otherwise it will be given a DECL_RTL when the
- original function is expanded. */
- bitmap_set_bit (p->vars_to_remove, DECL_UID (t));
- }
- }
+ /* Replace T with its duplicate. T should no longer appear in the
+ parent function, so this looks wasteful; however, it may appear
+ in referenced_vars, and more importantly, as virtual operands of
+ statements, and in alias lists of other variables. It would be
+ quite difficult to expunge it from all those places. ??? It might
+ suffice to do this for addressable variables. */
+ if ((TREE_CODE (t) == VAR_DECL
+ && !is_global_var (t))
+ || TREE_CODE (t) == CONST_DECL)
+ replace_by_duplicate_decl (dp, p->vars_map, p->to_context);
}
}
else if (TYPE_P (t))
@@ -4647,6 +4723,56 @@ move_stmt_r (tree *tp, int *walk_subtrees, void *data)
return NULL_TREE;
}
+/* Marks all virtual operands of statement STMT for renaming. */
+
+void
+mark_virtual_ops_for_renaming (tree stmt)
+{
+ ssa_op_iter iter;
+ tree var;
+
+ if (TREE_CODE (stmt) == PHI_NODE)
+ {
+ var = PHI_RESULT (stmt);
+ if (is_gimple_reg (var))
+ return;
+
+ if (TREE_CODE (var) == SSA_NAME)
+ var = SSA_NAME_VAR (var);
+ mark_sym_for_renaming (var);
+ return;
+ }
+
+ update_stmt (stmt);
+
+ FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_VIRTUALS)
+ {
+ if (TREE_CODE (var) == SSA_NAME)
+ var = SSA_NAME_VAR (var);
+ mark_sym_for_renaming (var);
+ }
+}
+
+/* Marks virtual operands of all statements in basic blocks BBS for
+ renaming. */
+
+static void
+mark_virtual_ops_in_region (VEC(basic_block,heap) *bbs)
+{
+ tree phi;
+ block_stmt_iterator bsi;
+ basic_block bb;
+ unsigned i;
+
+ for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
+ {
+ for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+ mark_virtual_ops_for_renaming (phi);
+
+ for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ mark_virtual_ops_for_renaming (bsi_stmt (bsi));
+ }
+}
/* Move basic block BB from function CFUN to function DEST_FN. The
block is moved out of the original linked list and placed after
@@ -4655,13 +4781,13 @@ move_stmt_r (tree *tp, int *walk_subtrees, void *data)
If UPDATE_EDGE_COUNT_P is true, the edge counts on both CFGs is
updated to reflect the moved edges.
- On exit, local variables that need to be removed from
- CFUN->UNEXPANDED_VAR_LIST will have been added to VARS_TO_REMOVE. */
+ The local variables are remapped to new instances, VARS_MAP is used
+ to record the mapping. */
static void
move_block_to_fn (struct function *dest_cfun, basic_block bb,
basic_block after, bool update_edge_count_p,
- bitmap vars_to_remove, htab_t new_label_map, int eh_offset)
+ htab_t vars_map, htab_t new_label_map, int eh_offset)
{
struct control_flow_graph *cfg;
edge_iterator ei;
@@ -4670,6 +4796,7 @@ move_block_to_fn (struct function *dest_cfun, basic_block bb,
struct move_stmt_d d;
unsigned old_len, new_len;
basic_block *addr;
+ tree phi;
/* Remove BB from dominance structures. */
delete_from_dominance_info (CDI_DOMINATORS, bb);
@@ -4707,20 +4834,41 @@ move_block_to_fn (struct function *dest_cfun, basic_block bb,
VEC_replace (basic_block, cfg->x_basic_block_info,
bb->index, bb);
+ /* Remap the variables in phi nodes. */
+ for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+ {
+ use_operand_p use;
+ tree op = PHI_RESULT (phi);
+ ssa_op_iter oi;
+
+ if (!is_gimple_reg (op))
+ continue;
+
+ replace_by_duplicate_decl (&SSA_NAME_VAR (op), vars_map,
+ dest_cfun->decl);
+ FOR_EACH_PHI_ARG (use, phi, oi, SSA_OP_USE)
+ {
+ op = USE_FROM_PTR (use);
+ if (TREE_CODE (op) == SSA_NAME)
+ replace_by_duplicate_decl (&SSA_NAME_VAR (op), vars_map,
+ dest_cfun->decl);
+ }
+ }
+
/* The statements in BB need to be associated with a new TREE_BLOCK.
Labels need to be associated with a new label-to-block map. */
memset (&d, 0, sizeof (d));
- d.vars_to_remove = vars_to_remove;
+ d.vars_map = vars_map;
+ d.from_context = cfun->decl;
+ d.to_context = dest_cfun->decl;
+ d.new_label_map = new_label_map;
for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
{
tree stmt = bsi_stmt (si);
int region;
- d.from_context = cfun->decl;
- d.to_context = dest_cfun->decl;
d.remap_decls_p = true;
- d.new_label_map = new_label_map;
if (TREE_BLOCK (stmt))
d.block = DECL_INITIAL (dest_cfun->decl);
@@ -4809,7 +4957,7 @@ new_label_mapper (tree decl, void *data)
gcc_assert (TREE_CODE (decl) == LABEL_DECL);
- m = xmalloc (sizeof (struct tree_map));
+ m = XNEW (struct tree_map);
m->hash = DECL_UID (decl);
m->from = decl;
m->to = create_artificial_label ();
@@ -4852,8 +5000,7 @@ move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
unsigned i, num_entry_edges, num_exit_edges;
edge e;
edge_iterator ei;
- bitmap vars_to_remove;
- htab_t new_label_map;
+ htab_t new_label_map, vars_map;
saved_cfun = cfun;
@@ -4946,44 +5093,28 @@ move_sese_region_to_fn (struct function *dest_cfun, basic_block entry_bb,
cfun = saved_cfun;
+ /* The ssa form for virtual operands in the source function will have to
+ be repaired. We do not care for the real operands -- the sese region
+ must be closed with respect to those. */
+ mark_virtual_ops_in_region (bbs);
+
/* Move blocks from BBS into DEST_CFUN. */
gcc_assert (VEC_length (basic_block, bbs) >= 2);
after = dest_cfun->cfg->x_entry_block_ptr;
- vars_to_remove = BITMAP_ALLOC (NULL);
+ vars_map = htab_create (17, tree_map_hash, tree_map_eq, free);
for (i = 0; VEC_iterate (basic_block, bbs, i, bb); i++)
{
/* No need to update edge counts on the last block. It has
already been updated earlier when we detached the region from
the original CFG. */
- move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, vars_to_remove,
+ move_block_to_fn (dest_cfun, bb, after, bb != exit_bb, vars_map,
new_label_map, eh_offset);
after = bb;
}
if (new_label_map)
htab_delete (new_label_map);
-
- /* Remove the variables marked in VARS_TO_REMOVE from
- CFUN->UNEXPANDED_VAR_LIST. Otherwise, they will be given a
- DECL_RTL in the context of CFUN. */
- if (!bitmap_empty_p (vars_to_remove))
- {
- tree *p;
-
- for (p = &cfun->unexpanded_var_list; *p; )
- {
- tree var = TREE_VALUE (*p);
- if (bitmap_bit_p (vars_to_remove, DECL_UID (var)))
- {
- *p = TREE_CHAIN (*p);
- continue;
- }
-
- p = &TREE_CHAIN (*p);
- }
- }
-
- BITMAP_FREE (vars_to_remove);
+ htab_delete (vars_map);
/* Rewire the entry and exit blocks. The successor to the entry
block turns into the successor of DEST_FN's ENTRY_BLOCK_PTR in
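
The vars_map handling above follows a simple memoization pattern: look the original decl up by DECL_UID, create the duplicate on the first miss, and also enter the duplicate as a key mapping to itself so that a later walk which meets the duplicate (for instance through a shared SSA name) does not copy it again. A minimal self-contained sketch of that pattern, using a plain array of int pairs instead of GCC's htab and integers in place of decls:

/* Minimal sketch of the vars_map memoization in replace_by_duplicate_decl:
   a UID-keyed map in which the duplicate is also entered as a key for
   itself, so repeated replacement is idempotent.  Plain array instead of
   GCC's htab; integers stand in for decls.  */
#include <stdio.h>

struct pair { int from, to; };
static struct pair map[64];
static int nmap;
static int next_uid = 1000;     /* stand-in for fresh DECL_UIDs */

static int
lookup (int from)
{
  int i;
  for (i = 0; i < nmap; i++)
    if (map[i].from == from)
      return map[i].to;
  return -1;
}

static int
replace_by_duplicate (int uid)
{
  int to = lookup (uid);
  if (to < 0)
    {
      to = next_uid++;          /* the "copy_var_decl" step */
      map[nmap].from = uid, map[nmap].to = to, nmap++;
      map[nmap].from = to, map[nmap].to = to, nmap++;   /* self-mapping */
    }
  return to;
}

int
main (void)
{
  int d = replace_by_duplicate (42);
  /* Prints the same duplicate UID three times.  */
  printf ("%d %d %d\n", d, replace_by_duplicate (42), replace_by_duplicate (d));
  return 0;
}
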
diff --git a/gcc/tree-flow-inline.h b/gcc/tree-flow-inline.h
index 892be56a61c..2596fbecf48 100644
--- a/gcc/tree-flow-inline.h
+++ b/gcc/tree-flow-inline.h
@@ -585,16 +585,6 @@ phi_arg_index_from_use (use_operand_p use)
return index;
}
-/* Mark VAR as used, so that it'll be preserved during rtl expansion. */
-
-static inline void
-set_is_used (tree var)
-{
- var_ann_t ann = get_var_ann (var);
- ann->used = 1;
-}
-
-
/* ----------------------------------------------------------------------- */
/* Return true if T is an executable statement. */
diff --git a/gcc/tree-flow.h b/gcc/tree-flow.h
index 203d453113a..b24f3b47521 100644
--- a/gcc/tree-flow.h
+++ b/gcc/tree-flow.h
@@ -157,13 +157,6 @@ struct var_ann_d GTY(())
{
struct tree_ann_common_d common;
- /* Used by the out of SSA pass to determine whether this variable has
- been seen yet or not. */
- unsigned out_of_ssa_tag : 1;
-
- /* Used when building root_var structures in tree_ssa_live.[ch]. */
- unsigned root_var_processed : 1;
-
/* Nonzero if this variable is in the alias set of another variable. */
unsigned is_aliased : 1;
@@ -193,13 +186,6 @@ struct var_ann_d GTY(())
/* Variables that may alias this variable. */
VEC(tree, gc) *may_aliases;
- /* Used when going out of SSA form to indicate which partition this
- variable represents storage for. */
- unsigned partition;
-
- /* Used by the root-var object in tree-ssa-live.[ch]. */
- unsigned root_index;
-
/* During into-ssa and the dominator optimizer, this field holds the
current version of this variable (an SSA_NAME). */
tree current_def;
@@ -548,6 +534,8 @@ extern struct omp_region *root_omp_region;
extern struct omp_region *new_omp_region (basic_block, enum tree_code,
struct omp_region *);
extern void free_omp_regions (void);
+tree copy_var_decl (tree, tree, tree);
+tree find_omp_clause (tree, enum tree_code);
/*---------------------------------------------------------------------------
Function prototypes
@@ -637,6 +625,8 @@ extern tree get_virtual_var (tree);
extern void add_referenced_var (tree);
extern void mark_new_vars_to_rename (tree);
extern void find_new_referenced_vars (tree *);
+void mark_virtual_ops_for_renaming (tree);
+void mark_call_virtual_operands (void);
extern tree make_rename_temp (tree, const char *);
extern void set_default_def (tree, tree);
@@ -713,7 +703,7 @@ void mark_sym_for_renaming (tree);
void mark_set_for_renaming (bitmap);
tree get_current_def (tree);
void set_current_def (tree, tree);
-void go_out_of_ssa (void);
+void go_out_of_ssa (tree);
/* In tree-ssa-ccp.c */
bool fold_stmt (tree *);
@@ -889,7 +879,6 @@ enum escape_type
/* In tree-flow-inline.h */
static inline bool is_call_clobbered (tree);
static inline void mark_call_clobbered (tree, unsigned int);
-static inline void set_is_used (tree);
static inline bool unmodifiable_var_p (tree);
/* In tree-eh.c */
@@ -947,7 +936,8 @@ extern void register_jump_thread (edge, edge);
/* In gimplify.c */
tree force_gimple_operand (tree, tree *, bool, tree);
-tree force_gimple_operand_bsi (block_stmt_iterator *, tree, bool, tree);
+tree force_gimple_operand_bsi (block_stmt_iterator *, tree, bool, tree,
+ bool, enum bsi_iterator_update);
/* In tree-ssa-structalias.c */
bool find_what_p_points_to (tree);
diff --git a/gcc/tree-gimple.c b/gcc/tree-gimple.c
index 740f6735f7b..5a51f323873 100644
--- a/gcc/tree-gimple.c
+++ b/gcc/tree-gimple.c
@@ -222,6 +222,7 @@ is_gimple_stmt (tree t)
case OMP_PARALLEL:
case OMP_FOR:
case OMP_SECTIONS:
+ case OMP_SECTIONS_SWITCH:
case OMP_SECTION:
case OMP_SINGLE:
case OMP_MASTER:
diff --git a/gcc/tree-inline.c b/gcc/tree-inline.c
index baca8340486..531501b13fa 100644
--- a/gcc/tree-inline.c
+++ b/gcc/tree-inline.c
@@ -1633,6 +1633,7 @@ estimate_num_insns_1 (tree *tp, int *walk_subtrees, void *data)
case OMP_CLAUSE:
case OMP_RETURN:
case OMP_CONTINUE:
+ case OMP_SECTIONS_SWITCH:
break;
/* We don't account constants for now. Assume that the cost is amortized
diff --git a/gcc/tree-outof-ssa.c b/gcc/tree-outof-ssa.c
index 27822851315..427079ae46a 100644
--- a/gcc/tree-outof-ssa.c
+++ b/gcc/tree-outof-ssa.c
@@ -168,14 +168,7 @@ create_temp (tree t)
}
DECL_ARTIFICIAL (tmp) = DECL_ARTIFICIAL (t);
DECL_IGNORED_P (tmp) = DECL_IGNORED_P (t);
- add_referenced_var (tmp);
-
- /* add_referenced_var will create the annotation and set up some
- of the flags in the annotation. However, some flags we need to
- inherit from our original variable. */
- var_ann (tmp)->symbol_mem_tag = var_ann (t)->symbol_mem_tag;
- if (is_call_clobbered (t))
- mark_call_clobbered (tmp, var_ann (t)->escape_mask);
+ DECL_COMPLEX_GIMPLE_REG_P (tmp) = DECL_COMPLEX_GIMPLE_REG_P (t);
return tmp;
}
@@ -190,12 +183,9 @@ insert_copy_on_edge (edge e, tree dest, tree src)
tree copy;
copy = build2 (MODIFY_EXPR, TREE_TYPE (dest), dest, src);
- set_is_used (dest);
if (TREE_CODE (src) == ADDR_EXPR)
src = TREE_OPERAND (src, 0);
- if (TREE_CODE (src) == VAR_DECL || TREE_CODE (src) == PARM_DECL)
- set_is_used (src);
if (dump_file && (dump_flags & TDF_DETAILS))
{
@@ -885,13 +875,12 @@ coalesce_ssa_name (var_map map, int flags)
EXECUTE_IF_SET_IN_SBITMAP (live, 0, x, sbi)
{
tree var = root_var (rv, root_var_find (rv, x));
- var_ann_t ann = var_ann (var);
/* If these aren't already coalesced... */
if (partition_to_var (map, x) != var)
{
/* This root variable should have not already been assigned
to another partition which is not coalesced with this one. */
- gcc_assert (!ann->out_of_ssa_tag);
+ gcc_assert (!bitmap_bit_p (out_of_ssa, DECL_UID (var)));
if (dump_file && (dump_flags & TDF_DETAILS))
{
@@ -938,7 +927,6 @@ assign_vars (var_map map)
{
int x, i, num, rep;
tree t, var;
- var_ann_t ann;
root_var_p rv;
rv = root_var_init (map);
@@ -957,8 +945,7 @@ assign_vars (var_map map)
/* Coalescing will already have verified that more than one
partition doesn't have the same root variable. Simply marked
the variable as assigned. */
- ann = var_ann (var);
- ann->out_of_ssa_tag = 1;
+ bitmap_set_bit (out_of_ssa, DECL_UID (var));
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "partition %d has variable ", x);
@@ -973,7 +960,6 @@ assign_vars (var_map map)
for (x = 0; x < num; x++)
{
var = root_var (rv, x);
- ann = var_ann (var);
for (i = root_var_first_partition (rv, x);
i != ROOT_VAR_NONE;
i = root_var_next_partition (rv, i))
@@ -985,7 +971,7 @@ assign_vars (var_map map)
rep = var_to_partition (map, t);
- if (!ann->out_of_ssa_tag)
+ if (!bitmap_bit_p (out_of_ssa, DECL_UID (var)))
{
if (dump_file && (dump_flags & TDF_DETAILS))
print_exprs (dump_file, "", t, " --> ", var, "\n");
@@ -999,7 +985,6 @@ assign_vars (var_map map)
var = create_temp (t);
change_partition_var (map, var, rep);
- ann = var_ann (var);
if (dump_file && (dump_flags & TDF_DETAILS))
{
@@ -1043,7 +1028,6 @@ replace_use_variable (var_map map, use_operand_p p, tree *expr)
if (new_var)
{
SET_USE (p, new_var);
- set_is_used (new_var);
return true;
}
return false;
@@ -1079,7 +1063,6 @@ replace_def_variable (var_map map, def_operand_p def_p, tree *expr)
if (new_var)
{
SET_DEF (def_p, new_var);
- set_is_used (new_var);
return true;
}
return false;
@@ -2497,19 +2480,59 @@ insert_backedge_copies (void)
}
}
-/* Rewrites the current function out of SSA form, leaving it in gimple
+/* Rewrites the function FUN out of SSA form, leaving it in gimple
and not freeing any structures. */
void
-go_out_of_ssa (void)
+go_out_of_ssa (tree fun)
{
var_map map;
+ struct function *act_cfun = cfun;
+ tree act_decl = current_function_decl;
+ basic_block bb;
+ block_stmt_iterator bsi;
+ stmt_ann_t ann;
+ use_operand_p use;
+ tree stmt;
+ ssa_op_iter oi;
+ cfun = DECL_STRUCT_FUNCTION (fun);
+ current_function_decl = fun;
+
+ init_var_partition_map ();
insert_backedge_copies ();
eliminate_virtual_phis ();
map = create_ssa_var_map (0);
remove_ssa_form (map, 0);
delete_var_map (map);
+ delete_var_partition_map ();
+
+ /* Clean the annotations from the statements. Go_out_of_ssa is called
+ on code split from the current function; the operands of the statements are
+ allocated from the local caches, so we cannot preserve them. Even if
+ we could, it probably would not be safe due to possible changes to the
+ information stored in the annotations. */
+ FOR_EACH_BB (bb)
+ {
+ for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
+ {
+ stmt = bsi_stmt (bsi);
+ FOR_EACH_SSA_USE_OPERAND (use, stmt, oi,
+ SSA_OP_ALL_USES | SSA_OP_ALL_KILLS)
+ {
+ delink_imm_use (use);
+ }
+
+ ann = stmt_ann (bsi_stmt (bsi));
+ memset (ann, 0, sizeof (struct stmt_ann_d));
+ ann->common.type = STMT_ANN;
+ ann->modified = 1;
+ ann->bb = bb;
+ }
+ }
+
+ cfun = act_cfun;
+ current_function_decl = act_decl;
}
/* Take the current function out of SSA form, as described in
@@ -2523,6 +2546,8 @@ rewrite_out_of_ssa (void)
int var_flags = 0;
int ssa_flags = 0;
+ init_var_partition_map ();
+
/* If elimination of a PHI requires inserting a copy on a backedge,
then we will have to split the backedge which has numerous
undesirable performance effects.
@@ -2561,6 +2586,8 @@ rewrite_out_of_ssa (void)
/* Flush out flow graph and SSA data. */
delete_var_map (map);
+ delete_var_partition_map ();
+
in_ssa_p = false;
return 0;
}
diff --git a/gcc/tree-parloops.c b/gcc/tree-parloops.c
index 9e558420859..36ec003a67b 100644
--- a/gcc/tree-parloops.c
+++ b/gcc/tree-parloops.c
@@ -174,36 +174,6 @@ loop_parallel_p (struct loop *loop, struct tree_niter_desc *niter)
return ret;
}
-/* Marks all virtual operands of statement STMT for renaming. */
-
-static void
-mark_virtual_ops_for_renaming (tree stmt)
-{
- ssa_op_iter iter;
- tree var;
-
- if (TREE_CODE (stmt) == PHI_NODE)
- {
- var = PHI_RESULT (stmt);
- if (is_gimple_reg (var))
- return;
-
- if (TREE_CODE (var) == SSA_NAME)
- var = SSA_NAME_VAR (var);
- mark_sym_for_renaming (var);
- return;
- }
-
- update_stmt (stmt);
-
- FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter, SSA_OP_ALL_VIRTUALS)
- {
- if (TREE_CODE (var) == SSA_NAME)
- var = SSA_NAME_VAR (var);
- mark_sym_for_renaming (var);
- }
-}
-
/* Calls mark_virtual_ops_for_renaming for all members of LIST. */
static void
@@ -217,7 +187,7 @@ mark_virtual_ops_for_renaming_list (tree list)
/* Marks operands of calls for renaming. */
-static void
+void
mark_call_virtual_operands (void)
{
basic_block bb;
@@ -950,7 +920,7 @@ extract_loop_to_function (struct loop *loop, tree arg_struct, tree *loop_fn)
{
basic_block bb_to = loop_split_edge_with (loop->single_exit, NULL);
basic_block bb_from = loop_preheader_edge (loop)->src;
- basic_block repl_bb, bb;
+ basic_block repl_bb;
tree arg, narg, stmt;
struct function *act_cfun = cfun;
tree act_decl = current_function_decl;
@@ -958,7 +928,6 @@ extract_loop_to_function (struct loop *loop, tree arg_struct, tree *loop_fn)
basic_block *body = get_loop_body (loop);
struct loop *outer = loop->outer;
unsigned i, n = loop->num_nodes;
- stmt_ann_t ann;
cancel_loop_tree (current_loops, loop);
for (i = 0; i < n; i++)
@@ -993,27 +962,11 @@ extract_loop_to_function (struct loop *loop, tree arg_struct, tree *loop_fn)
SSA_NAME_DEF_STMT (arg_struct) = stmt;
bsi_insert_before (&bsi, stmt, BSI_NEW_STMT);
}
-
- go_out_of_ssa ();
-
- /* Let us pretend that we have never seen the statements before. The
- operands of the statements are allocated from the local caches, so
- we cannot preserve them. */
- FOR_EACH_BB (bb)
- {
- for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
- {
- ann = stmt_ann (bsi_stmt (bsi));
- memset (ann, 0, sizeof (struct stmt_ann_d));
- ann->common.type = STMT_ANN;
- ann->modified = 1;
- ann->bb = bb;
- }
- }
-
cfun = act_cfun;
current_function_decl = act_decl;
+ go_out_of_ssa (*loop_fn);
+
return repl_bb;
}
diff --git a/gcc/tree-pass.h b/gcc/tree-pass.h
index 8b5eecbd55a..f99f745bcda 100644
--- a/gcc/tree-pass.h
+++ b/gcc/tree-pass.h
@@ -275,6 +275,7 @@ extern struct tree_opt_pass pass_lower_vector;
extern struct tree_opt_pass pass_lower_vector_ssa;
extern struct tree_opt_pass pass_lower_omp;
extern struct tree_opt_pass pass_expand_omp;
+extern struct tree_opt_pass pass_expand_omp_O0;
extern struct tree_opt_pass pass_object_sizes;
extern struct tree_opt_pass pass_fold_builtins;
extern struct tree_opt_pass pass_stdarg;
diff --git a/gcc/tree-pretty-print.c b/gcc/tree-pretty-print.c
index 5ce495b501f..cfaccb39047 100644
--- a/gcc/tree-pretty-print.c
+++ b/gcc/tree-pretty-print.c
@@ -1780,9 +1780,21 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
case OMP_SECTIONS:
pp_string (buffer, "#pragma omp sections");
+ if (OMP_SECTIONS_CONTROL (node))
+ {
+ pp_string (buffer, " (");
+ dump_generic_node (buffer, OMP_SECTIONS_CONTROL (node), spc,
+ flags, false);
+ pp_string (buffer, ")");
+ }
dump_omp_clauses (buffer, OMP_SECTIONS_CLAUSES (node), spc, flags);
goto dump_omp_body;
+ case OMP_SECTIONS_SWITCH:
+ pp_string (buffer, "OMP_SECTIONS_SWITCH");
+ is_expr = false;
+ break;
+
case OMP_SECTION:
pp_string (buffer, "#pragma omp section");
goto dump_omp_body;
@@ -1830,7 +1842,11 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
break;
case OMP_CONTINUE:
- pp_string (buffer, "OMP_CONTINUE");
+ pp_string (buffer, "OMP_CONTINUE (");
+ dump_generic_node (buffer, TREE_OPERAND (node, 0), spc, flags, false);
+ pp_string (buffer, " <- ");
+ dump_generic_node (buffer, TREE_OPERAND (node, 1), spc, flags, false);
+ pp_string (buffer, ")");
is_expr = false;
break;
diff --git a/gcc/tree-profile.c b/gcc/tree-profile.c
index fe42309ccc7..809f9e72dc5 100644
--- a/gcc/tree-profile.c
+++ b/gcc/tree-profile.c
@@ -125,7 +125,7 @@ prepare_instrumented_value (block_stmt_iterator *bsi,
{
tree val = value->hvalue.value;
return force_gimple_operand_bsi (bsi, fold_convert (gcov_type_node, val),
- true, NULL_TREE);
+ true, NULL_TREE, true, BSI_SAME_STMT);
}
/* Output instructions as GIMPLE trees to increment the interval histogram
@@ -144,7 +144,7 @@ tree_gen_interval_profiler (histogram_value value, unsigned tag, unsigned base)
ref_ptr = force_gimple_operand_bsi (&bsi,
build_addr (ref, current_function_decl),
- true, NULL_TREE);
+ true, NULL_TREE, true, BSI_SAME_STMT);
val = prepare_instrumented_value (&bsi, value);
args = tree_cons (NULL_TREE, ref_ptr,
tree_cons (NULL_TREE, val,
@@ -169,7 +169,7 @@ tree_gen_pow2_profiler (histogram_value value, unsigned tag, unsigned base)
ref_ptr = force_gimple_operand_bsi (&bsi,
build_addr (ref, current_function_decl),
- true, NULL_TREE);
+ true, NULL_TREE, true, BSI_SAME_STMT);
val = prepare_instrumented_value (&bsi, value);
args = tree_cons (NULL_TREE, ref_ptr,
tree_cons (NULL_TREE, val,
@@ -192,7 +192,7 @@ tree_gen_one_value_profiler (histogram_value value, unsigned tag, unsigned base)
ref_ptr = force_gimple_operand_bsi (&bsi,
build_addr (ref, current_function_decl),
- true, NULL_TREE);
+ true, NULL_TREE, true, BSI_SAME_STMT);
val = prepare_instrumented_value (&bsi, value);
args = tree_cons (NULL_TREE, ref_ptr,
tree_cons (NULL_TREE, val,
diff --git a/gcc/tree-scalar-evolution.c b/gcc/tree-scalar-evolution.c
index 9bd122adc47..2c425ba710c 100644
--- a/gcc/tree-scalar-evolution.c
+++ b/gcc/tree-scalar-evolution.c
@@ -3013,7 +3013,8 @@ scev_const_prop (void)
{
block_stmt_iterator dest = bsi;
bsi_insert_before (&dest, ass, BSI_NEW_STMT);
- def = force_gimple_operand_bsi (&dest, def, false, NULL_TREE);
+ def = force_gimple_operand_bsi (&dest, def, false, NULL_TREE,
+ true, BSI_SAME_STMT);
}
TREE_OPERAND (ass, 1) = def;
update_stmt (ass);
diff --git a/gcc/tree-ssa-address.c b/gcc/tree-ssa-address.c
index 9dbc399d2cd..f5af163714e 100644
--- a/gcc/tree-ssa-address.c
+++ b/gcc/tree-ssa-address.c
@@ -477,11 +477,12 @@ static void
gimplify_mem_ref_parts (block_stmt_iterator *bsi, struct mem_address *parts)
{
if (parts->base)
- parts->base = force_gimple_operand_bsi (bsi, parts->base,
- true, NULL_TREE);
+ parts->base = force_gimple_operand_bsi (bsi, parts->base, true, NULL_TREE,
+ true, BSI_SAME_STMT);
if (parts->index)
parts->index = force_gimple_operand_bsi (bsi, parts->index,
- true, NULL_TREE);
+ true, NULL_TREE,
+ true, BSI_SAME_STMT);
}
/* Creates and returns a TARGET_MEM_REF for address ADDR. If necessary
@@ -511,7 +512,8 @@ create_mem_ref (block_stmt_iterator *bsi, tree type,
parts.index = force_gimple_operand_bsi (bsi,
build2 (MULT_EXPR, addr_type,
parts.index, parts.step),
- true, NULL_TREE);
+ true, NULL_TREE,
+ true, BSI_SAME_STMT);
parts.step = NULL_TREE;
mem_ref = create_mem_ref_raw (type, &parts);
@@ -530,7 +532,8 @@ create_mem_ref (block_stmt_iterator *bsi, tree type,
parts.base = force_gimple_operand_bsi (bsi,
build2 (PLUS_EXPR, addr_type,
parts.base, tmp),
- true, NULL_TREE);
+ true, NULL_TREE,
+ true, BSI_SAME_STMT);
else
{
parts.index = parts.base;
@@ -554,7 +557,8 @@ create_mem_ref (block_stmt_iterator *bsi, tree type,
build2 (PLUS_EXPR, addr_type,
parts.base,
parts.index),
- true, NULL_TREE);
+ true, NULL_TREE,
+ true, BSI_SAME_STMT);
else
parts.index = parts.base;
parts.base = NULL_TREE;
@@ -572,7 +576,8 @@ create_mem_ref (block_stmt_iterator *bsi, tree type,
build2 (PLUS_EXPR, addr_type,
parts.index,
parts.offset),
- true, NULL_TREE);
+ true, NULL_TREE,
+ true, BSI_SAME_STMT);
else
parts.index = parts.offset, bsi;
diff --git a/gcc/tree-ssa-alias.c b/gcc/tree-ssa-alias.c
index d3c5700c873..e57a59a32bb 100644
--- a/gcc/tree-ssa-alias.c
+++ b/gcc/tree-ssa-alias.c
@@ -1618,6 +1618,42 @@ create_alias_map_for (tree var, struct alias_info *ai)
ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
}
+/* Removes phi nodes for variables in VARS. */
+
+static void
+remove_phis_for (bitmap vars)
+{
+ basic_block bb;
+ tree phi, prev, next, var;
+
+ FOR_EACH_BB (bb)
+ {
+ prev = NULL_TREE;
+ for (phi = phi_nodes (bb); phi; phi = next)
+ {
+ next = PHI_CHAIN (phi);
+ var = SSA_NAME_VAR (PHI_RESULT (phi));
+ if (!bitmap_bit_p (vars, DECL_UID (var)))
+ {
+ prev = phi;
+ continue;
+ }
+
+#ifdef ENABLE_CHECKING
+ {
+ int i;
+
+ /* Verify that all the arguments of the phi node are the uses
+ of VAR. */
+ for (i = 0; i < PHI_NUM_ARGS (phi); i++)
+ gcc_assert (SSA_NAME_VAR (PHI_ARG_DEF (phi, i)) == var);
+ }
+#endif
+
+ remove_phi_node (phi, prev);
+ }
+ }
+}
/* Create memory tags for all the dereferenced pointers and build the
ADDRESSABLE_VARS and POINTERS arrays used for building the may-alias
@@ -1633,6 +1669,7 @@ setup_pointers_and_addressables (struct alias_info *ai)
tree var;
VEC (tree, heap) *varvec = NULL;
safe_referenced_var_iterator srvi;
+ bitmap new_gimple_regs = BITMAP_ALLOC (NULL);
/* Size up the arrays ADDRESSABLE_VARS and POINTERS. */
num_addressable_vars = num_pointers = 0;
@@ -1722,7 +1759,11 @@ setup_pointers_and_addressables (struct alias_info *ai)
addressable bit, so that it can be optimized as a
regular variable. */
if (okay_to_mark)
- mark_non_addressable (var);
+ {
+ mark_non_addressable (var);
+ if (is_gimple_reg (var))
+ bitmap_set_bit (new_gimple_regs, DECL_UID (var));
+ }
}
}
@@ -1796,6 +1837,16 @@ setup_pointers_and_addressables (struct alias_info *ai)
}
}
VEC_free (tree, heap, varvec);
+
+ /* We want to remove the phi nodes for variables that used to be addressable,
+ but are now gimple registers. There are usually many superfluous phis for
+ such variables; also, keeping these nodes would make the variables appear
+ to be live in a greater part of the cfg than they really are, which causes
+ problems for the omp_expand pass. */
+
+ if (!bitmap_empty_p (new_gimple_regs))
+ remove_phis_for (new_gimple_regs);
+ BITMAP_FREE (new_gimple_regs);
}
@@ -2124,6 +2175,12 @@ is_escape_site (tree stmt, struct alias_info *ai)
return ESCAPE_TO_CALL;
}
+ else if (TREE_CODE (stmt) == OMP_PARALLEL)
+ {
+ /* OMP_PARALLEL expands to a call whose argument is address of
+ OMP_PARALLEL_DATA_ARG. */
+ return ESCAPE_TO_CALL;
+ }
else if (TREE_CODE (stmt) == ASM_EXPR)
return ESCAPE_TO_ASM;
else if (TREE_CODE (stmt) == MODIFY_EXPR)
diff --git a/gcc/tree-ssa-live.c b/gcc/tree-ssa-live.c
index a5fe403ef53..3c421a76034 100644
--- a/gcc/tree-ssa-live.c
+++ b/gcc/tree-ssa-live.c
@@ -40,6 +40,12 @@ Boston, MA 02110-1301, USA. */
#include "toplev.h"
#include "vecprim.h"
+/* Records partition for a variable. */
+htab_t var_partition_map;
+
+/* Records variables seen by out of ssa pass. */
+bitmap out_of_ssa;
+
static void live_worklist (tree_live_info_p, int *, int);
static tree_live_info_p new_tree_live_info (var_map);
static inline void set_if_valid (var_map, bitmap, tree);
@@ -50,6 +56,26 @@ static inline void add_conflicts_if_valid (tpa_p, conflict_graph,
var_map, bitmap, tree);
static partition_pair_p find_partition_pair (coalesce_list_p, int, int, bool);
+/* Equality and hash functions for the hash tables that map variables to
+ partitions (used by root_var_init and var_partition_map). */
+
+static int
+int_int_map_eq (const void *aa, const void *bb)
+{
+ struct int_int_map *a = (struct int_int_map *) aa;
+ struct int_int_map *b = (struct int_int_map *) bb;
+
+ return a->from == b->from;
+}
+
+static hashval_t
+int_int_map_hash (const void *aa)
+{
+ struct int_int_map *a = (struct int_int_map *) aa;
+
+ return (hashval_t) a->from;
+}
+
/* This is where the mapping from SSA version number to real storage variable
is tracked.
@@ -100,6 +126,14 @@ delete_var_map (var_map map)
free (map);
}
+/* Free mapping from vars to partitions. */
+
+void
+delete_var_partition_map (void)
+{
+ htab_delete (var_partition_map);
+ BITMAP_FREE (out_of_ssa);
+}
/* This function will combine the partitions in MAP for VAR1 and VAR2. It
Returns the partition which represents the new partition. If the two
@@ -276,26 +310,27 @@ compact_var_map (var_map map, int flags)
void
change_partition_var (var_map map, tree var, int part)
{
- var_ann_t ann;
+ struct int_int_map *part_map;
gcc_assert (TREE_CODE (var) != SSA_NAME);
- ann = var_ann (var);
- ann->out_of_ssa_tag = 1;
- VAR_ANN_PARTITION (ann) = part;
+ part_map = int_int_map_find_or_insert (var_partition_map, DECL_UID (var));
+ bitmap_set_bit (out_of_ssa, DECL_UID (var));
+ part_map->to = part;
if (map->compact_to_partition)
map->partition_to_var[map->compact_to_partition[part]] = var;
}
-static inline void mark_all_vars_used (tree *);
+static inline void mark_all_vars_used (tree *, bitmap);
/* Helper function for mark_all_vars_used, called via walk_tree. */
static tree
mark_all_vars_used_1 (tree *tp, int *walk_subtrees,
- void *data ATTRIBUTE_UNUSED)
+ void *data)
{
tree t = *tp;
+ bitmap used = data;
if (TREE_CODE (t) == SSA_NAME)
t = SSA_NAME_VAR (t);
@@ -304,9 +339,9 @@ mark_all_vars_used_1 (tree *tp, int *walk_subtrees,
fields that do not contain vars. */
if (TREE_CODE (t) == TARGET_MEM_REF)
{
- mark_all_vars_used (&TMR_SYMBOL (t));
- mark_all_vars_used (&TMR_BASE (t));
- mark_all_vars_used (&TMR_INDEX (t));
+ mark_all_vars_used (&TMR_SYMBOL (t), used);
+ mark_all_vars_used (&TMR_BASE (t), used);
+ mark_all_vars_used (&TMR_INDEX (t), used);
*walk_subtrees = 0;
return NULL;
}
@@ -314,7 +349,7 @@ mark_all_vars_used_1 (tree *tp, int *walk_subtrees,
/* Only need to mark VAR_DECLS; parameters and return results are not
eliminated as unused. */
if (TREE_CODE (t) == VAR_DECL)
- set_is_used (t);
+ bitmap_set_bit (used, DECL_UID (t));
if (IS_TYPE_OR_DECL_P (t))
*walk_subtrees = 0;
@@ -326,9 +361,9 @@ mark_all_vars_used_1 (tree *tp, int *walk_subtrees,
eliminated during the tree->rtl conversion process. */
static inline void
-mark_all_vars_used (tree *expr_p)
+mark_all_vars_used (tree *expr_p, bitmap used)
{
- walk_tree (expr_p, mark_all_vars_used_1, NULL, NULL);
+ walk_tree (expr_p, mark_all_vars_used_1, used, NULL);
}
@@ -338,16 +373,8 @@ void
remove_unused_locals (void)
{
basic_block bb;
- tree t, *cell;
-
- /* Assume all locals are unused. */
- for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
- {
- tree var = TREE_VALUE (t);
- if (TREE_CODE (var) != FUNCTION_DECL
- && var_ann (var))
- var_ann (var)->used = false;
- }
+ tree *cell;
+ bitmap used_vars = BITMAP_ALLOC (NULL);
/* Walk the CFG marking all referenced symbols. */
FOR_EACH_BB (bb)
@@ -357,7 +384,7 @@ remove_unused_locals (void)
/* Walk the statements. */
for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
- mark_all_vars_used (bsi_stmt_ptr (bsi));
+ mark_all_vars_used (bsi_stmt_ptr (bsi), used_vars);
for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
{
@@ -369,12 +396,12 @@ remove_unused_locals (void)
continue;
def = PHI_RESULT (phi);
- mark_all_vars_used (&def);
+ mark_all_vars_used (&def, used_vars);
FOR_EACH_PHI_ARG (arg_p, phi, i, SSA_OP_ALL_USES)
{
tree arg = USE_FROM_PTR (arg_p);
- mark_all_vars_used (&arg);
+ mark_all_vars_used (&arg, used_vars);
}
}
}
@@ -383,11 +410,9 @@ remove_unused_locals (void)
for (cell = &cfun->unexpanded_var_list; *cell; )
{
tree var = TREE_VALUE (*cell);
- var_ann_t ann;
if (TREE_CODE (var) != FUNCTION_DECL
- && (!(ann = var_ann (var))
- || !ann->used))
+ && !bitmap_bit_p (used_vars, DECL_UID (var)))
{
*cell = TREE_CHAIN (*cell);
continue;
@@ -395,6 +420,17 @@ remove_unused_locals (void)
cell = &TREE_CHAIN (*cell);
}
+
+ BITMAP_FREE (used_vars);
+}
+
+/* Initialize mapping from vars to partitions. */
+
+void
+init_var_partition_map (void)
+{
+ out_of_ssa = BITMAP_ALLOC (NULL);
+ var_partition_map = htab_create (10, int_int_map_hash, int_int_map_eq, free);
}
/* This function looks through the program and uses FLAGS to determine what
@@ -442,8 +478,6 @@ create_ssa_var_map (int flags)
arg = PHI_ARG_DEF (phi, i);
if (TREE_CODE (arg) == SSA_NAME)
register_ssa_partition (map, arg, true);
-
- mark_all_vars_used (&PHI_ARG_DEF_TREE (phi, i));
}
}
@@ -480,8 +514,6 @@ create_ssa_var_map (int flags)
}
#endif /* ENABLE_CHECKING */
-
- mark_all_vars_used (bsi_stmt_ptr (bsi));
}
}
@@ -999,6 +1031,25 @@ tpa_compact (tpa_p tpa)
return last;
}
+/* Finds or creates element with key WHAT in WHERE. */
+
+struct int_int_map *
+int_int_map_find_or_insert (htab_t where, int what)
+{
+ void **loc;
+ struct int_int_map *nelt, temp;
+
+ temp.from = what;
+ loc = htab_find_slot_with_hash (where, &temp, what, INSERT);
+ if (*loc)
+ return *loc;
+
+ nelt = XNEW (struct int_int_map);
+ nelt->from = what;
+ nelt->to = 0;
+ *loc = nelt;
+ return nelt;
+}
/* Initialize a root_var object with SSA partitions from MAP which are based
on each root variable. */
@@ -1010,8 +1061,10 @@ root_var_init (var_map map)
int num_partitions = num_var_partitions (map);
int x, p;
tree t;
- var_ann_t ann;
sbitmap seen;
+ bitmap root_var_processed;
+ htab_t var_root_index;
+ struct int_int_map *root_index;
rv = tpa_init (map);
if (!rv)
@@ -1019,6 +1072,8 @@ root_var_init (var_map map)
seen = sbitmap_alloc (num_partitions);
sbitmap_zero (seen);
+ root_var_processed = BITMAP_ALLOC (NULL);
+ var_root_index = htab_create (10, int_int_map_hash, int_int_map_eq, free);
/* Start at the end and work towards the front. This will provide a list
that is ordered from smallest to largest. */
@@ -1040,30 +1095,25 @@ root_var_init (var_map map)
SET_BIT (seen, p);
if (TREE_CODE (t) == SSA_NAME)
t = SSA_NAME_VAR (t);
- ann = var_ann (t);
- if (ann->root_var_processed)
- {
+ root_index = int_int_map_find_or_insert (var_root_index, DECL_UID (t));
+ if (bitmap_bit_p (root_var_processed, DECL_UID (t)))
+ {
rv->next_partition[p] = VEC_index (int, rv->first_partition,
- VAR_ANN_ROOT_INDEX (ann));
- VEC_replace (int, rv->first_partition, VAR_ANN_ROOT_INDEX (ann), p);
+ root_index->to);
+ VEC_replace (int, rv->first_partition, root_index->to, p);
}
else
{
- ann->root_var_processed = 1;
- VAR_ANN_ROOT_INDEX (ann) = rv->num_trees++;
+ bitmap_set_bit (root_var_processed, DECL_UID (t));
+ root_index->to = rv->num_trees++;
VEC_safe_push (tree, heap, rv->trees, t);
VEC_safe_push (int, heap, rv->first_partition, p);
}
- rv->partition_to_tree_map[p] = VAR_ANN_ROOT_INDEX (ann);
- }
-
- /* Reset the out_of_ssa_tag flag on each variable for later use. */
- for (x = 0; x < rv->num_trees; x++)
- {
- t = VEC_index (tree, rv->trees, x);
- var_ann (t)->root_var_processed = 0;
+ rv->partition_to_tree_map[p] = root_index->to;
}
+ BITMAP_FREE (root_var_processed);
+ htab_delete (var_root_index);
sbitmap_free (seen);
return rv;
}
diff --git a/gcc/tree-ssa-live.h b/gcc/tree-ssa-live.h
index f0c59028e19..0999075475b 100644
--- a/gcc/tree-ssa-live.h
+++ b/gcc/tree-ssa-live.h
@@ -49,8 +49,17 @@ typedef struct _var_map
int *ref_count;
} *var_map;
-#define VAR_ANN_PARTITION(ann) (ann->partition)
-#define VAR_ANN_ROOT_INDEX(ann) (ann->root_index)
+/* Hash table used by root_var_init and mapping from variables to
+ partitions. */
+
+struct int_int_map
+{
+ int from, to;
+};
+
+extern htab_t var_partition_map;
+extern bitmap out_of_ssa;
+struct int_int_map *int_int_map_find_or_insert (htab_t, int);
#define NO_PARTITION -1
@@ -135,7 +144,7 @@ static inline tree version_to_var (var_map map, int version)
static inline int
var_to_partition (var_map map, tree var)
{
- var_ann_t ann;
+ struct int_int_map *part_map;
int part;
if (TREE_CODE (var) == SSA_NAME)
@@ -146,9 +155,12 @@ var_to_partition (var_map map, tree var)
}
else
{
- ann = var_ann (var);
- if (ann->out_of_ssa_tag)
- part = VAR_ANN_PARTITION (ann);
+ if (bitmap_bit_p (out_of_ssa, DECL_UID (var)))
+ {
+ part_map = int_int_map_find_or_insert (var_partition_map,
+ DECL_UID (var));
+ part = part_map->to;
+ }
else
part = NO_PARTITION;
}
@@ -713,5 +725,7 @@ extern conflict_graph build_tree_conflict_graph (tree_live_info_p, tpa_p,
extern void coalesce_tpa_members (tpa_p tpa, conflict_graph graph, var_map map,
coalesce_list_p cl, FILE *);
+void init_var_partition_map (void);
+void delete_var_partition_map (void);
#endif /* _TREE_SSA_LIVE_H */
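
The net effect of the tree-ssa-live changes is to move the out_of_ssa_tag and partition fields out of the per-variable annotations into a shared bitmap plus a DECL_UID-keyed map, so the out-of-SSA machinery can also be applied to a freshly split function whose variables carry no annotations. A rough, self-contained sketch of the resulting lookup logic, with plain arrays standing in for the bitmap and for var_partition_map/int_int_map:

/* Sketch of the new var_to_partition logic.  The arrays below are
   stand-ins for the out_of_ssa bitmap and the var_partition_map hash
   table; they are not the real GCC interfaces.  */
#include <stdio.h>

#define NO_PARTITION -1
#define MAX_UID 256

static int out_of_ssa_bit[MAX_UID];   /* bitmap_set_bit / bitmap_bit_p */
static int partition_of[MAX_UID];     /* int_int_map_find_or_insert ()->to */

static void
change_partition_var (int uid, int part)
{
  out_of_ssa_bit[uid] = 1;
  partition_of[uid] = part;
}

static int
var_to_partition (int uid)
{
  return out_of_ssa_bit[uid] ? partition_of[uid] : NO_PARTITION;
}

int
main (void)
{
  printf ("%d\n", var_to_partition (7));   /* -1: not yet taken out of SSA */
  change_partition_var (7, 3);
  printf ("%d\n", var_to_partition (7));   /* 3 */
  return 0;
}
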
diff --git a/gcc/tree-ssa-loop-prefetch.c b/gcc/tree-ssa-loop-prefetch.c
index f9fe2d30c8a..180b7146b3a 100644
--- a/gcc/tree-ssa-loop-prefetch.c
+++ b/gcc/tree-ssa-loop-prefetch.c
@@ -832,7 +832,8 @@ issue_prefetch_ref (struct mem_ref *ref, unsigned unroll_factor, unsigned ahead)
n_prefetches = ((unroll_factor + ref->prefetch_mod - 1)
/ ref->prefetch_mod);
addr_base = build_fold_addr_expr_with_type (ref->mem, ptr_type_node);
- addr_base = force_gimple_operand_bsi (&bsi, unshare_expr (addr_base), true, NULL);
+ addr_base = force_gimple_operand_bsi (&bsi, unshare_expr (addr_base), true, NULL,
+ true, BSI_SAME_STMT);
for (ap = 0; ap < n_prefetches; ap++)
{
@@ -840,7 +841,8 @@ issue_prefetch_ref (struct mem_ref *ref, unsigned unroll_factor, unsigned ahead)
delta = (ahead + ap * ref->prefetch_mod) * ref->group->step;
addr = fold_build2 (PLUS_EXPR, ptr_type_node,
addr_base, build_int_cst (ptr_type_node, delta));
- addr = force_gimple_operand_bsi (&bsi, unshare_expr (addr), true, NULL);
+ addr = force_gimple_operand_bsi (&bsi, unshare_expr (addr), true, NULL,
+ true, BSI_SAME_STMT);
/* Create the prefetch instruction. */
write_p = ref->write_p ? integer_one_node : integer_zero_node;
diff --git a/gcc/tree-ssa-operands.c b/gcc/tree-ssa-operands.c
index 0e931ca74a9..2efa9062a5b 100644
--- a/gcc/tree-ssa-operands.c
+++ b/gcc/tree-ssa-operands.c
@@ -2045,21 +2045,72 @@ get_expr_operands (tree stmt, tree *expr_p, int flags)
return;
}
+ case OMP_FOR:
+ {
+ tree init = OMP_FOR_INIT (expr);
+ tree cond = OMP_FOR_COND (expr);
+ tree incr = OMP_FOR_INCR (expr);
+ tree c, clauses = OMP_FOR_CLAUSES (stmt);
+
+ get_expr_operands (stmt, &TREE_OPERAND (init, 0), opf_is_def);
+ get_expr_operands (stmt, &TREE_OPERAND (init, 1), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (cond, 1), opf_none);
+ get_expr_operands (stmt, &TREE_OPERAND (TREE_OPERAND (incr, 1), 1),
+ opf_none);
+
+ c = find_omp_clause (clauses, OMP_CLAUSE_SCHEDULE);
+ if (c)
+ get_expr_operands (stmt, &OMP_CLAUSE_SCHEDULE_CHUNK_EXPR (c),
+ opf_none);
+ return;
+ }
+
+ case OMP_CONTINUE:
+ {
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 0), opf_is_def);
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 1), opf_none);
+ return;
+ }
+
+ case OMP_PARALLEL:
+ {
+ tree c, clauses = OMP_PARALLEL_CLAUSES (stmt);
+
+ if (OMP_PARALLEL_DATA_ARG (stmt))
+ {
+ get_expr_operands (stmt, &OMP_PARALLEL_DATA_ARG (stmt), opf_none);
+ add_to_addressable_set (OMP_PARALLEL_DATA_ARG (stmt),
+ &s_ann->addresses_taken);
+ }
+
+ c = find_omp_clause (clauses, OMP_CLAUSE_IF);
+ if (c)
+ get_expr_operands (stmt, &OMP_CLAUSE_IF_EXPR (c), opf_none);
+ c = find_omp_clause (clauses, OMP_CLAUSE_NUM_THREADS);
+ if (c)
+ get_expr_operands (stmt, &OMP_CLAUSE_NUM_THREADS_EXPR (c), opf_none);
+ return;
+ }
+
+ case OMP_SECTIONS:
+ {
+ get_expr_operands (stmt, &OMP_SECTIONS_CONTROL (expr), opf_is_def);
+ return;
+ }
+
case BLOCK:
case FUNCTION_DECL:
case EXC_PTR_EXPR:
case FILTER_EXPR:
case LABEL_DECL:
case CONST_DECL:
- case OMP_PARALLEL:
- case OMP_SECTIONS:
- case OMP_FOR:
case OMP_SINGLE:
case OMP_MASTER:
case OMP_ORDERED:
case OMP_CRITICAL:
case OMP_RETURN:
- case OMP_CONTINUE:
+ case OMP_SECTION:
+ case OMP_SECTIONS_SWITCH:
/* Expressions that make no memory references. */
return;
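
To make the new operand scanning concrete, consider a hypothetical user loop such as the one below. After this change the loop bounds and increment, the schedule chunk expression, and the if/num_threads expressions are real GIMPLE operands of the OMP_FOR and OMP_PARALLEL statements, so the SSA machinery sees their uses instead of treating the directives as opaque.

/* Hypothetical example; the marked expressions become operands of the
   corresponding OMP_* statements after this change.  Compile with
   -fopenmp.  */
void
scale (int n, int nthreads, double *a)
{
  int i;
#pragma omp parallel num_threads (nthreads) if (n > 64) /* OMP_PARALLEL clauses */
  {
#pragma omp for schedule (dynamic, n / 8)    /* chunk expr scanned via the clause */
    for (i = 0; i < n; i++)                  /* init/cond/incr operands */
      a[i] = a[i] * 2.0;
  }
}
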
diff --git a/gcc/tree-ssa-reassoc.c b/gcc/tree-ssa-reassoc.c
index 477d8c8e407..0eac6ca0d1f 100644
--- a/gcc/tree-ssa-reassoc.c
+++ b/gcc/tree-ssa-reassoc.c
@@ -1050,7 +1050,7 @@ negate_value (tree tonegate, block_stmt_iterator *bsi)
tonegate = fold_build1 (NEGATE_EXPR, TREE_TYPE (tonegate), tonegate);
resultofnegate = force_gimple_operand_bsi (bsi, tonegate, true,
- NULL_TREE);
+ NULL_TREE, true, BSI_SAME_STMT);
VEC_safe_push (tree, heap, broken_up_subtracts, resultofnegate);
return resultofnegate;
diff --git a/gcc/tree.def b/gcc/tree.def
index ffc772fa173..7ad27d07a2f 100644
--- a/gcc/tree.def
+++ b/gcc/tree.def
@@ -987,8 +987,14 @@ DEFTREECODE (OMP_FOR, "omp_for", tcc_statement, 6)
/* OpenMP - #pragma omp sections [clause1 ... clauseN]
Operand 0: OMP_SECTIONS_BODY: Sections body.
- Operand 1: OMP_SECTIONS_CLAUSES: List of clauses. */
-DEFTREECODE (OMP_SECTIONS, "omp_sections", tcc_statement, 2)
+ Operand 1: OMP_SECTIONS_CLAUSES: List of clauses.
+ Operand 2: OMP_SECTIONS_CONTROL: The control variable used for deciding
+ which of the sections to execute. */
+DEFTREECODE (OMP_SECTIONS, "omp_sections", tcc_statement, 3)
+
+/* This tree immediately follows OMP_SECTIONS, and represents the switch
+ used to decide which branch is taken. */
+DEFTREECODE (OMP_SECTIONS_SWITCH, "omp_sections_switch", tcc_statement, 0)
/* OpenMP - #pragma omp single
Operand 0: OMP_SINGLE_BODY: Single section body.
@@ -1016,8 +1022,9 @@ DEFTREECODE (OMP_CRITICAL, "omp_critical", tcc_statement, 2)
DEFTREECODE (OMP_RETURN, "omp_return", tcc_statement, 0)
/* OpenMP - An intermediate tree code to mark the location of the
- loop or sections iteration in the partially lowered code. */
-DEFTREECODE (OMP_CONTINUE, "omp_continue", tcc_statement, 0)
+ loop or sections iteration in the partially lowered code.
+ The arguments are definition and use of the control variable. */
+DEFTREECODE (OMP_CONTINUE, "omp_continue", tcc_statement, 2)
/* OpenMP - #pragma omp atomic
Operand 0: The address at which the atomic operation is to be performed.
diff --git a/gcc/tree.h b/gcc/tree.h
index 72a8e46bd03..9da85d74292 100644
--- a/gcc/tree.h
+++ b/gcc/tree.h
@@ -173,6 +173,7 @@ extern const enum tree_code_class tree_code_type[];
(TREE_CODE (NODE) == OMP_PARALLEL \
|| TREE_CODE (NODE) == OMP_FOR \
|| TREE_CODE (NODE) == OMP_SECTIONS \
+ || TREE_CODE (NODE) == OMP_SECTIONS_SWITCH \
|| TREE_CODE (NODE) == OMP_SINGLE \
|| TREE_CODE (NODE) == OMP_SECTION \
|| TREE_CODE (NODE) == OMP_MASTER \
@@ -1571,6 +1572,7 @@ struct tree_constructor GTY(())
#define OMP_SECTIONS_BODY(NODE) TREE_OPERAND (OMP_SECTIONS_CHECK (NODE), 0)
#define OMP_SECTIONS_CLAUSES(NODE) TREE_OPERAND (OMP_SECTIONS_CHECK (NODE), 1)
+#define OMP_SECTIONS_CONTROL(NODE) TREE_OPERAND (OMP_SECTIONS_CHECK (NODE), 2)
#define OMP_SECTION_BODY(NODE) TREE_OPERAND (OMP_SECTION_CHECK (NODE), 0)