-rw-r--r--  gcc/ChangeLog           |  41
-rw-r--r--  gcc/cfgbuild.c          |   4
-rw-r--r--  gcc/cfgexpand.c         |   2
-rw-r--r--  gcc/combine.c           |   2
-rw-r--r--  gcc/config/arm/arm.c    |   4
-rw-r--r--  gcc/config/rx/rx.md     |   6
-rw-r--r--  gcc/config/s390/s390.c  |   5
-rw-r--r--  gcc/cse.c               |   2
-rw-r--r--  gcc/except.c            |   2
-rw-r--r--  gcc/function.c          |  10
-rw-r--r--  gcc/function.h          |   4
-rw-r--r--  gcc/gcse.c              |   4
-rw-r--r--  gcc/ipa-pure-const.c    |   6
-rw-r--r--  gcc/lower-subreg.c      |   2
-rw-r--r--  gcc/lto-streamer-in.c   |   1
-rw-r--r--  gcc/lto-streamer-out.c  |   1
-rw-r--r--  gcc/optabs.c            |   4
-rw-r--r--  gcc/postreload-gcse.c   |   2
-rw-r--r--  gcc/postreload.c        |   2
-rw-r--r--  gcc/reload1.c           |   8
-rw-r--r--  gcc/sel-sched-ir.c      |   2
-rw-r--r--  gcc/store-motion.c      |   4
-rw-r--r--  gcc/tree-eh.c           |   8
-rw-r--r--  gcc/tree-inline.c       |  43
-rw-r--r--  gcc/tree-ssa-dce.c      |   4
25 files changed, 120 insertions, 53 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 5831a609b79..ed93893cea4 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,44 @@
+2010-05-25 Eric Botcazou <ebotcazou@adacore.com>
+
+ * function.h (struct function): Add can_throw_non_call_exceptions bit.
+ * lto-streamer-in.c (input_function): Stream it in.
+ * lto-streamer-out.c (output_function): Stream it out.
+ * function.c (allocate_struct_function): Set it.
+ (expand_function_end): Substitute cfun->can_throw_non_call_exceptions
+ for flag_non_call_exceptions.
+ * cfgbuild.c (control_flow_insn_p): Likewise.
+ (make_edges): Likewise.
+ * cfgexpand.c (expand_stack_alignment): Likewise.
+ * combine.c (distribute_notes): Likewise.
+ * cse.c (cse_extended_basic_block): Likewise.
+ * except.c (insn_could_throw_p): Likewise.
+ * gcse.c (simple_mem): Likewise.
+ * ipa-pure-const.c (check_call): Likewise.
+ (check_stmt): Likewise.
+ * lower-subreg.c (decompose_multiword_subregs): Likewise.
+ * optabs.c (emit_libcall_block): Likewise.
+ (prepare_cmp_insn): Likewise.
+ * postreload-gcse.c (eliminate_partially_redundant_loads): Likewise.
+ * postreload.c (rest_of_handle_postreload): Likewise.
+ * reload1.c (reload_as_needed): Likewise.
+ (emit_input_reload_insns): Likewise.
+ (emit_output_reload_insns): Likewise.
+ (fixup_abnormal_edges): Likewise.
+ * sel-sched-ir.c (init_global_and_expr_for_insn): Likewise.
+ * store-motion.c (find_moveable_store): Likewise.
+ * tree-eh.c (stmt_could_throw_p): Likewise.
+ (tree_could_throw_p): Likewise.
+ * tree-ssa-dce.c (mark_stmt_if_obviously_necessary): Likewise.
+ * config/arm/arm.c (arm_expand_prologue): Likewise.
+ (thumb1_expand_prologue): Likewise.
+ * config/rx/rx.md (cbranchsf4): Likewise.
+ (cmpsf): Likewise.
+ * config/s390/s390.c (s390_emit_prologue): Likewise.
+ * tree-inline.c (initialize_cfun): Copy can_throw_non_call_exceptions.
+ (inline_forbidden_into_p): New predicate.
+ (expand_call_inline): Use it to forbid inlining.
+ (tree_can_inline_p): Likewise.
+
2010-05-25 Steven Bosscher <steven@gcc.gnu.org>
* config/i386/i386-c.c: Do not include rtl.h.
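[Annotation, not part of the patch.]  Nearly every hunk below performs the same mechanical substitution: a test of the global flag_non_call_exceptions becomes a test of the new per-function bit, so each pass consults the setting of the function actually being compiled (and, after LTO streaming, the setting that function was originally compiled with).  A minimal sketch of the pattern, modeled on the insn_could_throw_p change further down; the helper name here is made up:

/* Sketch only: how a pass now asks whether a non-call insn can throw.
   'cfun' is GCC's pointer to the current function.  */
static bool
example_insn_could_throw_p (const_rtx insn)
{
  if (CALL_P (insn))
    return true;
  /* Was: flag_non_call_exceptions.  */
  if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
    return may_trap_p (PATTERN (insn));
  return false;
}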
diff --git a/gcc/cfgbuild.c b/gcc/cfgbuild.c
index 79b6183bfba..c53450ca553 100644
--- a/gcc/cfgbuild.c
+++ b/gcc/cfgbuild.c
@@ -112,7 +112,7 @@ control_flow_insn_p (const_rtx insn)
if (GET_CODE (PATTERN (insn)) == TRAP_IF
&& XEXP (PATTERN (insn), 0) == const1_rtx)
return true;
- if (!flag_non_call_exceptions)
+ if (!cfun->can_throw_non_call_exceptions)
return false;
break;
@@ -333,7 +333,7 @@ make_edges (basic_block min, basic_block max, int update_p)
handler for this CALL_INSN. If we're handling non-call
exceptions then any insn can reach any of the active handlers.
Also mark the CALL_INSN as reaching any nonlocal goto handler. */
- else if (code == CALL_INSN || flag_non_call_exceptions)
+ else if (code == CALL_INSN || cfun->can_throw_non_call_exceptions)
{
/* Add any appropriate EH edges. */
rtl_make_eh_edge (edge_cache, bb, insn);
diff --git a/gcc/cfgexpand.c b/gcc/cfgexpand.c
index 3a36ee17944..cc8ff336a42 100644
--- a/gcc/cfgexpand.c
+++ b/gcc/cfgexpand.c
@@ -3683,7 +3683,7 @@ expand_stack_alignment (void)
stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
exceptions since callgraph doesn't collect incoming stack alignment
in this case. */
- if (flag_non_call_exceptions
+ if (cfun->can_throw_non_call_exceptions
&& PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
else
diff --git a/gcc/combine.c b/gcc/combine.c
index 9809565a77c..e9389d7b2b7 100644
--- a/gcc/combine.c
+++ b/gcc/combine.c
@@ -12734,7 +12734,7 @@ distribute_notes (rtx notes, rtx from_insn, rtx i3, rtx i2, rtx elim_i2,
place = i2;
else
{
- gcc_assert (flag_non_call_exceptions);
+ gcc_assert (cfun->can_throw_non_call_exceptions);
if (may_trap_p (i3))
place = i3;
else if (i2 && may_trap_p (i2))
diff --git a/gcc/config/arm/arm.c b/gcc/config/arm/arm.c
index 88f21bc289f..6c886dccaef 100644
--- a/gcc/config/arm/arm.c
+++ b/gcc/config/arm/arm.c
@@ -15005,7 +15005,7 @@ arm_expand_prologue (void)
using the EABI unwinder, to prevent faulting instructions from being
swapped with a stack adjustment. */
if (crtl->profile || !TARGET_SCHED_PROLOG
- || (ARM_EABI_UNWIND_TABLES && flag_non_call_exceptions))
+ || (ARM_EABI_UNWIND_TABLES && cfun->can_throw_non_call_exceptions))
emit_insn (gen_blockage ());
/* If the link register is being kept alive, with the return address in it,
@@ -19541,7 +19541,7 @@ thumb1_expand_prologue (void)
using the EABI unwinder, to prevent faulting instructions from being
swapped with a stack adjustment. */
if (crtl->profile || !TARGET_SCHED_PROLOG
- || (ARM_EABI_UNWIND_TABLES && flag_non_call_exceptions))
+ || (ARM_EABI_UNWIND_TABLES && cfun->can_throw_non_call_exceptions))
emit_insn (gen_blockage ());
cfun->machine->lr_save_eliminated = !thumb_force_lr_save ();
diff --git a/gcc/config/rx/rx.md b/gcc/config/rx/rx.md
index 0e76a5e6412..274ce6f3c66 100644
--- a/gcc/config/rx/rx.md
+++ b/gcc/config/rx/rx.md
@@ -176,7 +176,7 @@
[(cc0) (const_int 0)])
(label_ref (match_operand 3 ""))
(pc)))]
- "ALLOW_RX_FPU_INSNS && ! flag_non_call_exceptions"
+ "ALLOW_RX_FPU_INSNS && !cfun->can_throw_non_call_exceptions"
""
)
@@ -211,7 +211,7 @@
(set_attr "length" "2,2,3,4,5,6,5")]
)
-;; This pattern is disabled when -fnon-call-exceptions is active because
+;; This pattern is disabled if the function can throw non-call exceptions because
;; it could generate a floating point exception, which would introduce an
;; edge into the flow graph between this insn and the conditional branch
;; insn to follow, thus breaking the cc0 relationship. Run the g++ test
@@ -220,7 +220,7 @@
[(set (cc0)
(compare:CC (match_operand:SF 0 "register_operand" "r,r,r")
(match_operand:SF 1 "rx_source_operand" "r,i,Q")))]
- "ALLOW_RX_FPU_INSNS && ! flag_non_call_exceptions"
+ "ALLOW_RX_FPU_INSNS && !cfun->can_throw_non_call_exceptions"
{
rx_float_compare_mode = true;
return "fcmp\t%1, %0";
diff --git a/gcc/config/s390/s390.c b/gcc/config/s390/s390.c
index 2972545eac2..412486ba5c8 100644
--- a/gcc/config/s390/s390.c
+++ b/gcc/config/s390/s390.c
@@ -7966,11 +7966,10 @@ s390_emit_prologue (void)
insn = emit_insn (gen_move_insn (addr, temp_reg));
}
- /* If we support asynchronous exceptions (e.g. for Java),
+ /* If we support non-call exceptions (e.g. for Java),
we need to make sure the backchain pointer is set up
before any possibly trapping memory access. */
-
- if (TARGET_BACKCHAIN && flag_non_call_exceptions)
+ if (TARGET_BACKCHAIN && cfun->can_throw_non_call_exceptions)
{
addr = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
emit_clobber (addr);
diff --git a/gcc/cse.c b/gcc/cse.c
index 70e8daf3411..e45c9b25b16 100644
--- a/gcc/cse.c
+++ b/gcc/cse.c
@@ -6388,7 +6388,7 @@ cse_extended_basic_block (struct cse_basic_block_data *ebb_data)
/* With non-call exceptions, we are not always able to update
the CFG properly inside cse_insn. So clean up possibly
redundant EH edges here. */
- if (flag_non_call_exceptions && have_eh_succ_edges (bb))
+ if (cfun->can_throw_non_call_exceptions && have_eh_succ_edges (bb))
cse_cfg_altered |= purge_dead_edges (bb);
/* If we changed a conditional jump, we may have terminated
diff --git a/gcc/except.c b/gcc/except.c
index d7efa93c8b3..393a8011e0d 100644
--- a/gcc/except.c
+++ b/gcc/except.c
@@ -1619,7 +1619,7 @@ insn_could_throw_p (const_rtx insn)
{
if (CALL_P (insn))
return true;
- if (INSN_P (insn) && flag_non_call_exceptions)
+ if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
return may_trap_p (PATTERN (insn));
return false;
}
diff --git a/gcc/function.c b/gcc/function.c
index 2c87dec9ee8..a39ee7c1848 100644
--- a/gcc/function.c
+++ b/gcc/function.c
@@ -4233,6 +4233,10 @@ allocate_struct_function (tree fndecl, bool abstract_p)
/* Assume all registers in stdarg functions need to be saved. */
cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
cfun->va_list_fpr_size = VA_LIST_MAX_FPR_SIZE;
+
+ /* ??? This could be set on a per-function basis by the front-end
+ but is this worth the hassle? */
+ cfun->can_throw_non_call_exceptions = flag_non_call_exceptions;
}
}
@@ -4246,7 +4250,7 @@ push_struct_function (tree fndecl)
allocate_struct_function (fndecl, false);
}
-/* Reset cfun, and other non-struct-function variables to defaults as
+/* Reset crtl and other non-struct-function variables to defaults as
appropriate for emitting rtl at the start of a function. */
static void
@@ -4778,7 +4782,7 @@ expand_function_end (void)
/* We want to ensure that instructions that may trap are not
moved into the epilogue by scheduling, because we don't
always emit unwind information for the epilogue. */
- if (flag_non_call_exceptions)
+ if (cfun->can_throw_non_call_exceptions)
emit_insn (gen_blockage ());
}
@@ -4924,7 +4928,7 @@ expand_function_end (void)
/* @@@ This is a kludge. We want to ensure that instructions that
may trap are not moved into the epilogue by scheduling, because
we don't always emit unwind information for the epilogue. */
- if (! USING_SJLJ_EXCEPTIONS && flag_non_call_exceptions)
+ if (!USING_SJLJ_EXCEPTIONS && cfun->can_throw_non_call_exceptions)
emit_insn (gen_blockage ());
/* If stack protection is enabled for this function, check the guard. */
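[Annotation, not part of the patch.]  The ??? comment added to allocate_struct_function leaves per-function overrides to the front end, and nothing in this patch exercises that yet.  If a front end wanted finer control, a sketch might look like the following (the helper name is hypothetical; the accessors are existing GCC macros):

/* Hypothetical front-end helper: override the default that
   allocate_struct_function copied from flag_non_call_exceptions.  */
static void
example_set_can_throw_non_call (tree fndecl, bool value)
{
  struct function *fn = DECL_STRUCT_FUNCTION (fndecl);
  if (fn)
    fn->can_throw_non_call_exceptions = value;
}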
diff --git a/gcc/function.h b/gcc/function.h
index 072da580550..deb170dc38c 100644
--- a/gcc/function.h
+++ b/gcc/function.h
@@ -580,6 +580,10 @@ struct GTY(()) function {
unsigned int after_inlining : 1;
unsigned int always_inline_functions_inlined : 1;
+ /* Nonzero if function being compiled can throw synchronous non-call
+ exceptions. */
+ unsigned int can_throw_non_call_exceptions : 1;
+
/* Fields below this point are not set for abstract functions; see
allocate_struct_function. */
diff --git a/gcc/gcse.c b/gcc/gcse.c
index 10f015fa3f0..b0a18689326 100644
--- a/gcc/gcse.c
+++ b/gcc/gcse.c
@@ -4667,9 +4667,9 @@ simple_mem (const_rtx x)
return 0;
/* If we are handling exceptions, we must be careful with memory references
- that may trap. If we are not, the behavior is undefined, so we may just
+ that may trap. If we are not, the behavior is undefined, so we may just
continue. */
- if (flag_non_call_exceptions && may_trap_p (x))
+ if (cfun->can_throw_non_call_exceptions && may_trap_p (x))
return 0;
if (side_effects_p (x))
diff --git a/gcc/ipa-pure-const.c b/gcc/ipa-pure-const.c
index 0bfb7ee51d6..c33b2d3f18b 100644
--- a/gcc/ipa-pure-const.c
+++ b/gcc/ipa-pure-const.c
@@ -340,7 +340,7 @@ check_call (funct_state local, gimple call, bool ipa)
if (gimple_op (call, i)
&& tree_could_throw_p (gimple_op (call, i)))
{
- if (possibly_throws && flag_non_call_exceptions)
+ if (possibly_throws && cfun->can_throw_non_call_exceptions)
{
if (dump_file)
fprintf (dump_file, " operand can throw; looping\n");
@@ -405,7 +405,7 @@ check_call (funct_state local, gimple call, bool ipa)
those bits. */
else if (!ipa || !callee_t)
{
- if (possibly_throws && flag_non_call_exceptions)
+ if (possibly_throws && cfun->can_throw_non_call_exceptions)
{
if (dump_file)
fprintf (dump_file, " can throw; looping\n");
@@ -503,7 +503,7 @@ check_stmt (gimple_stmt_iterator *gsip, funct_state local, bool ipa)
if (gimple_code (stmt) != GIMPLE_CALL
&& stmt_could_throw_p (stmt))
{
- if (flag_non_call_exceptions)
+ if (cfun->can_throw_non_call_exceptions)
{
if (dump_file)
fprintf (dump_file, " can throw; looping");
diff --git a/gcc/lower-subreg.c b/gcc/lower-subreg.c
index 3301a9db18c..160328ccefc 100644
--- a/gcc/lower-subreg.c
+++ b/gcc/lower-subreg.c
@@ -1212,7 +1212,7 @@ decompose_multiword_subregs (void)
basic block and still produce the correct control
flow graph for it. */
gcc_assert (!cfi
- || (flag_non_call_exceptions
+ || (cfun->can_throw_non_call_exceptions
&& can_throw_internal (insn)));
insn = resolve_simple_move (set, insn);
diff --git a/gcc/lto-streamer-in.c b/gcc/lto-streamer-in.c
index 123a7a7aebf..2be1a40e5ae 100644
--- a/gcc/lto-streamer-in.c
+++ b/gcc/lto-streamer-in.c
@@ -1309,6 +1309,7 @@ input_function (tree fn_decl, struct data_in *data_in,
fn->after_tree_profile = bp_unpack_value (bp, 1);
fn->returns_pcc_struct = bp_unpack_value (bp, 1);
fn->returns_struct = bp_unpack_value (bp, 1);
+ fn->can_throw_non_call_exceptions = bp_unpack_value (bp, 1);
fn->always_inline_functions_inlined = bp_unpack_value (bp, 1);
fn->after_inlining = bp_unpack_value (bp, 1);
fn->dont_save_pending_sizes_p = bp_unpack_value (bp, 1);
diff --git a/gcc/lto-streamer-out.c b/gcc/lto-streamer-out.c
index 4d598320046..b7b1bddfb5e 100644
--- a/gcc/lto-streamer-out.c
+++ b/gcc/lto-streamer-out.c
@@ -1877,6 +1877,7 @@ output_function (struct cgraph_node *node)
bp_pack_value (bp, fn->after_tree_profile, 1);
bp_pack_value (bp, fn->returns_pcc_struct, 1);
bp_pack_value (bp, fn->returns_struct, 1);
+ bp_pack_value (bp, fn->can_throw_non_call_exceptions, 1);
bp_pack_value (bp, fn->always_inline_functions_inlined, 1);
bp_pack_value (bp, fn->after_inlining, 1);
bp_pack_value (bp, fn->dont_save_pending_sizes_p, 1);
diff --git a/gcc/optabs.c b/gcc/optabs.c
index 3c5424d2b59..a81d2e660dd 100644
--- a/gcc/optabs.c
+++ b/gcc/optabs.c
@@ -3880,7 +3880,7 @@ emit_libcall_block (rtx insns, rtx target, rtx result, rtx equiv)
/* If we're using non-call exceptions, a libcall corresponding to an
operation that may trap may also trap. */
/* ??? See the comment in front of make_reg_eh_region_note. */
- if (flag_non_call_exceptions && may_trap_p (equiv))
+ if (cfun->can_throw_non_call_exceptions && may_trap_p (equiv))
{
for (insn = insns; insn; insn = NEXT_INSN (insn))
if (CALL_P (insn))
@@ -4126,7 +4126,7 @@ prepare_cmp_insn (rtx x, rtx y, enum rtx_code comparison, rtx size,
/* Don't allow operands to the compare to trap, as that can put the
compare and branch in different basic blocks. */
- if (flag_non_call_exceptions)
+ if (cfun->can_throw_non_call_exceptions)
{
if (may_trap_p (x))
x = force_reg (mode, x);
diff --git a/gcc/postreload-gcse.c b/gcc/postreload-gcse.c
index 5da9f62697d..77897aadf43 100644
--- a/gcc/postreload-gcse.c
+++ b/gcc/postreload-gcse.c
@@ -1200,7 +1200,7 @@ eliminate_partially_redundant_loads (void)
/* Are the operands unchanged since the start of the
block? */
&& oprs_unchanged_p (src, insn, false)
- && !(flag_non_call_exceptions && may_trap_p (src))
+ && !(cfun->can_throw_non_call_exceptions && may_trap_p (src))
&& !side_effects_p (src)
/* Is the expression recorded? */
&& (expr = lookup_expr_in_table (src)) != NULL)
diff --git a/gcc/postreload.c b/gcc/postreload.c
index 6b8f186917f..a03cdfacee3 100644
--- a/gcc/postreload.c
+++ b/gcc/postreload.c
@@ -1590,7 +1590,7 @@ rest_of_handle_postreload (void)
reload_cse_regs (get_insns ());
/* Reload_cse_regs can eliminate potentially-trapping MEMs.
Remove any EH edges associated with them. */
- if (flag_non_call_exceptions)
+ if (cfun->can_throw_non_call_exceptions)
purge_all_dead_edges ();
return 0;
diff --git a/gcc/reload1.c b/gcc/reload1.c
index 504c9e09049..85880f8210c 100644
--- a/gcc/reload1.c
+++ b/gcc/reload1.c
@@ -4299,7 +4299,7 @@ reload_as_needed (int live_known)
subst_reloads (insn);
/* Adjust the exception region notes for loads and stores. */
- if (flag_non_call_exceptions && !CALL_P (insn))
+ if (cfun->can_throw_non_call_exceptions && !CALL_P (insn))
fixup_eh_region_note (insn, prev, next);
/* If this was an ASM, make sure that all the reload insns
@@ -7331,7 +7331,7 @@ emit_input_reload_insns (struct insn_chain *chain, struct reload *rl,
rl->when_needed);
}
- if (flag_non_call_exceptions)
+ if (cfun->can_throw_non_call_exceptions)
copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
/* End this sequence. */
@@ -7551,7 +7551,7 @@ emit_output_reload_insns (struct insn_chain *chain, struct reload *rl,
else
output_reload_insns[rl->opnum] = get_insns ();
- if (flag_non_call_exceptions)
+ if (cfun->can_throw_non_call_exceptions)
copy_reg_eh_region_note_forward (insn, get_insns (), NULL);
end_sequence ();
@@ -9019,7 +9019,7 @@ fixup_abnormal_edges (void)
}
/* We've possibly turned single trapping insn into multiple ones. */
- if (flag_non_call_exceptions)
+ if (cfun->can_throw_non_call_exceptions)
{
sbitmap blocks;
blocks = sbitmap_alloc (last_basic_block);
diff --git a/gcc/sel-sched-ir.c b/gcc/sel-sched-ir.c
index 4647c4793d8..908eb570db8 100644
--- a/gcc/sel-sched-ir.c
+++ b/gcc/sel-sched-ir.c
@@ -2865,7 +2865,7 @@ init_global_and_expr_for_insn (insn_t insn)
|| SCHED_GROUP_P (insn)
|| prologue_epilogue_contains (insn)
/* Exception handling insns are always unique. */
- || (flag_non_call_exceptions && can_throw_internal (insn))
+ || (cfun->can_throw_non_call_exceptions && can_throw_internal (insn))
/* TRAP_IF though have an INSN code is control_flow_insn_p (). */
|| control_flow_insn_p (insn))
force_unique_p = true;
diff --git a/gcc/store-motion.c b/gcc/store-motion.c
index 8f8617cff37..61d0cba8159 100644
--- a/gcc/store-motion.c
+++ b/gcc/store-motion.c
@@ -560,9 +560,9 @@ find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
return;
/* If we are handling exceptions, we must be careful with memory references
- that may trap. If we are not, the behavior is undefined, so we may just
+ that may trap. If we are not, the behavior is undefined, so we may just
continue. */
- if (flag_non_call_exceptions && may_trap_p (dest))
+ if (cfun->can_throw_non_call_exceptions && may_trap_p (dest))
return;
/* Even if the destination cannot trap, the source may. In this case we'd
diff --git a/gcc/tree-eh.c b/gcc/tree-eh.c
index e5d76c8b8ba..77c8cac7b5c 100644
--- a/gcc/tree-eh.c
+++ b/gcc/tree-eh.c
@@ -2526,12 +2526,12 @@ stmt_could_throw_p (gimple stmt)
case GIMPLE_ASSIGN:
case GIMPLE_COND:
- if (!flag_non_call_exceptions)
+ if (!cfun->can_throw_non_call_exceptions)
return false;
return stmt_could_throw_1_p (stmt);
case GIMPLE_ASM:
- if (!flag_non_call_exceptions)
+ if (!cfun->can_throw_non_call_exceptions)
return false;
return gimple_asm_volatile_p (stmt);
@@ -2550,7 +2550,7 @@ tree_could_throw_p (tree t)
return false;
if (TREE_CODE (t) == MODIFY_EXPR)
{
- if (flag_non_call_exceptions
+ if (cfun->can_throw_non_call_exceptions
&& tree_could_trap_p (TREE_OPERAND (t, 0)))
return true;
t = TREE_OPERAND (t, 1);
@@ -2560,7 +2560,7 @@ tree_could_throw_p (tree t)
t = TREE_OPERAND (t, 0);
if (TREE_CODE (t) == CALL_EXPR)
return (call_expr_flags (t) & ECF_NOTHROW) == 0;
- if (flag_non_call_exceptions)
+ if (cfun->can_throw_non_call_exceptions)
return tree_could_trap_p (t);
return false;
}
diff --git a/gcc/tree-inline.c b/gcc/tree-inline.c
index 342b5a5f082..697c6bc269a 100644
--- a/gcc/tree-inline.c
+++ b/gcc/tree-inline.c
@@ -2028,6 +2028,8 @@ initialize_cfun (tree new_fndecl, tree callee_fndecl, gcov_type count)
cfun->stdarg = src_cfun->stdarg;
cfun->dont_save_pending_sizes_p = src_cfun->dont_save_pending_sizes_p;
cfun->after_inlining = src_cfun->after_inlining;
+ cfun->can_throw_non_call_exceptions
+ = src_cfun->can_throw_non_call_exceptions;
cfun->returns_struct = src_cfun->returns_struct;
cfun->returns_pcc_struct = src_cfun->returns_pcc_struct;
cfun->after_tree_profile = src_cfun->after_tree_profile;
@@ -2960,6 +2962,29 @@ inline_forbidden_p (tree fndecl)
return forbidden_p;
}
+/* Return true if CALLEE cannot be inlined into CALLER. */
+
+static bool
+inline_forbidden_into_p (tree caller, tree callee)
+{
+ /* Don't inline if the functions have different EH personalities. */
+ if (DECL_FUNCTION_PERSONALITY (caller)
+ && DECL_FUNCTION_PERSONALITY (callee)
+ && (DECL_FUNCTION_PERSONALITY (caller)
+ != DECL_FUNCTION_PERSONALITY (callee)))
+ return true;
+
+ /* Don't inline if the callee can throw non-call exceptions but the
+ caller cannot. */
+ if (DECL_STRUCT_FUNCTION (callee)
+ && DECL_STRUCT_FUNCTION (callee)->can_throw_non_call_exceptions
+ && !(DECL_STRUCT_FUNCTION (caller)
+ && DECL_STRUCT_FUNCTION (caller)->can_throw_non_call_exceptions))
+ return true;
+
+ return false;
+}
+
/* Returns nonzero if FN is a function that does not have any
fundamental inline blocking properties. */
@@ -3622,15 +3647,11 @@ expand_call_inline (basic_block bb, gimple stmt, copy_body_data *id)
cg_edge = cgraph_edge (id->dst_node, stmt);
- /* Don't inline functions with different EH personalities. */
- if (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
- && DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)
- && (DECL_FUNCTION_PERSONALITY (cg_edge->caller->decl)
- != DECL_FUNCTION_PERSONALITY (cg_edge->callee->decl)))
+ /* First check that inlining isn't simply forbidden in this case. */
+ if (inline_forbidden_into_p (cg_edge->caller->decl, cg_edge->callee->decl))
goto egress;
- /* Don't try to inline functions that are not well-suited to
- inlining. */
+ /* Don't try to inline functions that are not well-suited to inlining. */
if (!cgraph_inline_p (cg_edge, &reason))
{
/* If this call was originally indirect, we do not want to emit any
@@ -5180,12 +5201,8 @@ tree_can_inline_p (struct cgraph_edge *e)
caller = e->caller->decl;
callee = e->callee->decl;
- /* We cannot inline a function that uses a different EH personality
- than the caller. */
- if (DECL_FUNCTION_PERSONALITY (caller)
- && DECL_FUNCTION_PERSONALITY (callee)
- && (DECL_FUNCTION_PERSONALITY (caller)
- != DECL_FUNCTION_PERSONALITY (callee)))
+ /* First check that inlining isn't simply forbidden in this case. */
+ if (inline_forbidden_into_p (caller, callee))
{
e->inline_failed = CIF_UNSPECIFIED;
gimple_call_set_cannot_inline (e->call_stmt, true);
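[Annotation, not part of the patch.]  The practical effect of the new inline_forbidden_into_p check: a callee whose can_throw_non_call_exceptions bit is set is no longer inlined into a caller whose bit is clear, because the callee's trapping memory accesses would lose their EH semantics in the caller.  A made-up illustration of the situation now rejected, e.g. under LTO where the bit is streamed per function; file and function names are hypothetical:

/* callee.c, compiled with -fnon-call-exceptions: the load may throw.  */
int deref (int *p) { return *p; }

/* caller.c, compiled without -fnon-call-exceptions: inlining deref here
   would discard the EH edge for the trapping load, so the new predicate
   refuses it (previously only differing EH personalities were checked).  */
extern int deref (int *p);
int use (int *p) { return deref (p); }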
diff --git a/gcc/tree-ssa-dce.c b/gcc/tree-ssa-dce.c
index 00bf012d715..59ccc408e9c 100644
--- a/gcc/tree-ssa-dce.c
+++ b/gcc/tree-ssa-dce.c
@@ -272,10 +272,10 @@ static void
mark_stmt_if_obviously_necessary (gimple stmt, bool aggressive)
{
tree lhs = NULL_TREE;
+
/* With non-call exceptions, we have to assume that all statements could
throw. If a statement may throw, it is inherently necessary. */
- if (flag_non_call_exceptions
- && stmt_could_throw_p (stmt))
+ if (cfun->can_throw_non_call_exceptions && stmt_could_throw_p (stmt))
{
mark_stmt_necessary (stmt, true);
return;