From ae5f545b87fcdd5a7a169ac726a80da6bb279833 Mon Sep 17 00:00:00 2001 From: Alexandre Oliva Date: Tue, 2 Oct 2007 01:42:46 +0000 Subject: * expr.c (expand_expr_real_1): Reject debug exprs. * cfgexpand.c (floor_sdiv_adjust): New fn. (ceil_sdiv_adjust): New fn. (ceil_udiv_adjust): New fn. (round_sdiv_adjust): New fn. (round_udiv_adjust): New fn. (expand_debug_expr): New fn. (expand_gimple_basic_block): Handle debug stmts. * cfgrtl.c (rtl_block_ends_with_call_p): Ignore debug insns. * rtl.h (DEBUG_INSN_P): New macro. (MAY_HAVE_DEBUG_INSNS): New macro. (INSN_P): Accept debug insns. (RTX_FRAME_RELATED_P): Likewise. (INSN_DELETED_P): Likewise. (PAT_VAR_LOCATION_DECL): New macro. (PAT_VAR_LOCATION_LOC): New macro. (NOTE_VAR_LOCATION_DECL): Reimplement. (NOTE_VAR_LOCATION_LOC): Likewise. (NOTE_VAR_LOCATION_STATUS): Likewise. (INSN_VAR_LOCATION): New macro. (INSN_VAR_LOCATION_DECL): Likewise. (INSN_VAR_LOCATION_LOC): Likewise. (INSN_VAR_LOCATION_STATUS): Likewise. (gen_rtx_UNKNOWN_VAR_LOC): New macro. (VAR_LOC_UNKNOWN_P): New macro. (SCHED_GROUP_P): Accept debug insns. (emit_debug_insn_before): Declare. (emit_debug_insn_before_noloc): Likewise. (emit_debug_insn_before_setloc): Likewise. (emit_debug_insn_after): Likewise. (emit_debug_insn_after_noloc): Likewise. (emit_debug_insn_after_setloc): Likewise. (emit_debug_insn): Likewise. (make_debug_insn_raw): Likewise. * recog.c (verify_changes): Accept debug insns. (extract_insn): Handle them. (peephole2_optimize): Skip them. * dce.c (deletable_insn_p): Handle debug insns. * reload1.c (reload): Don't scan subregs in debug insns. (eliminate_regs_in_insn): Handle debug insns. * cse.c (cse_insn): Handle debug insns. (cse_extended_basic_block): Likewise. (insn_live_p): Likewise. * emit-rtl.c (copy_rtx_if_shared_1): Handle debug insns. (reset_used_flags): Likewise. (set_used_flags): Likewise. (active_insn_p): Exclude debug insns. (make_debug_insn_raw): New fn. (emit_insn_before_noloc): Handle debug insns. (emit_jump_insn_before_noloc): Likewise. (emit_call_insn_before_noloc): Likewise. (emit_debug_insn_before_noloc): New fn. (emit_insn_after_noloc): Handle debug insns. (emit_jump_insn_after_noloc): Likewise. (emit_call_insn_after_noloc): Likewise. (emit_debug_insn_after_noloc): New fn. (emit_debug_insn_after_setloc): New fn. (emit_debug_insn_after): New fn. (emit_debug_insn_before_setloc): New fn. (emit_debug_insn_before): New fn. (emit_insn): Handle debug insn. (emit_debug_insn): New fn. (emit_jump_insn): Handle debug insn. (emit_call_insn): Likewise. (emit): Likewise. (emit_copy_of_insn_after): Likewise. * reg-stack.c (subst_stack_regs_pat): Handle var location rtx. * var-tracking.c (MO_LOC_MAIN, MO_LOC_USE): New micro ops. (var_reg_decl_set): New fn. (var_reg_set): Adjust. (var_mem_decl_set): New fn. (var_mem_set): Adjust. (use_type): New fn. (count_uses, add_uses, add_stores): Adjust. (compute_bb_dataflow): Handle new micro ops. (emit_notes_in_bb): Likewise. (vt_initialize): Likewise. (delete_debug_insns): New fn. (vt_debug_insns_local): New fn. (variable_tracking_main): Call it. * final.c (get_attr_length_1): Handle debug insns. * lower-subreg.c (adjust_decomposed_uses): New fn. (resolve_debug): New fn. (decompose_multiword_subregs): Handle debug insns. * print-rtl.c (print_rtx): Likewise. * ifcvt.c (first_active_insn): Skip debug insns. (last_active_insn): Likewise. (cond_exec_process_insns): Handle debug insns. (check_cond_move_block): Likewise. (cond_move_convert_if_block): Likewise. (block_jumps_and_fallthru_p): Likewise. 
(dead_or_predicable): Likewise. * cfgcleanup.c (flow_find_cross_jump): Skip debug insns. * combine.c (create_log_links): Skip debug insns. (combine_instructions): Likewise. (rtx_subst_pair): New struct. (propagate_for_debug_subst): New fn. (propagate_for_debug): New fn. (try_combine): Call it. (distribute_links): Handle debug insns. * df-problems.c (df_lr_bb_local_compute): Likewise. (df_set_note): Reject debug insns. (df_set_dead_notes_for_mw): Add added_notes_p argument. (df_note_bb_compute): Handle debug insns. (df_simulate_uses): Likewise. (df_simulate_one_insn_forwards): Likewise. (df_simulate_one_insn_backwards): Likewise. * df-scan.c (df_insn_rescan_1): Renamed with new argument, from... (df_insn_rescan): ... this. Implement in terms of new name. (df_insn_rescan_debug_internal): New fn. (df_uses_record): Handle debug insns. * haifa-sched.c (contributes_to_priority_p): Likewise. (nondebug_dep_list_size): New fn. (priority, add_jump_dependencies): Use it. (rank_for_schedule): Handle debug insns. (schedule_block): Always accept more debug insns. * local-alloc.c (block_alloc): Handle debug insns. * regrename.c (replace_oldest_value_reg): ??? Test without this (replace_oldest_value_addr): Handle debug insns. (replace_oldest_value_mem): Likewise. (copyprop_hardreg_forward_1): Likewise. * regstat.c (regstat_init_n_sets_and_refs): Skip debug uses. (regstat_bb_compute_ri): Skip debug insns. * sched-rgn.c (add_branch_dependences): Likewise. * sched-vis.c (print_pattern): Handle debug insns. * regmove.c (optimize_reg_copy_1): Don't count debug insns. (fixup_match_2): Likewise. (regmove_optimize): Handle debug insns. (fixup_match_1): Don't count debug insns. Adjust them. * gcse.c (allolc_gcse_mem): Don't allocate uid for debug isnsn. (bypass_conditional_jumps): Skip debug insns. (compute_ld_motion_mems): Likewise. * sched-deps.c (sched_analyze_2): Handle debug insns. (sched_analyze_insn): Likewise. (sched_analyze): Likewise. * init-regs.c (initialize_uninitialized_regs): Skip debug insns. * dse.c (scan_insn): Keep debug insns. * resource.c (mark_target_live_regs): Skip debug insns. * rtlanal.c (canonicalize_condition): Likewise. * df.h (df_insn_rescan_debug_internal): Declare. * dwarf2out.c (mem_loc_descriptor): Ignore sign and zero extend. (loc_descriptor): Likewise. * function.c (instantiate_virtual_regs): Handle debug insns. * rtl.def (DEBUG_INSN): New rtl code. * cfgbuild.c (inside_basic_block_p): Handle debug insns. (control_flow_insn_p): Likewise. * cfglayout.c (duplicate_insn_chain): Likewise. 
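The new cfgexpand.c helpers listed above (floor_sdiv_adjust, ceil_sdiv_adjust, ceil_udiv_adjust, round_sdiv_adjust, round_udiv_adjust) build RTL for the usual identities that recover floor, ceiling and round-to-nearest division from a truncating division and its remainder. As a cross-check of that arithmetic, the standalone sketch below restates the signed identities in plain C; it is not part of the patch, the names floor_div, ceil_div and round_div are illustrative rather than GCC functions, and it assumes C99 truncating / and %.

```c
/* Standalone check of the division-adjustment identities encoded by the
   new cfgexpand.c helpers.  Not GCC code: floor_div, ceil_div and
   round_div are illustrative names, and C's / and % are assumed to
   truncate toward zero (C99).  Compile with -lm.  */
#include <assert.h>
#include <math.h>
#include <stdio.h>
#include <stdlib.h>

/* floor: truncating quotient, minus 1 when the remainder is nonzero
   and the operands have opposite signs (cf. floor_sdiv_adjust).  */
static long
floor_div (long a, long b)
{
  long div = a / b, mod = a % b;
  return div + ((mod != 0 && (a < 0) != (b < 0)) ? -1 : 0);
}

/* ceiling: truncating quotient, plus 1 when the remainder is nonzero
   and the operands have the same sign (cf. ceil_sdiv_adjust).  */
static long
ceil_div (long a, long b)
{
  long div = a / b, mod = a % b;
  return div + ((mod != 0 && (a < 0) == (b < 0)) ? 1 : 0);
}

/* round to nearest, halfway cases away from zero: adjust by +/-1
   whenever |mod| >= |b| - |mod| (cf. round_sdiv_adjust).  */
static long
round_div (long a, long b)
{
  long div = a / b, mod = a % b;
  if (labs (mod) >= labs (b) - labs (mod))
    return div + (((a < 0) != (b < 0)) ? -1 : 1);
  return div;
}

int
main (void)
{
  long a, b;

  for (a = -50; a <= 50; a++)
    for (b = -7; b <= 7; b++)
      {
        double q;
        long nearest;

        if (b == 0)
          continue;
        q = (double) a / b;
        assert (floor_div (a, b) == (long) floor (q));
        assert (ceil_div (a, b) == (long) ceil (q));
        nearest = (long) (q >= 0 ? floor (q + 0.5) : ceil (q - 0.5));
        assert (round_div (a, b) == nearest);
      }
  puts ("floor/ceil/round adjustments agree with truncating division");
  return 0;
}
```

Running it exercises every dividend in [-50, 50] against every nonzero divisor in [-7, 7], which covers all sign combinations and the exact-halfway cases that round_sdiv_adjust rounds away from zero.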
git-svn-id: https://gcc.gnu.org/svn/gcc/branches/var-tracking-assignments-branch@128946 138bc75d-0d04-0410-961f-82ee72b054a4 --- gcc/ChangeLog.vta | 150 +++++++++++++ gcc/cfgbuild.c | 2 + gcc/cfgcleanup.c | 10 +- gcc/cfgexpand.c | 648 +++++++++++++++++++++++++++++++++++++++++++++++++++++ gcc/cfglayout.c | 1 + gcc/cfgrtl.c | 3 +- gcc/combine.c | 61 ++++- gcc/cse.c | 20 +- gcc/dce.c | 1 + gcc/df-problems.c | 59 ++++- gcc/df-scan.c | 37 ++- gcc/df.h | 1 + gcc/dse.c | 6 + gcc/dwarf2out.c | 4 + gcc/emit-rtl.c | 232 ++++++++++++++++++- gcc/expr.c | 4 + gcc/final.c | 3 +- gcc/function.c | 7 +- gcc/gcse.c | 10 +- gcc/haifa-sched.c | 71 +++++- gcc/ifcvt.c | 28 ++- gcc/init-regs.c | 2 +- gcc/local-alloc.c | 6 +- gcc/lower-subreg.c | 40 ++++ gcc/print-rtl.c | 25 ++- gcc/recog.c | 8 +- gcc/reg-stack.c | 12 +- gcc/regmove.c | 96 +++++--- gcc/regrename.c | 45 +++- gcc/regstat.c | 28 ++- gcc/reload1.c | 5 +- gcc/resource.c | 3 + gcc/rtl.def | 3 + gcc/rtl.h | 64 +++++- gcc/rtlanal.c | 6 +- gcc/sched-deps.c | 37 ++- gcc/sched-rgn.c | 10 + gcc/sched-vis.c | 4 + gcc/var-tracking.c | 245 ++++++++++++++++---- 39 files changed, 1831 insertions(+), 166 deletions(-) diff --git a/gcc/ChangeLog.vta b/gcc/ChangeLog.vta index 69679b2a06a..9c27274ce3e 100644 --- a/gcc/ChangeLog.vta +++ b/gcc/ChangeLog.vta @@ -1,3 +1,153 @@ +2007-10-01 Alexandre Oliva + + * expr.c (expand_expr_real_1): Reject debug exprs. + * cfgexpand.c (floor_sdiv_adjust): New fn. + (ceil_sdiv_adjust): New fn. + (ceil_udiv_adjust): New fn. + (round_sdiv_adjust): New fn. + (round_udiv_adjust): New fn. + (expand_debug_expr): New fn. + (expand_gimple_basic_block): Handle debug stmts. + * cfgrtl.c (rtl_block_ends_with_call_p): Ignore debug insns. + * rtl.h (DEBUG_INSN_P): New macro. + (MAY_HAVE_DEBUG_INSNS): New macro. + (INSN_P): Accept debug insns. + (RTX_FRAME_RELATED_P): Likewise. + (INSN_DELETED_P): Likewise. + (PAT_VAR_LOCATION_DECL): New macro. + (PAT_VAR_LOCATION_LOC): New macro. + (NOTE_VAR_LOCATION_DECL): Reimplement. + (NOTE_VAR_LOCATION_LOC): Likewise. + (NOTE_VAR_LOCATION_STATUS): Likewise. + (INSN_VAR_LOCATION): New macro. + (INSN_VAR_LOCATION_DECL): Likewise. + (INSN_VAR_LOCATION_LOC): Likewise. + (INSN_VAR_LOCATION_STATUS): Likewise. + (gen_rtx_UNKNOWN_VAR_LOC): New macro. + (VAR_LOC_UNKNOWN_P): New macro. + (SCHED_GROUP_P): Accept debug insns. + (emit_debug_insn_before): Declare. + (emit_debug_insn_before_noloc): Likewise. + (emit_debug_insn_before_setloc): Likewise. + (emit_debug_insn_after): Likewise. + (emit_debug_insn_after_noloc): Likewise. + (emit_debug_insn_after_setloc): Likewise. + (emit_debug_insn): Likewise. + (make_debug_insn_raw): Likewise. + * recog.c (verify_changes): Accept debug insns. + (extract_insn): Handle them. + (peephole2_optimize): Skip them. + * dce.c (deletable_insn_p): Handle debug insns. + * reload1.c (reload): Don't scan subregs in debug insns. + (eliminate_regs_in_insn): Handle debug insns. + * cse.c (cse_insn): Handle debug insns. + (cse_extended_basic_block): Likewise. + (insn_live_p): Likewise. + * emit-rtl.c (copy_rtx_if_shared_1): Handle debug insns. + (reset_used_flags): Likewise. + (set_used_flags): Likewise. + (active_insn_p): Exclude debug insns. + (make_debug_insn_raw): New fn. + (emit_insn_before_noloc): Handle debug insns. + (emit_jump_insn_before_noloc): Likewise. + (emit_call_insn_before_noloc): Likewise. + (emit_debug_insn_before_noloc): New fn. + (emit_insn_after_noloc): Handle debug insns. + (emit_jump_insn_after_noloc): Likewise. + (emit_call_insn_after_noloc): Likewise. 
+ (emit_debug_insn_after_noloc): New fn. + (emit_debug_insn_after_setloc): New fn. + (emit_debug_insn_after): New fn. + (emit_debug_insn_before_setloc): New fn. + (emit_debug_insn_before): New fn. + (emit_insn): Handle debug insn. + (emit_debug_insn): New fn. + (emit_jump_insn): Handle debug insn. + (emit_call_insn): Likewise. + (emit): Likewise. + (emit_copy_of_insn_after): Likewise. + * reg-stack.c (subst_stack_regs_pat): Handle var location rtx. + * var-tracking.c (MO_LOC_MAIN, MO_LOC_USE): New micro ops. + (var_reg_decl_set): New fn. + (var_reg_set): Adjust. + (var_mem_decl_set): New fn. + (var_mem_set): Adjust. + (use_type): New fn. + (count_uses, add_uses, add_stores): Adjust. + (compute_bb_dataflow): Handle new micro ops. + (emit_notes_in_bb): Likewise. + (vt_initialize): Likewise. + (delete_debug_insns): New fn. + (vt_debug_insns_local): New fn. + (variable_tracking_main): Call it. + * final.c (get_attr_length_1): Handle debug insns. + * lower-subreg.c (adjust_decomposed_uses): New fn. + (resolve_debug): New fn. + (decompose_multiword_subregs): Handle debug insns. + * print-rtl.c (print_rtx): Likewise. + * ifcvt.c (first_active_insn): Skip debug insns. + (last_active_insn): Likewise. + (cond_exec_process_insns): Handle debug insns. + (check_cond_move_block): Likewise. + (cond_move_convert_if_block): Likewise. + (block_jumps_and_fallthru_p): Likewise. + (dead_or_predicable): Likewise. + * cfgcleanup.c (flow_find_cross_jump): Skip debug insns. + * combine.c (create_log_links): Skip debug insns. + (combine_instructions): Likewise. + (rtx_subst_pair): New struct. + (propagate_for_debug_subst): New fn. + (propagate_for_debug): New fn. + (try_combine): Call it. + (distribute_links): Handle debug insns. + * df-problems.c (df_lr_bb_local_compute): Likewise. + (df_set_note): Reject debug insns. + (df_set_dead_notes_for_mw): Add added_notes_p argument. + (df_note_bb_compute): Handle debug insns. + (df_simulate_uses): Likewise. + (df_simulate_one_insn_forwards): Likewise. + (df_simulate_one_insn_backwards): Likewise. + * df-scan.c (df_insn_rescan_1): Renamed with new argument, from... + (df_insn_rescan): ... this. Implement in terms of new name. + (df_insn_rescan_debug_internal): New fn. + (df_uses_record): Handle debug insns. + * haifa-sched.c (contributes_to_priority_p): Likewise. + (nondebug_dep_list_size): New fn. + (priority, add_jump_dependencies): Use it. + (rank_for_schedule): Handle debug insns. + (schedule_block): Always accept more debug insns. + * local-alloc.c (block_alloc): Handle debug insns. + * regrename.c (replace_oldest_value_reg): ??? Test without this + (replace_oldest_value_addr): Handle debug insns. + (replace_oldest_value_mem): Likewise. + (copyprop_hardreg_forward_1): Likewise. + * regstat.c (regstat_init_n_sets_and_refs): Skip debug uses. + (regstat_bb_compute_ri): Skip debug insns. + * sched-rgn.c (add_branch_dependences): Likewise. + * sched-vis.c (print_pattern): Handle debug insns. + * regmove.c (optimize_reg_copy_1): Don't count debug insns. + (fixup_match_2): Likewise. + (regmove_optimize): Handle debug insns. + (fixup_match_1): Don't count debug insns. Adjust them. + * gcse.c (allolc_gcse_mem): Don't allocate uid for debug isnsn. + (bypass_conditional_jumps): Skip debug insns. + (compute_ld_motion_mems): Likewise. + * sched-deps.c (sched_analyze_2): Handle debug insns. + (sched_analyze_insn): Likewise. + (sched_analyze): Likewise. + * init-regs.c (initialize_uninitialized_regs): Skip debug insns. + * dse.c (scan_insn): Keep debug insns. 
+ * resource.c (mark_target_live_regs): Skip debug insns. + * rtlanal.c (canonicalize_condition): Likewise. + * df.h (df_insn_rescan_debug_internal): Declare. + * dwarf2out.c (mem_loc_descriptor): Ignore sign and zero extend. + (loc_descriptor): Likewise. + * function.c (instantiate_virtual_regs): Handle debug insns. + * rtl.def (DEBUG_INSN): New rtl code. + * cfgbuild.c (inside_basic_block_p): Handle debug insns. + (control_flow_insn_p): Likewise. + * cfglayout.c (duplicate_insn_chain): Likewise. + 2007-10-01 Alexandre Oliva * tree-inline.c (processing_debug_stmt_p): New variable. diff --git a/gcc/cfgbuild.c b/gcc/cfgbuild.c index f8c8b820541..6ef23bfc095 100644 --- a/gcc/cfgbuild.c +++ b/gcc/cfgbuild.c @@ -73,6 +73,7 @@ inside_basic_block_p (const_rtx insn) case CALL_INSN: case INSN: + case DEBUG_INSN: return true; case BARRIER: @@ -96,6 +97,7 @@ control_flow_insn_p (const_rtx insn) { case NOTE: case CODE_LABEL: + case DEBUG_INSN: return false; case JUMP_INSN: diff --git a/gcc/cfgcleanup.c b/gcc/cfgcleanup.c index 5086784cd4c..29c0cb8c69e 100644 --- a/gcc/cfgcleanup.c +++ b/gcc/cfgcleanup.c @@ -1056,10 +1056,10 @@ flow_find_cross_jump (int mode ATTRIBUTE_UNUSED, basic_block bb1, while (true) { /* Ignore notes. */ - while (!INSN_P (i1) && i1 != BB_HEAD (bb1)) + while ((!INSN_P (i1) || DEBUG_INSN_P (i1)) && i1 != BB_HEAD (bb1)) i1 = PREV_INSN (i1); - while (!INSN_P (i2) && i2 != BB_HEAD (bb2)) + while ((!INSN_P (i2) || DEBUG_INSN_P (i2)) && i2 != BB_HEAD (bb2)) i2 = PREV_INSN (i2); if (i1 == BB_HEAD (bb1) || i2 == BB_HEAD (bb2)) @@ -1110,13 +1110,15 @@ flow_find_cross_jump (int mode ATTRIBUTE_UNUSED, basic_block bb1, Two, it keeps line number notes as matched as may be. */ if (ninsns) { - while (last1 != BB_HEAD (bb1) && !INSN_P (PREV_INSN (last1))) + while (last1 != BB_HEAD (bb1) && (!INSN_P (PREV_INSN (last1)) + || DEBUG_INSN_P (PREV_INSN (last1)))) last1 = PREV_INSN (last1); if (last1 != BB_HEAD (bb1) && LABEL_P (PREV_INSN (last1))) last1 = PREV_INSN (last1); - while (last2 != BB_HEAD (bb2) && !INSN_P (PREV_INSN (last2))) + while (last2 != BB_HEAD (bb2) && (!INSN_P (PREV_INSN (last2)) + || DEBUG_INSN_P (PREV_INSN (last2)))) last2 = PREV_INSN (last2); if (last2 != BB_HEAD (bb2) && LABEL_P (PREV_INSN (last2))) diff --git a/gcc/cfgexpand.c b/gcc/cfgexpand.c index e550a4c8b7e..807ece0881a 100644 --- a/gcc/cfgexpand.c +++ b/gcc/cfgexpand.c @@ -1465,6 +1465,624 @@ expand_gimple_tailcall (basic_block bb, tree stmt, bool *can_fallthru) return bb; } +/* Return the difference between the floor and the truncated result of + a signed division by OP1 with remainder MOD. */ +static rtx +floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1) +{ + /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */ + return gen_rtx_IF_THEN_ELSE + (mode, gen_rtx_NE (BImode, mod, const0_rtx), + gen_rtx_IF_THEN_ELSE + (mode, gen_rtx_LT (BImode, + gen_rtx_DIV (mode, op1, mod), + const0_rtx), + constm1_rtx, const0_rtx), + const0_rtx); +} + +/* Return the difference between the ceil and the truncated result of + a signed division by OP1 with remainder MOD. */ +static rtx +ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1) +{ + /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */ + return gen_rtx_IF_THEN_ELSE + (mode, gen_rtx_NE (BImode, mod, const0_rtx), + gen_rtx_IF_THEN_ELSE + (mode, gen_rtx_GT (BImode, + gen_rtx_DIV (mode, op1, mod), + const0_rtx), + const1_rtx, const0_rtx), + const0_rtx); +} + +/* Return the difference between the ceil and the truncated result of + an unsigned division by OP1 with remainder MOD. 
*/ +static rtx +ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED) +{ + /* (mod != 0 ? 1 : 0) */ + return gen_rtx_IF_THEN_ELSE + (mode, gen_rtx_NE (BImode, mod, const0_rtx), + const1_rtx, const0_rtx); +} + +/* Return the difference between the rounded and the truncated result + of a signed division by OP1 with remainder MOD. Halfway cases are + rounded away from zero, rather than to the nearest even number. */ +static rtx +round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1) +{ + /* (abs (mod) >= abs (op1) - abs (mod) + ? (op1 / mod > 0 ? 1 : -1) + : 0) */ + return gen_rtx_IF_THEN_ELSE + (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod), + gen_rtx_MINUS (mode, + gen_rtx_ABS (mode, op1), + gen_rtx_ABS (mode, mod))), + gen_rtx_IF_THEN_ELSE + (mode, gen_rtx_GT (BImode, + gen_rtx_DIV (mode, op1, mod), + const0_rtx), + const1_rtx, constm1_rtx), + const0_rtx); +} + +/* Return the difference between the rounded and the truncated result + of a unsigned division by OP1 with remainder MOD. Halfway cases + are rounded away from zero, rather than to the nearest even + number. */ +static rtx +round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1) +{ + /* (mod >= op1 - mod ? 1 : 0) */ + return gen_rtx_IF_THEN_ELSE + (mode, gen_rtx_GE (BImode, mod, + gen_rtx_MINUS (mode, op1, mod)), + const1_rtx, const0_rtx); +} + + +/* Return an RTX equivalent to the value of the tree expression + EXP. */ + +static rtx +expand_debug_expr (tree exp) +{ + rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX; + enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp)); + int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp)); + + switch (TREE_CODE_CLASS (TREE_CODE (exp))) + { + case tcc_expression: + switch (TREE_CODE (exp)) + { + case COND_EXPR: + goto ternary; + + case TRUTH_ANDIF_EXPR: + case TRUTH_ORIF_EXPR: + case TRUTH_AND_EXPR: + case TRUTH_OR_EXPR: + case TRUTH_XOR_EXPR: + goto binary; + + case TRUTH_NOT_EXPR: + goto unary; + + default: + break; + } + break; + + ternary: + op2 = expand_debug_expr (TREE_OPERAND (exp, 2)); + if (!op2) + return NULL_RTX; + /* Fall through. */ + + binary: + case tcc_binary: + case tcc_comparison: + op1 = expand_debug_expr (TREE_OPERAND (exp, 1)); + if (!op1) + return NULL_RTX; + /* Fall through. */ + + unary: + case tcc_unary: + op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); + if (!op0) + return NULL_RTX; + break; + + case tcc_type: + case tcc_statement: + case tcc_gimple_stmt: + gcc_unreachable (); + + case tcc_constant: + case tcc_exceptional: + case tcc_declaration: + case tcc_reference: + case tcc_vl_exp: + break; + } + + switch (TREE_CODE (exp)) + { + case VAR_DECL: + case PARM_DECL: + case FUNCTION_DECL: + case LABEL_DECL: + case CONST_DECL: + case RESULT_DECL: + /* This decl was optimized away. 
*/ + if (!DECL_RTL_SET_P (exp)) + return NULL; + + return DECL_RTL (exp); + + case INTEGER_CST: + case REAL_CST: + case FIXED_CST: + case COMPLEX_CST: + case STRING_CST: + op0 = expand_expr (exp, NULL_RTX, mode, + EXPAND_INITIALIZER | EXPAND_CONST_ADDRESS); + if (op0 && GET_MODE (op0) == VOIDmode && mode != VOIDmode) + return op0 = gen_rtx_CONST (mode, op0); + return op0; + + case NOP_EXPR: + case CONVERT_EXPR: + { + enum machine_mode inner_mode + = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); + + if (mode == GET_MODE (op0)) + return op0; + + if (CONSTANT_P (op0) + || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (GET_MODE (op0))) + op0 = simplify_gen_subreg (mode, op0, inner_mode, + subreg_lowpart_offset (mode, + inner_mode)); + else if (unsignedp) + op0 = gen_rtx_ZERO_EXTEND (mode, op0); + else + op0 = gen_rtx_SIGN_EXTEND (mode, op0); + + return op0; + } + + case INDIRECT_REF: + case ALIGN_INDIRECT_REF: + case MISALIGNED_INDIRECT_REF: + op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); + if (!op0) + return NULL; + + gcc_assert (GET_MODE (op0) == Pmode); + + if (TREE_CODE (exp) == ALIGN_INDIRECT_REF) + { + int align = TYPE_ALIGN_UNIT (TREE_TYPE (exp)); + op0 = gen_rtx_AND (Pmode, op0, GEN_INT (-align)); + } + + op0 = gen_rtx_MEM (mode, op0); + + set_mem_attributes (op0, exp, 0); + + return op0; + + case TARGET_MEM_REF: + if (TMR_SYMBOL (exp) && !DECL_RTL_SET_P (TMR_SYMBOL (exp))) + return NULL; + + op0 = expand_debug_expr + (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), + exp)); + if (!op0) + return NULL; + + gcc_assert (GET_MODE (op0) == Pmode); + + op0 = gen_rtx_MEM (mode, op0); + + set_mem_attributes (op0, exp, 0); + + return op0; + + case ARRAY_REF: + case ARRAY_RANGE_REF: + case COMPONENT_REF: + case BIT_FIELD_REF: + case REALPART_EXPR: + case IMAGPART_EXPR: + case VIEW_CONVERT_EXPR: + { + enum machine_mode mode1; + HOST_WIDE_INT bitsize, bitpos; + tree offset; + int volatilep = 0; + tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset, + &mode1, &unsignedp, &volatilep, true); + + op0 = expand_debug_expr (tem); + + if (!op0) + return NULL; + + if (offset) + { + gcc_assert (MEM_P (op0)); + + op1 = expand_debug_expr (offset); + if (!op1) + return NULL; + + op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (Pmode, XEXP (op0, 0), op1)); + } + + if (MEM_P (op0)) + { + if (bitpos >= BITS_PER_UNIT) + { + op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT); + bitpos %= BITS_PER_UNIT; + } + else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode)) + op0 = adjust_address_nv (op0, mode, 0); + else if (GET_MODE (op0) != mode1) + op0 = adjust_address_nv (op0, mode1, 0); + else + op0 = copy_rtx (op0); + set_mem_attributes (op0, exp, 0); + } + + if (bitpos == 0 && mode == GET_MODE (op0)) + return op0; + + if ((bitpos % BITS_PER_UNIT) == 0 + && bitsize == GET_MODE_BITSIZE (mode1)) + return simplify_gen_subreg (mode, op0, + GET_MODE (op0) != VOIDmode + ? GET_MODE (op0) : mode1, + bitpos / BITS_PER_UNIT); + + return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0)) + && TYPE_UNSIGNED (TREE_TYPE (exp)) + ? SIGN_EXTRACT + : ZERO_EXTRACT, mode, + GET_MODE (op0) != VOIDmode + ? GET_MODE (op0) : mode1, + op0, GEN_INT (bitsize), GEN_INT (bitpos)); + } + + case EXC_PTR_EXPR: + /* ??? Do not call get_exception_pointer(), we don't want to gen + it if it hasn't been created yet. */ + return get_exception_pointer (cfun); + + case FILTER_EXPR: + /* Likewise get_exception_filter(). 
*/ + return get_exception_filter (cfun); + + case ABS_EXPR: + return gen_rtx_ABS (mode, op0); + + case NEGATE_EXPR: + return gen_rtx_NEG (mode, op0); + + case BIT_NOT_EXPR: + return gen_rtx_NOT (mode, op0); + + case FLOAT_EXPR: + if (unsignedp) + return gen_rtx_UNSIGNED_FLOAT (mode, op0); + else + return gen_rtx_FLOAT (mode, op0); + + case FIX_TRUNC_EXPR: + if (unsignedp) + return gen_rtx_UNSIGNED_FIX (mode, op0); + else + return gen_rtx_FIX (mode, op0); + + case POINTER_PLUS_EXPR: + case PLUS_EXPR: + return gen_rtx_PLUS (mode, op0, op1); + + case MINUS_EXPR: + return gen_rtx_MINUS (mode, op0, op1); + + case MULT_EXPR: + return gen_rtx_MULT (mode, op0, op1); + + case RDIV_EXPR: + case TRUNC_DIV_EXPR: + case EXACT_DIV_EXPR: + if (unsignedp) + return gen_rtx_UDIV (mode, op0, op1); + else + return gen_rtx_DIV (mode, op0, op1); + + case TRUNC_MOD_EXPR: + if (unsignedp) + return gen_rtx_UMOD (mode, op0, op1); + else + return gen_rtx_MOD (mode, op0, op1); + + case FLOOR_DIV_EXPR: + if (unsignedp) + return gen_rtx_UDIV (mode, op0, op1); + else + { + rtx div = gen_rtx_DIV (mode, op0, op1); + rtx mod = gen_rtx_MOD (mode, op0, op1); + rtx adj = floor_sdiv_adjust (mode, mod, op1); + return gen_rtx_PLUS (mode, div, adj); + } + + case FLOOR_MOD_EXPR: + if (unsignedp) + return gen_rtx_UMOD (mode, op0, op1); + else + { + rtx mod = gen_rtx_MOD (mode, op0, op1); + rtx adj = floor_sdiv_adjust (mode, mod, op1); + adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); + return gen_rtx_PLUS (mode, mod, adj); + } + + case CEIL_DIV_EXPR: + if (unsignedp) + { + rtx div = gen_rtx_UDIV (mode, op0, op1); + rtx mod = gen_rtx_UMOD (mode, op0, op1); + rtx adj = ceil_udiv_adjust (mode, mod, op1); + return gen_rtx_PLUS (mode, div, adj); + } + else + { + rtx div = gen_rtx_DIV (mode, op0, op1); + rtx mod = gen_rtx_MOD (mode, op0, op1); + rtx adj = ceil_sdiv_adjust (mode, mod, op1); + return gen_rtx_PLUS (mode, div, adj); + } + + case CEIL_MOD_EXPR: + if (unsignedp) + { + rtx mod = gen_rtx_UMOD (mode, op0, op1); + rtx adj = ceil_udiv_adjust (mode, mod, op1); + adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); + return gen_rtx_PLUS (mode, mod, adj); + } + else + { + rtx mod = gen_rtx_MOD (mode, op0, op1); + rtx adj = ceil_sdiv_adjust (mode, mod, op1); + adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); + return gen_rtx_PLUS (mode, mod, adj); + } + + case ROUND_DIV_EXPR: + if (unsignedp) + { + rtx div = gen_rtx_UDIV (mode, op0, op1); + rtx mod = gen_rtx_UMOD (mode, op0, op1); + rtx adj = round_udiv_adjust (mode, mod, op1); + return gen_rtx_PLUS (mode, div, adj); + } + else + { + rtx div = gen_rtx_DIV (mode, op0, op1); + rtx mod = gen_rtx_MOD (mode, op0, op1); + rtx adj = round_sdiv_adjust (mode, mod, op1); + return gen_rtx_PLUS (mode, div, adj); + } + + case ROUND_MOD_EXPR: + if (unsignedp) + { + rtx mod = gen_rtx_UMOD (mode, op0, op1); + rtx adj = round_udiv_adjust (mode, mod, op1); + adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); + return gen_rtx_PLUS (mode, mod, adj); + } + else + { + rtx mod = gen_rtx_MOD (mode, op0, op1); + rtx adj = round_sdiv_adjust (mode, mod, op1); + adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); + return gen_rtx_PLUS (mode, mod, adj); + } + + case LSHIFT_EXPR: + return gen_rtx_ASHIFT (mode, op0, op1); + + case RSHIFT_EXPR: + if (unsignedp) + return gen_rtx_LSHIFTRT (mode, op0, op1); + else + return gen_rtx_ASHIFTRT (mode, op0, op1); + + case LROTATE_EXPR: + return gen_rtx_ROTATE (mode, op0, op1); + + case RROTATE_EXPR: + return gen_rtx_ROTATERT (mode, op0, op1); + 
+ case MIN_EXPR: + if (unsignedp) + return gen_rtx_UMIN (mode, op0, op1); + else + return gen_rtx_SMIN (mode, op0, op1); + + case MAX_EXPR: + if (unsignedp) + return gen_rtx_UMAX (mode, op0, op1); + else + return gen_rtx_SMAX (mode, op0, op1); + + case BIT_AND_EXPR: + case TRUTH_AND_EXPR: + return gen_rtx_AND (mode, op0, op1); + + case BIT_IOR_EXPR: + case TRUTH_OR_EXPR: + return gen_rtx_IOR (mode, op0, op1); + + case BIT_XOR_EXPR: + case TRUTH_XOR_EXPR: + return gen_rtx_XOR (mode, op0, op1); + + case TRUTH_ANDIF_EXPR: + return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx); + + case TRUTH_ORIF_EXPR: + return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1); + + case TRUTH_NOT_EXPR: + return gen_rtx_EQ (mode, op0, const0_rtx); + + case LT_EXPR: + if (unsignedp) + return gen_rtx_LTU (mode, op0, op1); + else + return gen_rtx_LT (mode, op0, op1); + + case LE_EXPR: + if (unsignedp) + return gen_rtx_LEU (mode, op0, op1); + else + return gen_rtx_LE (mode, op0, op1); + + case GT_EXPR: + if (unsignedp) + return gen_rtx_GTU (mode, op0, op1); + else + return gen_rtx_GT (mode, op0, op1); + + case GE_EXPR: + if (unsignedp) + return gen_rtx_GEU (mode, op0, op1); + else + return gen_rtx_GE (mode, op0, op1); + + case EQ_EXPR: + return gen_rtx_EQ (mode, op0, op1); + + case NE_EXPR: + return gen_rtx_NE (mode, op0, op1); + + case UNORDERED_EXPR: + return gen_rtx_UNORDERED (mode, op0, op1); + + case ORDERED_EXPR: + return gen_rtx_ORDERED (mode, op0, op1); + + case UNLT_EXPR: + return gen_rtx_UNLT (mode, op0, op1); + + case UNLE_EXPR: + return gen_rtx_UNLE (mode, op0, op1); + + case UNGT_EXPR: + return gen_rtx_UNGT (mode, op0, op1); + + case UNGE_EXPR: + return gen_rtx_UNGE (mode, op0, op1); + + case UNEQ_EXPR: + return gen_rtx_UNEQ (mode, op0, op1); + + case LTGT_EXPR: + return gen_rtx_LTGT (mode, op0, op1); + + case COND_EXPR: + return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2); + + case COMPLEX_EXPR: + gcc_assert (COMPLEX_MODE_P (mode)); + if (GET_MODE (op0) == VOIDmode) + op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0); + if (GET_MODE (op1) == VOIDmode) + op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1); + return gen_rtx_CONCAT (mode, op0, op1); + + case ADDR_EXPR: + op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); + if (!op0 || !MEM_P (op0)) + return NULL; + + return XEXP (op0, 0); + + case VECTOR_CST: + exp = build_constructor_from_list (TREE_TYPE (exp), + TREE_VECTOR_CST_ELTS (exp)); + /* Fall through. */ + + case CONSTRUCTOR: + if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE) + { + unsigned i; + tree val; + + op0 = gen_rtx_CONCATN + (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))); + + FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val) + { + op1 = expand_debug_expr (val); + if (!op1) + return NULL; + XVECEXP (op0, 0, i) = op1; + } + + if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))) + { + op1 = expand_debug_expr + (fold_convert (TREE_TYPE (TREE_TYPE (exp)), integer_zero_node)); + + if (!op1) + return NULL; + + for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++) + XVECEXP (op0, 0, i) = op1; + } + + return op0; + } + else + goto flag_unsupported; + + case CALL_EXPR: + /* ??? Maybe handle some builtins? */ + return NULL; + + /* SSA names of optimized-away variables can survive un-SSA. */ + case SSA_NAME: + case ERROR_MARK: + return NULL; + + + default: + flag_unsupported: +#if 0 + return NULL; +#endif + debug_tree (exp); + gcc_unreachable (); + } +} + /* Expand basic block BB from GIMPLE trees to RTL. 
*/ static basic_block @@ -1575,6 +2193,36 @@ expand_gimple_basic_block (basic_block bb) if (new_bb) return new_bb; } + else if (TREE_CODE (stmt) == VAR_DEBUG_VALUE) + { + tree var = VAR_DEBUG_VALUE_VAR (stmt); + tree value = VAR_DEBUG_VALUE_VALUE (stmt); + rtx val; + + last = get_last_insn (); + + if (value == VAR_DEBUG_VALUE_NOVALUE) + val = NULL_RTX; + else + val = expand_debug_expr (value); + + if (!val) + val = gen_rtx_UNKNOWN_VAR_LOC (VOIDmode); + + val = gen_rtx_VAR_LOCATION + (VOIDmode, var, val, VAR_INIT_STATUS_INITIALIZED); + + val = emit_debug_insn (val); + + maybe_dump_rtl_for_tree_stmt (stmt, last); + + if (last != PREV_INSN (val)) + { + debug_generic_expr (stmt); + debug_rtx_range (NEXT_INSN (last), get_last_insn ()); + gcc_unreachable (); + } + } else { tree call = get_call_expr_in (stmt); diff --git a/gcc/cfglayout.c b/gcc/cfglayout.c index c70348c9a4d..f93793f54f6 100644 --- a/gcc/cfglayout.c +++ b/gcc/cfglayout.c @@ -1062,6 +1062,7 @@ duplicate_insn_chain (rtx from, rtx to) { switch (GET_CODE (insn)) { + case DEBUG_INSN: case INSN: case CALL_INSN: case JUMP_INSN: diff --git a/gcc/cfgrtl.c b/gcc/cfgrtl.c index cfeca97eaee..a7460767296 100644 --- a/gcc/cfgrtl.c +++ b/gcc/cfgrtl.c @@ -2703,7 +2703,8 @@ rtl_block_ends_with_call_p (basic_block bb) while (!CALL_P (insn) && insn != BB_HEAD (bb) && (keep_with_call_p (insn) - || NOTE_P (insn))) + || NOTE_P (insn) + || DEBUG_INSN_P (insn))) insn = PREV_INSN (insn); return (CALL_P (insn)); } diff --git a/gcc/combine.c b/gcc/combine.c index 392dedb3da5..866a4890329 100644 --- a/gcc/combine.c +++ b/gcc/combine.c @@ -932,7 +932,7 @@ create_log_links (void) { FOR_BB_INSNS_REVERSE (bb, insn) { - if (!INSN_P (insn)) + if (!INSN_P (insn) || DEBUG_INSN_P (insn)) continue; /* Log links are created only once. */ @@ -1123,7 +1123,7 @@ combine_instructions (rtx f, unsigned int nregs) insn = next ? next : NEXT_INSN (insn)) { next = 0; - if (INSN_P (insn)) + if (INSN_P (insn) && !DEBUG_INSN_P (insn)) { /* See if we know about function return values before this insn based upon SUBREG flags. */ @@ -2163,6 +2163,51 @@ reg_subword_p (rtx x, rtx reg) && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT; } +/* Auxiliary data structure for propagate_for_debug_stmt. */ + +struct rtx_subst_pair +{ + rtx from, to; +}; + +/* If *LOC is the same as FROM in the struct rtx_subst_pair passed as + DATA, replace it with a copy of TO. */ + +static int +propagate_for_debug_subst (rtx *loc, void *data) +{ + struct rtx_subst_pair *pair = data; + rtx from = pair->from, to = pair->to; + rtx x = *loc; + + if (rtx_equal_p (x, from)) + { + *loc = copy_rtx (to); + return -1; + } + + return 0; +} + +/* Replace occurrences of DEST with SRC in DEBUG_INSNs between INSN + and LAST. */ + +static void +propagate_for_debug (rtx insn, rtx last, rtx dest, rtx src) +{ + struct rtx_subst_pair p; + + p.from = dest; + p.to = src; + + while ((insn = NEXT_INSN (insn)) != last) + if (DEBUG_INSN_P (insn)) + { + for_each_rtx (&INSN_VAR_LOCATION_LOC (insn), + propagate_for_debug_subst, &p); + df_insn_rescan (insn); + } +} /* Try to combine the insns I1 and I2 into I3. Here I1 and I2 appear earlier than I3. 
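The propagate_for_debug machinery added in the hunk above is what keeps debug insns meaningful when try_combine deletes i1 or i2: before SET_INSN_DELETED, it walks INSN_VAR_LOCATION_LOC of every intervening debug insn with for_each_rtx and replaces each occurrence of the deleted destination with a copy of its source, returning -1 so the walk does not descend into the freshly substituted expression. The sketch below is only a toy model of that walk over a hand-rolled expression type, not GCC's rtx API; every type and name in it is made up for illustration.

```c
/* Toy model of the substitution walk done by propagate_for_debug on
   INSN_VAR_LOCATION_LOC.  This is NOT GCC code: struct node stands in
   for rtx, and all names below are invented for illustration.  */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct node             /* stand-in for an rtx expression node */
{
  char op[8];           /* operator or leaf name, e.g. "plus", "r65" */
  struct node *kid[2];  /* operands; NULL when absent */
};

static struct node *
mk (const char *op, struct node *a, struct node *b)
{
  struct node *n = calloc (1, sizeof *n);
  snprintf (n->op, sizeof n->op, "%s", op);
  n->kid[0] = a;
  n->kid[1] = b;
  return n;
}

/* Structural equality, the analogue of rtx_equal_p.  */
static int
node_equal (const struct node *a, const struct node *b)
{
  if (!a || !b)
    return a == b;
  return strcmp (a->op, b->op) == 0
         && node_equal (a->kid[0], b->kid[0])
         && node_equal (a->kid[1], b->kid[1]);
}

/* Deep copy, the analogue of copy_rtx.  */
static struct node *
node_copy (const struct node *n)
{
  return n ? mk (n->op, node_copy (n->kid[0]), node_copy (n->kid[1])) : NULL;
}

/* The analogue of propagate_for_debug_subst: rewrite *LOC in place,
   replacing every subexpression equal to FROM with a copy of TO, and
   do not walk into what was just substituted (the -1 return of the
   for_each_rtx callback).  */
static void
subst (struct node **loc, const struct node *from, const struct node *to)
{
  if (!*loc)
    return;
  if (node_equal (*loc, from))
    {
      *loc = node_copy (to);    /* copy, so later edits do not share */
      return;
    }
  subst (&(*loc)->kid[0], from, to);
  subst (&(*loc)->kid[1], from, to);
}

static void
dump (const struct node *n)
{
  if (!n)
    return;
  if (!n->kid[0])
    {
      printf ("%s", n->op);
      return;
    }
  printf ("(%s ", n->op);
  dump (n->kid[0]);
  if (n->kid[1])
    {
      printf (" ");
      dump (n->kid[1]);
    }
  printf (")");
}

int
main (void)
{
  /* A debug location (plus r65 c4); combine is about to delete the
     insn that set r65 to (mult r60 r61), so rewrite the location.  */
  struct node *loc = mk ("plus", mk ("r65", NULL, NULL),
                         mk ("c4", NULL, NULL));
  struct node *from = mk ("r65", NULL, NULL);
  struct node *to = mk ("mult", mk ("r60", NULL, NULL),
                        mk ("r61", NULL, NULL));

  subst (&loc, from, to);
  dump (loc);                   /* prints (plus (mult r60 r61) c4) */
  printf ("\n");
  return 0;
}
```

Running it prints (plus (mult r60 r61) c4): the debug location survives the deletion of the insn that set r65, which is exactly the effect the calls to propagate_for_debug in try_combine are after.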
@@ -3569,12 +3614,18 @@ try_combine (rtx i3, rtx i2, rtx i1, int *new_direct_jump_p) PATTERN (i2) = newi2pat; } else - SET_INSN_DELETED (i2); + { + if (MAY_HAVE_DEBUG_INSNS) + propagate_for_debug (i2, i3, i2dest, i2src); + SET_INSN_DELETED (i2); + } if (i1) { LOG_LINKS (i1) = 0; REG_NOTES (i1) = 0; + if (MAY_HAVE_DEBUG_INSNS) + propagate_for_debug (i1, i3, i1dest, i1src); SET_INSN_DELETED (i1); } @@ -12902,7 +12953,9 @@ distribute_links (rtx links) (insn && (this_basic_block->next_bb == EXIT_BLOCK_PTR || BB_HEAD (this_basic_block->next_bb) != insn)); insn = NEXT_INSN (insn)) - if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn))) + if (DEBUG_INSN_P (insn)) + continue; + else if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn))) { if (reg_referenced_p (reg, PATTERN (insn))) place = insn; diff --git a/gcc/cse.c b/gcc/cse.c index 21846f308d6..249b8e867c3 100644 --- a/gcc/cse.c +++ b/gcc/cse.c @@ -4119,6 +4119,8 @@ cse_insn (rtx insn, rtx libcall_insn) apply_change_group (); fold_rtx (x, insn); } + else if (DEBUG_INSN_P (insn)) + canon_reg (PATTERN (insn), insn); /* Store the equivalent value in SRC_EQV, if different, or if the DEST is a STRICT_LOW_PART. The latter condition is necessary because SRC_EQV @@ -5534,7 +5536,7 @@ cse_insn (rtx insn, rtx libcall_insn) { prev = PREV_INSN (prev); } - while (prev != bb_head && NOTE_P (prev)); + while (prev != bb_head && (NOTE_P (prev) || DEBUG_INSN_P (prev))); /* Do not swap the registers around if the previous instruction attaches a REG_EQUIV note to REG1. @@ -5977,6 +5979,7 @@ cse_extended_basic_block (struct cse_basic_block_data *ebb_data) FIXME: This is a real kludge and needs to be done some other way. */ if (INSN_P (insn) + && !DEBUG_INSN_P (insn) && num_insns++ > PARAM_VALUE (PARAM_MAX_CSE_INSNS)) { flush_hash_table (); @@ -6422,6 +6425,20 @@ insn_live_p (rtx insn, int *counts) } return false; } + else if (DEBUG_INSN_P (insn)) + { + rtx next; + + for (next = NEXT_INSN (insn); next; next = NEXT_INSN (next)) + if (NOTE_P (next)) + continue; + else if (!DEBUG_INSN_P (next)) + return true; + else if (INSN_VAR_LOCATION_DECL (insn) == INSN_VAR_LOCATION_DECL (next)) + return false; + + return true; + } else return true; } @@ -6494,6 +6511,7 @@ delete_trivially_dead_insns (rtx insns, int nreg) int ndead = 0; timevar_push (TV_DELETE_TRIVIALLY_DEAD); + /* First count the number of times each register is used. 
*/ counts = XCNEWVEC (int, nreg); for (insn = insns; insn; insn = NEXT_INSN (insn)) diff --git a/gcc/dce.c b/gcc/dce.c index dec86692bf6..7e02e215263 100644 --- a/gcc/dce.c +++ b/gcc/dce.c @@ -104,6 +104,7 @@ deletable_insn_p (rtx insn, bool fast) switch (GET_CODE (body)) { case USE: + case VAR_LOCATION: return false; case CLOBBER: diff --git a/gcc/df-problems.c b/gcc/df-problems.c index 9ae57d12e2f..79e09ce4ae1 100644 --- a/gcc/df-problems.c +++ b/gcc/df-problems.c @@ -856,7 +856,7 @@ df_lr_bb_local_compute (unsigned int bb_index) { unsigned int uid = INSN_UID (insn); - if (!INSN_P (insn)) + if (!INSN_P (insn) || DEBUG_INSN_P (insn)) continue; for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++) @@ -3127,6 +3127,8 @@ df_set_note (enum reg_note note_type, rtx insn, rtx old, rtx reg) rtx this = old; rtx prev = NULL; + gcc_assert (!DEBUG_INSN_P (insn)); + while (this) if (XEXP (this, 0) == reg) { @@ -3259,9 +3261,12 @@ df_whole_mw_reg_dead_p (struct df_mw_hardreg *mws, static rtx df_set_dead_notes_for_mw (rtx insn, rtx old, struct df_mw_hardreg *mws, bitmap live, bitmap do_not_gen, - bitmap artificial_uses) + bitmap artificial_uses, bool *added_notes_p) { unsigned int r; + bool is_debug = *added_notes_p; + + *added_notes_p = false; #ifdef REG_DEAD_DEBUGGING if (dump_file) @@ -3279,6 +3284,11 @@ df_set_dead_notes_for_mw (rtx insn, rtx old, struct df_mw_hardreg *mws, if (df_whole_mw_reg_dead_p (mws, live, artificial_uses, do_not_gen)) { /* Add a dead note for the entire multi word register. */ + if (is_debug) + { + *added_notes_p = true; + return old; + } old = df_set_note (REG_DEAD, insn, old, mws->mw_reg); #ifdef REG_DEAD_DEBUGGING df_print_note ("adding 1: ", insn, REG_NOTES (insn)); @@ -3291,6 +3301,11 @@ df_set_dead_notes_for_mw (rtx insn, rtx old, struct df_mw_hardreg *mws, && !bitmap_bit_p (artificial_uses, r) && !bitmap_bit_p (do_not_gen, r)) { + if (is_debug) + { + *added_notes_p = true; + return old; + } old = df_set_note (REG_DEAD, insn, old, regno_reg_rtx[r]); #ifdef REG_DEAD_DEBUGGING df_print_note ("adding 2: ", insn, REG_NOTES (insn)); @@ -3401,10 +3416,13 @@ df_note_bb_compute (unsigned int bb_index, struct df_mw_hardreg **mws_rec; rtx old_dead_notes; rtx old_unused_notes; + int debug_insn; if (!INSN_P (insn)) continue; + debug_insn = DEBUG_INSN_P (insn); + bitmap_clear (do_not_gen); df_kill_notes (insn, &old_dead_notes, &old_unused_notes); @@ -3489,10 +3507,18 @@ df_note_bb_compute (unsigned int bb_index, struct df_mw_hardreg *mws = *mws_rec; if ((mws->type != DF_REF_REG_DEF) && !df_ignore_stack_reg (mws->start_regno)) - old_dead_notes - = df_set_dead_notes_for_mw (insn, old_dead_notes, - mws, live, do_not_gen, - artificial_uses); + { + bool really_add_notes = debug_insn != 0; + + old_dead_notes + = df_set_dead_notes_for_mw (insn, old_dead_notes, + mws, live, do_not_gen, + artificial_uses, + &really_add_notes); + + if (really_add_notes) + debug_insn = -1; + } mws_rec++; } @@ -3510,6 +3536,12 @@ df_note_bb_compute (unsigned int bb_index, #endif if (!bitmap_bit_p (live, uregno)) { + if (debug_insn) + { + debug_insn = -1; + break; + } + if ( (!(DF_REF_FLAGS (use) & DF_REF_MW_HARDREG)) && (!bitmap_bit_p (do_not_gen, uregno)) && (!bitmap_bit_p (artificial_uses, uregno)) @@ -3541,6 +3573,14 @@ df_note_bb_compute (unsigned int bb_index, free_EXPR_LIST_node (old_dead_notes); old_dead_notes = next; } + + if (debug_insn == -1) + { + /* ??? We could probably do better here, replacing dead + registers with their definitions. 
*/ + INSN_VAR_LOCATION_LOC (insn) = gen_rtx_UNKNOWN_VAR_LOC (VOIDmode); + df_insn_rescan_debug_internal (insn); + } } } @@ -3691,6 +3731,9 @@ df_simulate_uses (rtx insn, bitmap live) struct df_ref **use_rec; unsigned int uid = INSN_UID (insn); + if (DEBUG_INSN_P (insn)) + return; + for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++) { struct df_ref *use = *use_rec; @@ -3745,7 +3788,7 @@ df_simulate_artificial_refs_at_top (basic_block bb, bitmap live) void df_simulate_one_insn_forwards (basic_block bb, rtx insn, bitmap live) { - if (! INSN_P (insn)) + if (! INSN_P (insn) || DEBUG_INSN_P (insn)) return; df_simulate_uses (insn, live); @@ -3785,7 +3828,7 @@ df_simulate_artificial_refs_at_end (basic_block bb, bitmap live) void df_simulate_one_insn_backwards (basic_block bb, rtx insn, bitmap live) { - if (! INSN_P (insn)) + if (! INSN_P (insn) || DEBUG_INSN_P (insn)) return; df_simulate_defs (insn, live); diff --git a/gcc/df-scan.c b/gcc/df-scan.c index a1f71b03b6d..0b77e691fd8 100644 --- a/gcc/df-scan.c +++ b/gcc/df-scan.c @@ -1070,11 +1070,12 @@ df_free_collection_rec (struct df_collection_rec *collection_rec) pool_free (problem_data->mw_reg_pool, *mw); } +/* Rescan INSN. Return TRUE if the rescanning produced any changes. + If KEEPCLEAN is false, don't mark the basic block as dirty even if + there have been changes. */ -/* Rescan INSN. Return TRUE if the rescanning produced any changes. */ - -bool -df_insn_rescan (rtx insn) +static bool +df_insn_rescan_1 (rtx insn, bool keepclean) { unsigned int uid = INSN_UID (insn); struct df_insn_info *insn_info = NULL; @@ -1154,10 +1155,29 @@ df_insn_rescan (rtx insn) } df_refs_add_to_chains (&collection_rec, bb, insn); - df_set_bb_dirty (bb); + if (!keepclean) + df_set_bb_dirty (bb); return true; } +/* Rescan INSN. Return TRUE if the rescanning produced any changes. */ + +bool +df_insn_rescan (rtx insn) +{ + return df_insn_rescan_1 (insn, false); +} + +/* Same as df_insn_rescan, but don't mark the basic block as + dirty. */ + +bool +df_insn_rescan_debug_internal (rtx insn) +{ + gcc_assert (DEBUG_INSN_P (insn)); + return df_insn_rescan_1 (insn, true); +} + /* Rescan all of the insns in the function. Note that the artificial uses and defs are not touched. This function will destroy def-se @@ -2967,6 +2987,13 @@ df_uses_record (struct df_collection_rec *collection_rec, break; } + case VAR_LOCATION: + df_uses_record (collection_rec, + &PAT_VAR_LOCATION_LOC (x), + DF_REF_REG_USE, bb, insn, + flags); + return; + case PRE_DEC: case POST_DEC: case PRE_INC: diff --git a/gcc/df.h b/gcc/df.h index 3ff7f39834c..b129d45c3fa 100644 --- a/gcc/df.h +++ b/gcc/df.h @@ -896,6 +896,7 @@ extern struct df_insn_info * df_insn_create_insn_record (rtx); extern void df_insn_delete (basic_block, unsigned int); extern void df_bb_refs_record (int, bool); extern bool df_insn_rescan (rtx); +extern bool df_insn_rescan_debug_internal (rtx); extern void df_insn_rescan_all (void); extern void df_process_deferred_rescans (void); extern bool df_has_eh_preds (basic_block); diff --git a/gcc/dse.c b/gcc/dse.c index 7bf759d45fc..5ac3fee7dff 100644 --- a/gcc/dse.c +++ b/gcc/dse.c @@ -1931,6 +1931,12 @@ scan_insn (bb_info_t bb_info, rtx insn) bb_info->last_insn = insn_info; + if (DEBUG_INSN_P (insn)) + { + insn_info->cannot_delete = true; + return; + } + /* Cselib clears the table for this case, so we have to essentially do the same. 
*/ if (NONJUMP_INSN_P (insn) diff --git a/gcc/dwarf2out.c b/gcc/dwarf2out.c index 9ca68fb4207..0b8e6f8b41e 100644 --- a/gcc/dwarf2out.c +++ b/gcc/dwarf2out.c @@ -9073,6 +9073,8 @@ mem_loc_descriptor (rtx rtl, enum machine_mode mode, /* ... fall through ... */ case SUBREG: + case SIGN_EXTEND: + case ZERO_EXTEND: /* The case of a subreg may arise when we have a local (register) variable or a formal (register) parameter which doesn't quite fill up an entire register. For now, just assume that it is @@ -9317,6 +9319,8 @@ loc_descriptor (rtx rtl, enum var_init_status initialized) switch (GET_CODE (rtl)) { case SUBREG: + case SIGN_EXTEND: + case ZERO_EXTEND: /* The case of a subreg may arise when we have a local (register) variable or a formal (register) parameter which doesn't quite fill up an entire register. For now, just assume that it is diff --git a/gcc/emit-rtl.c b/gcc/emit-rtl.c index 8d1b1c7e85a..4dddcb407b6 100644 --- a/gcc/emit-rtl.c +++ b/gcc/emit-rtl.c @@ -2506,6 +2506,7 @@ repeat: return; break; + case DEBUG_INSN: case INSN: case JUMP_INSN: case CALL_INSN: @@ -2612,6 +2613,7 @@ repeat: case CC0: return; + case DEBUG_INSN: case INSN: case JUMP_INSN: case CALL_INSN: @@ -2682,6 +2684,7 @@ set_used_flags (rtx x) case CC0: return; + case DEBUG_INSN: case INSN: case JUMP_INSN: case CALL_INSN: @@ -2987,6 +2990,7 @@ active_insn_p (const_rtx insn) { return (CALL_P (insn) || JUMP_P (insn) || (NONJUMP_INSN_P (insn) + && !DEBUG_INSN_P (insn) && (! reload_completed || (GET_CODE (PATTERN (insn)) != USE && GET_CODE (PATTERN (insn)) != CLOBBER)))); @@ -3422,6 +3426,25 @@ make_insn_raw (rtx pattern) return insn; } +/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */ + +rtx +make_debug_insn_raw (rtx pattern) +{ + rtx insn; + + insn = rtx_alloc (DEBUG_INSN); + INSN_UID (insn) = cur_insn_uid++; + + PATTERN (insn) = pattern; + INSN_CODE (insn) = -1; + REG_NOTES (insn) = NULL; + INSN_LOCATOR (insn) = curr_insn_locator (); + BLOCK_FOR_INSN (insn) = NULL; + + return insn; +} + /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */ rtx @@ -3608,7 +3631,8 @@ add_insn_before (rtx insn, rtx before, basic_block bb) /* Replace insn with an deleted instruction note. */ -void set_insn_deleted (rtx insn) +void +set_insn_deleted (rtx insn) { df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn)); PUT_CODE (insn, NOTE); @@ -3837,6 +3861,7 @@ emit_insn_before_noloc (rtx x, rtx before, basic_block bb) switch (GET_CODE (x)) { + case DEBUG_INSN: case INSN: case JUMP_INSN: case CALL_INSN: @@ -3880,6 +3905,7 @@ emit_jump_insn_before_noloc (rtx x, rtx before) switch (GET_CODE (x)) { + case DEBUG_INSN: case INSN: case JUMP_INSN: case CALL_INSN: @@ -3923,6 +3949,7 @@ emit_call_insn_before_noloc (rtx x, rtx before) switch (GET_CODE (x)) { + case DEBUG_INSN: case INSN: case JUMP_INSN: case CALL_INSN: @@ -3954,6 +3981,50 @@ emit_call_insn_before_noloc (rtx x, rtx before) return last; } +/* Make an instruction with body X and code DEBUG_INSN + and output it before the instruction BEFORE. 
*/ + +rtx +emit_debug_insn_before_noloc (rtx x, rtx before) +{ + rtx last = NULL_RTX, insn; + + gcc_assert (before); + + switch (GET_CODE (x)) + { + case DEBUG_INSN: + case INSN: + case JUMP_INSN: + case CALL_INSN: + case CODE_LABEL: + case BARRIER: + case NOTE: + insn = x; + while (insn) + { + rtx next = NEXT_INSN (insn); + add_insn_before (insn, before, NULL); + last = insn; + insn = next; + } + break; + +#ifdef ENABLE_RTL_CHECKING + case SEQUENCE: + gcc_unreachable (); + break; +#endif + + default: + last = make_debug_insn_raw (x); + add_insn_before (last, before, NULL); + break; + } + + return last; +} + /* Make an insn of code BARRIER and output it before the insn BEFORE. */ @@ -4059,6 +4130,7 @@ emit_insn_after_noloc (rtx x, rtx after, basic_block bb) switch (GET_CODE (x)) { + case DEBUG_INSN: case INSN: case JUMP_INSN: case CALL_INSN: @@ -4096,6 +4168,7 @@ emit_jump_insn_after_noloc (rtx x, rtx after) switch (GET_CODE (x)) { + case DEBUG_INSN: case INSN: case JUMP_INSN: case CALL_INSN: @@ -4132,6 +4205,7 @@ emit_call_insn_after_noloc (rtx x, rtx after) switch (GET_CODE (x)) { + case DEBUG_INSN: case INSN: case JUMP_INSN: case CALL_INSN: @@ -4156,6 +4230,43 @@ emit_call_insn_after_noloc (rtx x, rtx after) return last; } +/* Make an instruction with body X and code CALL_INSN + and output it after the instruction AFTER. */ + +rtx +emit_debug_insn_after_noloc (rtx x, rtx after) +{ + rtx last; + + gcc_assert (after); + + switch (GET_CODE (x)) + { + case DEBUG_INSN: + case INSN: + case JUMP_INSN: + case CALL_INSN: + case CODE_LABEL: + case BARRIER: + case NOTE: + last = emit_insn_after_1 (x, after, NULL); + break; + +#ifdef ENABLE_RTL_CHECKING + case SEQUENCE: + gcc_unreachable (); + break; +#endif + + default: + last = make_debug_insn_raw (x); + add_insn_after (last, after, NULL); + break; + } + + return last; +} + /* Make an insn of code BARRIER and output it after the insn AFTER. */ @@ -4294,6 +4405,37 @@ emit_call_insn_after (rtx pattern, rtx after) return emit_call_insn_after_noloc (pattern, after); } +/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */ +rtx +emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc) +{ + rtx last = emit_debug_insn_after_noloc (pattern, after); + + if (pattern == NULL_RTX || !loc) + return last; + + after = NEXT_INSN (after); + while (1) + { + if (active_insn_p (after) && !INSN_LOCATOR (after)) + INSN_LOCATOR (after) = loc; + if (after == last) + break; + after = NEXT_INSN (after); + } + return last; +} + +/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */ +rtx +emit_debug_insn_after (rtx pattern, rtx after) +{ + if (INSN_P (after)) + return emit_debug_insn_after_setloc (pattern, after, INSN_LOCATOR (after)); + else + return emit_debug_insn_after_noloc (pattern, after); +} + /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */ rtx emit_insn_before_setloc (rtx pattern, rtx before, int loc) @@ -4393,6 +4535,39 @@ emit_call_insn_before (rtx pattern, rtx before) else return emit_call_insn_before_noloc (pattern, before); } + +/* like emit_insn_before_noloc, but set insn_locator according to scope. 
*/ +rtx +emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc) +{ + rtx first = PREV_INSN (before); + rtx last = emit_debug_insn_before_noloc (pattern, before); + + if (pattern == NULL_RTX) + return last; + + first = NEXT_INSN (first); + while (1) + { + if (active_insn_p (first) && !INSN_LOCATOR (first)) + INSN_LOCATOR (first) = loc; + if (first == last) + break; + first = NEXT_INSN (first); + } + return last; +} + +/* like emit_debug_insn_before_noloc, + but set insn_locator according to before. */ +rtx +emit_debug_insn_before (rtx pattern, rtx before) +{ + if (INSN_P (before)) + return emit_debug_insn_before_setloc (pattern, before, INSN_LOCATOR (before)); + else + return emit_debug_insn_before_noloc (pattern, before); +} /* Take X and emit it at the end of the doubly-linked INSN list. @@ -4410,6 +4585,7 @@ emit_insn (rtx x) switch (GET_CODE (x)) { + case DEBUG_INSN: case INSN: case JUMP_INSN: case CALL_INSN: @@ -4441,6 +4617,52 @@ emit_insn (rtx x) return last; } +/* Make an insn of code DEBUG_INSN with pattern X + and add it to the end of the doubly-linked list. */ + +rtx +emit_debug_insn (rtx x) +{ + rtx last = last_insn; + rtx insn; + + if (x == NULL_RTX) + return last; + + switch (GET_CODE (x)) + { + case DEBUG_INSN: + case INSN: + case JUMP_INSN: + case CALL_INSN: + case CODE_LABEL: + case BARRIER: + case NOTE: + insn = x; + while (insn) + { + rtx next = NEXT_INSN (insn); + add_insn (insn); + last = insn; + insn = next; + } + break; + +#ifdef ENABLE_RTL_CHECKING + case SEQUENCE: + gcc_unreachable (); + break; +#endif + + default: + last = make_debug_insn_raw (x); + add_insn (last); + break; + } + + return last; +} + /* Make an insn of code JUMP_INSN with pattern X and add it to the end of the doubly-linked list. */ @@ -4451,6 +4673,7 @@ emit_jump_insn (rtx x) switch (GET_CODE (x)) { + case DEBUG_INSN: case INSN: case JUMP_INSN: case CALL_INSN: @@ -4492,6 +4715,7 @@ emit_call_insn (rtx x) switch (GET_CODE (x)) { + case DEBUG_INSN: case INSN: case JUMP_INSN: case CALL_INSN: @@ -4713,6 +4937,8 @@ emit (rtx x) } case CALL_INSN: return emit_call_insn (x); + case DEBUG_INSN: + return emit_debug_insn (x); default: gcc_unreachable (); } @@ -5507,6 +5733,10 @@ emit_copy_of_insn_after (rtx insn, rtx after) new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after); break; + case DEBUG_INSN: + new = emit_debug_insn_after (copy_insn (PATTERN (insn)), after); + break; + case CALL_INSN: new = emit_call_insn_after (copy_insn (PATTERN (insn)), after); if (CALL_INSN_FUNCTION_USAGE (insn)) diff --git a/gcc/expr.c b/gcc/expr.c index 9d2a61ad782..0578cf676f5 100644 --- a/gcc/expr.c +++ b/gcc/expr.c @@ -9129,6 +9129,10 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode, return const0_rtx; } + case VAR_DEBUG_VALUE: + /* Expanded by expand_gimple_basic_block only. 
*/ + gcc_unreachable (); + case RETURN_EXPR: if (!TREE_OPERAND (exp, 0)) expand_null_return (); diff --git a/gcc/final.c b/gcc/final.c index b59a2220011..937f6229d45 100644 --- a/gcc/final.c +++ b/gcc/final.c @@ -407,7 +407,8 @@ get_attr_length_1 (rtx insn ATTRIBUTE_UNUSED, case INSN: body = PATTERN (insn); - if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER) + if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER + || DEBUG_INSN_P (insn)) return 0; else if (GET_CODE (body) == ASM_INPUT || asm_noperands (body) >= 0) diff --git a/gcc/function.c b/gcc/function.c index c37368415ea..c55918d0572 100644 --- a/gcc/function.c +++ b/gcc/function.c @@ -1695,8 +1695,11 @@ instantiate_virtual_regs (void) || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC || GET_CODE (PATTERN (insn)) == ASM_INPUT) continue; - - instantiate_virtual_regs_in_insn (insn); + else if (DEBUG_INSN_P (insn)) + for_each_rtx (&INSN_VAR_LOCATION (insn), + instantiate_virtual_regs_in_rtx, NULL); + else + instantiate_virtual_regs_in_insn (insn); if (INSN_DELETED_P (insn)) continue; diff --git a/gcc/gcse.c b/gcc/gcse.c index 6f308c22e71..26e38d4b23e 100644 --- a/gcc/gcse.c +++ b/gcc/gcse.c @@ -936,7 +936,7 @@ alloc_gcse_mem (void) FOR_EACH_BB (bb) FOR_BB_INSNS (bb, insn) { - if (INSN_P (insn)) + if (INSN_P (insn) && !DEBUG_INSN_P (insn)) uid_cuid[INSN_UID (insn)] = i++; else uid_cuid[INSN_UID (insn)] = i; @@ -949,7 +949,7 @@ alloc_gcse_mem (void) i = 0; FOR_EACH_BB (bb) FOR_BB_INSNS (bb, insn) - if (INSN_P (insn)) + if (INSN_P (insn) && !DEBUG_INSN_P (insn)) CUID_INSN (i++) = insn; /* Allocate vars to track sets of regs. */ @@ -3710,7 +3710,9 @@ bypass_conditional_jumps (void) { setcc = NULL_RTX; FOR_BB_INSNS (bb, insn) - if (NONJUMP_INSN_P (insn)) + if (DEBUG_INSN_P (insn)) + continue; + else if (NONJUMP_INSN_P (insn)) { if (setcc) break; @@ -5307,7 +5309,7 @@ compute_ld_motion_mems (void) { FOR_BB_INSNS (bb, insn) { - if (INSN_P (insn)) + if (INSN_P (insn) && !DEBUG_INSN_P (insn)) { if (GET_CODE (PATTERN (insn)) == SET) { diff --git a/gcc/haifa-sched.c b/gcc/haifa-sched.c index 9d1f8b025f5..d902003a5a3 100644 --- a/gcc/haifa-sched.c +++ b/gcc/haifa-sched.c @@ -708,6 +708,10 @@ dep_cost (dep_t link) static bool contributes_to_priority_p (dep_t dep) { + if (DEBUG_INSN_P (DEP_CON (dep)) + || DEBUG_INSN_P (DEP_PRO (dep))) + return false; + /* Critical path is meaningful in block boundaries only. */ if (!current_sched_info->contributes_to_priority (DEP_CON (dep), DEP_PRO (dep))) @@ -727,6 +731,31 @@ contributes_to_priority_p (dep_t dep) return true; } +/* Compute the number of nondebug forward deps of an insn. */ + +static int +nondebug_dep_list_size (rtx insn) +{ + sd_iterator_def sd_it; + dep_t dep; + int dbgcount = 0, nodbgcount = 0; + + if (!MAY_HAVE_DEBUG_INSNS) + return sd_lists_size (insn, SD_LIST_FORW); + + FOR_EACH_DEP (insn, SD_LIST_FORW, sd_it, dep) + { + if (DEBUG_INSN_P (DEP_CON (dep))) + dbgcount++; + else + nodbgcount++; + } + + gcc_assert (dbgcount + nodbgcount == sd_lists_size (insn, SD_LIST_FORW)); + + return nodbgcount; +} + /* Compute the priority number for INSN. */ static int priority (rtx insn) @@ -741,7 +770,7 @@ priority (rtx insn) { int this_priority = 0; - if (sd_lists_empty_p (insn, SD_LIST_FORW)) + if (nondebug_dep_list_size (insn) == 0) /* ??? We should set INSN_PRIORITY to insn_cost when and insn has some forward deps but all of them are ignored by contributes_to_priority hook. 
At the moment we set priority of @@ -837,8 +866,19 @@ rank_for_schedule (const void *x, const void *y) { rtx tmp = *(const rtx *) y; rtx tmp2 = *(const rtx *) x; + rtx last; int tmp_class, tmp2_class; int val, priority_val, weight_val, info_val; + bool has_debug = MAY_HAVE_DEBUG_INSNS; + + if (has_debug) + { + /* Schedule debug insns as early as possible. */ + if (DEBUG_INSN_P (tmp) && !DEBUG_INSN_P (tmp2)) + return -1; + else if (DEBUG_INSN_P (tmp2)) + return 1; + } /* The insn in a schedule group should be issued the first. */ if (SCHED_GROUP_P (tmp) != SCHED_GROUP_P (tmp2)) @@ -886,8 +926,15 @@ rank_for_schedule (const void *x, const void *y) if (info_val) return info_val; - /* Compare insns based on their relation to the last-scheduled-insn. */ - if (INSN_P (last_scheduled_insn)) + last = last_scheduled_insn; + + if (has_debug) + while (DEBUG_INSN_P (last)) + last = PREV_INSN (last); + + /* Compare insns based on their relation to the last scheduled + non-debug insn. */ + if (INSN_P (last)) { dep_t dep1; dep_t dep2; @@ -897,7 +944,7 @@ rank_for_schedule (const void *x, const void *y) 2) Anti/Output dependent on last scheduled insn. 3) Independent of last scheduled insn, or has latency of one. Choose the insn from the highest numbered class if different. */ - dep1 = sd_find_dep_between (last_scheduled_insn, tmp, true); + dep1 = sd_find_dep_between (last, tmp, true); if (dep1 == NULL || dep_cost (dep1) == 1) tmp_class = 3; @@ -907,7 +954,7 @@ rank_for_schedule (const void *x, const void *y) else tmp_class = 2; - dep2 = sd_find_dep_between (last_scheduled_insn, tmp2, true); + dep2 = sd_find_dep_between (last, tmp2, true); if (dep2 == NULL || dep_cost (dep2) == 1) tmp2_class = 3; @@ -925,8 +972,13 @@ rank_for_schedule (const void *x, const void *y) This gives the scheduler more freedom when scheduling later instructions at the expense of added register pressure. */ - val = (sd_lists_size (tmp2, SD_LIST_FORW) - - sd_lists_size (tmp, SD_LIST_FORW)); + if (has_debug) + val = (nondebug_dep_list_size (tmp2) + - nondebug_dep_list_size (tmp)); + else + val = (sd_lists_size (tmp2, SD_LIST_FORW) + - sd_lists_size (tmp, SD_LIST_FORW)); + if (val != 0) return val; @@ -2467,7 +2519,8 @@ schedule_block (basic_block *target_bb, int rgn_n_insns1) /* A naked CLOBBER or USE generates no instruction, so do not count them against the issue rate. 
*/ else if (GET_CODE (PATTERN (insn)) != USE - && GET_CODE (PATTERN (insn)) != CLOBBER) + && GET_CODE (PATTERN (insn)) != CLOBBER + && !DEBUG_INSN_P (insn)) can_issue_more--; advance = schedule_insn (insn); @@ -4368,7 +4421,7 @@ add_jump_dependencies (rtx insn, rtx jump) if (insn == jump) break; - if (sd_lists_empty_p (insn, SD_LIST_FORW)) + if (nondebug_dep_list_size (insn) == 0) { dep_def _new_dep, *new_dep = &_new_dep; diff --git a/gcc/ifcvt.c b/gcc/ifcvt.c index 0c0ab8cab92..c2168543869 100644 --- a/gcc/ifcvt.c +++ b/gcc/ifcvt.c @@ -193,7 +193,7 @@ first_active_insn (basic_block bb) insn = NEXT_INSN (insn); } - while (NOTE_P (insn)) + while (NOTE_P (insn) || DEBUG_INSN_P (insn)) { if (insn == BB_END (bb)) return NULL_RTX; @@ -216,6 +216,7 @@ last_active_insn (basic_block bb, int skip_use_p) while (NOTE_P (insn) || JUMP_P (insn) + || DEBUG_INSN_P (insn) || (skip_use_p && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == USE)) @@ -268,7 +269,7 @@ cond_exec_process_insns (ce_if_block_t *ce_info ATTRIBUTE_UNUSED, for (insn = start; ; insn = NEXT_INSN (insn)) { - if (NOTE_P (insn)) + if (NOTE_P (insn) || DEBUG_INSN_P (insn)) goto insn_done; gcc_assert(NONJUMP_INSN_P (insn) || CALL_P (insn)); @@ -2196,6 +2197,8 @@ noce_process_if_block (struct noce_if_info *if_info) else { insn_b = prev_nonnote_insn (if_info->cond_earliest); + while (insn_b && DEBUG_INSN_P (insn_b)) + insn_b = PREV_INSN (insn_b); /* We're going to be moving the evaluation of B down from above COND_EARLIEST to JUMP. Make sure the relevant data is still intact. */ @@ -2400,6 +2403,8 @@ check_cond_move_block (basic_block bb, rtx *vals, VEC (int, heap) *regs, rtx con /* We can only handle simple jumps at the end of the basic block. It is almost impossible to update the CFG otherwise. */ insn = BB_END (bb); + while (DEBUG_INSN_P (insn)) + insn = PREV_INSN (insn); if (JUMP_P (insn) && !onlyjump_p (insn)) return FALSE; @@ -2407,7 +2412,7 @@ check_cond_move_block (basic_block bb, rtx *vals, VEC (int, heap) *regs, rtx con { rtx set, dest, src; - if (!INSN_P (insn) || JUMP_P (insn)) + if (!INSN_P (insn) || DEBUG_INSN_P (insn) || JUMP_P (insn)) continue; set = single_set (insn); if (!set) @@ -2485,7 +2490,8 @@ cond_move_convert_if_block (struct noce_if_info *if_infop, rtx set, target, dest, t, e; unsigned int regno; - if (!INSN_P (insn) || JUMP_P (insn)) + /* ??? Maybe emit conditional debug insn? 
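The ifcvt changes above all follow one pattern: when scanning a basic block for its first or last "interesting" insn, debug insns are skipped exactly like notes, so their presence cannot change which blocks get if-converted. A standalone sketch of that scan, over a simplified doubly linked chain rather than real RTL:

#include <stddef.h>

enum insn_kind { K_NOTE, K_DEBUG, K_LABEL, K_REAL };
struct insn { enum insn_kind kind; struct insn *prev, *next; };

/* First insn in [HEAD, END] worth looking at, skipping labels, notes
   and debug insns, or NULL if the block has none.  */
static struct insn *
first_active (struct insn *head, struct insn *end)
{
  struct insn *insn = head;

  while (insn->kind == K_NOTE || insn->kind == K_DEBUG
         || insn->kind == K_LABEL)
    {
      if (insn == end)
        return NULL;
      insn = insn->next;
    }
  return insn;
}

/* Likewise for the last one, walking backwards from END.  */
static struct insn *
last_active (struct insn *head, struct insn *end)
{
  struct insn *insn = end;

  while (insn->kind == K_NOTE || insn->kind == K_DEBUG)
    {
      if (insn == head)
        return NULL;
      insn = insn->prev;
    }
  return insn;
}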
*/ + if (!INSN_P (insn) || DEBUG_INSN_P (insn) || JUMP_P (insn)) continue; set = single_set (insn); gcc_assert (set && REG_P (SET_DEST (set))); @@ -3026,6 +3032,7 @@ block_jumps_and_fallthru_p (basic_block cur_bb, basic_block target_bb) if (INSN_P (insn) && !JUMP_P (insn) + && !DEBUG_INSN_P (insn) && GET_CODE (PATTERN (insn)) != USE && GET_CODE (PATTERN (insn)) != CLOBBER) n_insns++; @@ -3689,6 +3696,9 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb, head = BB_HEAD (merge_bb); end = BB_END (merge_bb); + while (DEBUG_INSN_P (end) && end != head) + end = PREV_INSN (end); + /* If merge_bb ends with a tablejump, predicating/moving insn's into test_bb and then deleting merge_bb will result in the jumptable that follows merge_bb being removed along with merge_bb and then we @@ -3698,6 +3708,8 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb, if (LABEL_P (head)) head = NEXT_INSN (head); + while (DEBUG_INSN_P (head) && head != end) + head = NEXT_INSN (head); if (NOTE_P (head)) { if (head == end) @@ -3706,6 +3718,8 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb, goto no_body; } head = NEXT_INSN (head); + while (DEBUG_INSN_P (head) && head != end) + head = NEXT_INSN (head); } if (JUMP_P (end)) @@ -3716,6 +3730,8 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb, goto no_body; } end = PREV_INSN (end); + while (DEBUG_INSN_P (end) && end != head) + end = PREV_INSN (end); } /* Disable handling dead code by conditional execution if the machine needs @@ -3773,7 +3789,7 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb, { if (CALL_P (insn)) return FALSE; - if (INSN_P (insn)) + if (INSN_P (insn) && !DEBUG_INSN_P (insn)) { if (may_trap_p (PATTERN (insn))) return FALSE; @@ -3819,7 +3835,7 @@ dead_or_predicable (basic_block test_bb, basic_block merge_bb, FOR_BB_INSNS (merge_bb, insn) { - if (INSN_P (insn)) + if (INSN_P (insn) && !DEBUG_INSN_P (insn)) { unsigned int uid = INSN_UID (insn); struct df_ref **def_rec; diff --git a/gcc/init-regs.c b/gcc/init-regs.c index 40e7b13ab79..f84e26f697f 100644 --- a/gcc/init-regs.c +++ b/gcc/init-regs.c @@ -71,7 +71,7 @@ initialize_uninitialized_regs (void) { unsigned int uid = INSN_UID (insn); struct df_ref **use_rec; - if (!INSN_P (insn)) + if (!INSN_P (insn) || DEBUG_INSN_P (insn)) continue; for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++) diff --git a/gcc/local-alloc.c b/gcc/local-alloc.c index dc56ca47917..83bf61ed782 100644 --- a/gcc/local-alloc.c +++ b/gcc/local-alloc.c @@ -1283,7 +1283,7 @@ block_alloc (int b) insn = BB_END (BASIC_BLOCK (b)); while (1) { - if (!NOTE_P (insn)) + if (!NOTE_P (insn) && !DEBUG_INSN_P (insn)) { ++insn_count; gcc_assert (insn_count <= max_uid); @@ -1308,10 +1308,10 @@ block_alloc (int b) insn = BB_HEAD (BASIC_BLOCK (b)); while (1) { - if (!NOTE_P (insn)) + if (!NOTE_P (insn) && !DEBUG_INSN_P (insn)) insn_number++; - if (INSN_P (insn)) + if (INSN_P (insn) && !DEBUG_INSN_P (insn)) { rtx link, set; int win = 0; diff --git a/gcc/lower-subreg.c b/gcc/lower-subreg.c index b8e2eb65842..645aff67b95 100644 --- a/gcc/lower-subreg.c +++ b/gcc/lower-subreg.c @@ -533,6 +533,32 @@ resolve_subreg_use (rtx *px, void *data) return 0; } +/* This is called via for_each_rtx. Look for SUBREGs which can be + decomposed and decomposed REGs that need copying. 
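The new lower-subreg helper above is driven through for_each_rtx, which hands the callback a pointer to every sub-expression slot so it can rewrite the slot in place; the same mechanism is what lets instantiate_virtual_regs process debug insns without a full re-recognition. A sketch of that walker shape over a toy expression tree (the names and two-operand layout are assumptions, not GCC's rtx format):

struct expr { int code; struct expr *op[2]; };

typedef int (*expr_cb) (struct expr **slot, void *data);

/* Depth-first walk that passes the callback a pointer to each slot,
   so the callback may replace the subexpression in place.  A nonzero
   return stops the walk, as with for_each_rtx.  */
static int
for_each_expr (struct expr **slot, expr_cb cb, void *data)
{
  int ret;

  if (*slot == (struct expr *) 0)
    return 0;

  ret = cb (slot, data);
  if (ret != 0)
    return ret;

  for (int i = 0; i < 2; i++)
    {
      ret = for_each_expr (&(*slot)->op[i], cb, data);
      if (ret != 0)
        return ret;
    }
  return 0;
}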
*/ + +static int +adjust_decomposed_uses (rtx *px, void *data ATTRIBUTE_UNUSED) +{ + rtx x = *px; + + if (x == NULL_RTX) + return 0; + + if (resolve_subreg_p (x)) + { + x = simplify_subreg_concatn (GET_MODE (x), SUBREG_REG (x), + SUBREG_BYTE (x)); + + if (x) + *px = x; + } + + if (resolve_reg_p (x)) + *px = copy_rtx (x); + + return 0; +} + /* We are deleting INSN. Move any EH_REGION notes to INSNS. */ static void @@ -960,6 +986,18 @@ resolve_use (rtx pat, rtx insn) return false; } +/* A VAR_LOCATION can be simplified. */ + +static void +resolve_debug (rtx insn) +{ + for_each_rtx (&PATTERN (insn), adjust_decomposed_uses, NULL_RTX); + + df_insn_rescan (insn); + + resolve_reg_notes (insn); +} + /* Checks if INSN is a decomposable multiword-shift or zero-extend and sets the decomposable_context bitmap accordingly. A non-zero value is returned if a decomposable insn has been found. */ @@ -1270,6 +1308,8 @@ decompose_multiword_subregs (void) resolve_clobber (pat, insn); else if (GET_CODE (pat) == USE) resolve_use (pat, insn); + else if (DEBUG_INSN_P (insn)) + resolve_debug (insn); else { rtx set; diff --git a/gcc/print-rtl.c b/gcc/print-rtl.c index d77645573f3..fea368616ab 100644 --- a/gcc/print-rtl.c +++ b/gcc/print-rtl.c @@ -207,6 +207,21 @@ print_rtx (const_rtx in_rtx) /* For other rtl, print the mode if it's not VOID. */ else if (GET_MODE (in_rtx) != VOIDmode) fprintf (outfile, ":%s", GET_MODE_NAME (GET_MODE (in_rtx))); + +#ifndef GENERATOR_FILE + if (GET_CODE (in_rtx) == VAR_LOCATION) + { + print_mem_expr (outfile, PAT_VAR_LOCATION_DECL (in_rtx)); + fputc (' ', outfile); + print_rtx (PAT_VAR_LOCATION_LOC (in_rtx)); + if (PAT_VAR_LOCATION_STATUS (in_rtx) + == VAR_INIT_STATUS_UNINITIALIZED) + fprintf (outfile, " [uninit]"); + fputc (')', outfile); + sawclose = 1; + i = GET_RTX_LENGTH (VAR_LOCATION); + } +#endif } } @@ -320,14 +335,8 @@ print_rtx (const_rtx in_rtx) case NOTE_INSN_VAR_LOCATION: #ifndef GENERATOR_FILE - fprintf (outfile, " ("); - print_mem_expr (outfile, NOTE_VAR_LOCATION_DECL (in_rtx)); - fprintf (outfile, " "); - print_rtx (NOTE_VAR_LOCATION_LOC (in_rtx)); - if (NOTE_VAR_LOCATION_STATUS (in_rtx) == - VAR_INIT_STATUS_UNINITIALIZED) - fprintf (outfile, " [uninit]"); - fprintf (outfile, ")"); + fputc (' ', outfile); + print_rtx (NOTE_VAR_LOCATION (in_rtx)); #endif break; diff --git a/gcc/recog.c b/gcc/recog.c index 6a9ae4510c0..fd96e75c382 100644 --- a/gcc/recog.c +++ b/gcc/recog.c @@ -373,6 +373,8 @@ verify_changes (int num) if (! memory_address_p (GET_MODE (object), XEXP (object, 0))) break; } + else if (DEBUG_INSN_P (object)) + continue; else if (insn_invalid_p (object)) { rtx pat = PATTERN (object); @@ -413,7 +415,8 @@ verify_changes (int num) validate_change (object, &PATTERN (object), newpat, 1); continue; } - else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER) + else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER + || GET_CODE (pat) == VAR_LOCATION) /* If this insn is a CLOBBER or USE, it is always valid, but is never recognized. 
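The print-rtl change above moves VAR_LOCATION printing from the NOTE case into generic pattern printing, so debug insns and location notes render the same way. A tiny standalone sketch of that output format, using a simplified record in place of the rtx:

#include <stdio.h>
#include <stdbool.h>

/* Simplified stand-in for a VAR_LOCATION pattern.  */
struct var_location
{
  const char *decl_name;   /* the user variable */
  const char *loc_string;  /* rendered location, e.g. "(reg:SI 3 bx)" */
  bool uninitialized;
};

/* Print the decl, a space, the location, and an optional [uninit]
   marker, matching the layout the patch teaches print_rtx.  */
static void
print_var_location (FILE *out, const struct var_location *vl)
{
  fprintf (out, "(var_location %s %s%s)",
           vl->decl_name, vl->loc_string,
           vl->uninitialized ? " [uninit]" : "");
}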
*/ continue; @@ -1940,6 +1943,7 @@ extract_insn (rtx insn) case ASM_INPUT: case ADDR_VEC: case ADDR_DIFF_VEC: + case VAR_LOCATION: return; case SET: @@ -2946,7 +2950,7 @@ peephole2_optimize (void) for (insn = BB_END (bb); ; insn = prev) { prev = PREV_INSN (insn); - if (INSN_P (insn)) + if (INSN_P (insn) && !DEBUG_INSN_P (insn)) { rtx try, before_try, x; int match_len; diff --git a/gcc/reg-stack.c b/gcc/reg-stack.c index f5d263fbff2..f502958aec5 100644 --- a/gcc/reg-stack.c +++ b/gcc/reg-stack.c @@ -310,7 +310,7 @@ stack_regs_mentioned (const_rtx insn) unsigned int uid, max; int test; - if (! INSN_P (insn) || !stack_regs_mentioned_data) + if (! INSN_P (insn) /* || DEBUG_INSN_P (insn) */ || !stack_regs_mentioned_data) return 0; uid = INSN_UID (insn); @@ -1361,6 +1361,16 @@ subst_stack_regs_pat (rtx insn, stack regstack, rtx pat) since the REG_DEAD notes are not issued.) */ break; + case VAR_LOCATION: + for (dest = ®_NOTES (insn); *dest; dest = &XEXP (*dest, 1)) + if (REG_NOTE_KIND (*dest) == REG_DEAD + && STACK_REG_P (*(src = &XEXP (*dest, 0))) + && TEST_HARD_REG_BIT (regstack->reg_set, REGNO (*src))) + /* ??? This is not right. We want to *avoid* emitting the + pop and the corresponding push. */ + emit_pop_insn (insn, regstack, *src, EMIT_AFTER); + break; + case CLOBBER: { rtx note; diff --git a/gcc/regmove.c b/gcc/regmove.c index 4cb083cbd48..8bf319606fc 100644 --- a/gcc/regmove.c +++ b/gcc/regmove.c @@ -545,9 +545,12 @@ optimize_reg_copy_1 (rtx insn, rtx dest, rtx src) /* For SREGNO, count the total number of insns scanned. For DREGNO, count the total number of insns scanned after passing the death note for DREGNO. */ - s_length++; - if (dest_death) - d_length++; + if (!DEBUG_INSN_P (p)) + { + s_length++; + if (dest_death) + d_length++; + } /* If the insn in which SRC dies is a CALL_INSN, don't count it as a call that has been crossed. Otherwise, count it. */ @@ -976,7 +979,7 @@ fixup_match_2 (rtx insn, rtx dst, rtx src, rtx offset) if (find_regno_note (p, REG_DEAD, REGNO (dst))) dst_death = p; - if (! dst_death) + if (! dst_death && !DEBUG_INSN_P (p)) length++; pset = single_set (p); @@ -1421,7 +1424,8 @@ regmove_optimize (rtx f, int nregs) else if (! INSN_P (p)) continue; - length++; + if (!DEBUG_INSN_P (p)) + length++; /* ??? See if all of SRC is set in P. This test is much more conservative than it needs to be. */ @@ -1431,22 +1435,9 @@ regmove_optimize (rtx f, int nregs) /* We use validate_replace_rtx, in case there are multiple identical source operands. All of them have to be changed at the same time. */ + validate_change (p, &SET_DEST (pset), dst, 1); if (validate_replace_rtx (src, dst, insn)) - { - if (validate_change (p, &SET_DEST (pset), - dst, 0)) - success = 1; - else - { - /* Change all source operands back. - This modifies the dst as a side-effect. */ - validate_replace_rtx (dst, src, insn); - /* Now make sure the dst is right. */ - validate_change (insn, - recog_data.operand_loc[match_no], - dst, 0); - } - } + success = 1; break; } @@ -1455,9 +1446,21 @@ regmove_optimize (rtx f, int nregs) eliminate SRC. We can't make this change if DST is mentioned at all in P, since we are going to change its value. 
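The regmove rewrite above leans on GCC's change-group machinery: tentative replacements are queued with validate_change (..., 1), and if the transformation as a whole does not pan out they are undone with cancel_changes (0). A minimal model of that bookkeeping, without the re-recognition step and with made-up names:

#include <assert.h>

/* A queued tentative change: where, and what to restore on cancel.  */
struct change { int *loc; int old_val; };

#define MAX_CHANGES 16
static struct change pending[MAX_CHANGES];
static int n_pending;

/* Install NEW_VAL at LOC but remember the old value so the whole
   group can be undone, like validate_change with in_group = 1.  */
static void
queue_change (int *loc, int new_val)
{
  assert (n_pending < MAX_CHANGES);
  pending[n_pending].loc = loc;
  pending[n_pending].old_val = *loc;
  n_pending++;
  *loc = new_val;
}

/* Undo every change made since NUM, like cancel_changes (NUM).  */
static void
cancel_changes_from (int num)
{
  for (int i = n_pending - 1; i >= num; i--)
    *pending[i].loc = pending[i].old_val;
  n_pending = num;
}

/* Accept the whole group, like confirm_change_group.  */
static void
confirm_changes (void)
{
  n_pending = 0;
}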
*/ - if (reg_overlap_mentioned_p (src, PATTERN (p)) - || reg_mentioned_p (dst, PATTERN (p))) - break; + if (reg_overlap_mentioned_p (src, PATTERN (p))) + { + if (DEBUG_INSN_P (p)) + validate_replace_rtx_group (dst, src, insn); + else + break; + } + if (reg_mentioned_p (dst, PATTERN (p))) + { + if (DEBUG_INSN_P (p)) + validate_change (p, &INSN_VAR_LOCATION_LOC (p), + gen_rtx_UNKNOWN_VAR_LOC (VOIDmode), 1); + else + break; + } /* If we have passed a call instruction, and the pseudo-reg DST is not already live across a call, @@ -1516,6 +1519,8 @@ regmove_optimize (rtx f, int nregs) break; } + else if (num_changes_pending () > 0) + cancel_changes (0); } /* If we weren't able to replace any of the alternatives, try an @@ -1737,9 +1742,12 @@ fixup_match_1 (rtx insn, rtx set, rtx src, rtx src_subreg, rtx dst, else if (! INSN_P (p)) continue; - length++; - if (src_note) - s_length++; + if (!DEBUG_INSN_P (p)) + { + length++; + if (src_note) + s_length++; + } if (reg_set_p (src, p) || reg_set_p (dst, p) || (GET_CODE (PATTERN (p)) == USE @@ -1849,16 +1857,30 @@ fixup_match_1 (rtx insn, rtx set, rtx src, rtx src_subreg, rtx dst, } if (reg_overlap_mentioned_p (dst, PATTERN (p))) - break; + { + if (DEBUG_INSN_P (p)) + validate_replace_rtx_group (dst, src_subreg, p); + else + break; + } if (! src_note && reg_overlap_mentioned_p (src, PATTERN (p))) { - /* INSN was already checked to be movable wrt. the registers that it - sets / uses when we found no REG_DEAD note for src on it, but it - still might clobber the flags register. We'll have to check that - we won't insert it into the shadow of a live flags register when - we finally know where we are to move it. */ - overlap = p; - src_note = find_reg_note (p, REG_DEAD, src); + if (DEBUG_INSN_P (p)) + /* ??? Can we do better? */ + validate_change (p, &INSN_VAR_LOCATION_LOC (p), + gen_rtx_UNKNOWN_VAR_LOC (VOIDmode), 1); + else + { + /* INSN was already checked to be movable wrt. the + registers that it sets / uses when we found no + REG_DEAD note for src on it, but it still might + clobber the flags register. We'll have to check that + we won't insert it into the shadow of a live flags + register when we finally know where we are to move + it. */ + overlap = p; + src_note = find_reg_note (p, REG_DEAD, src); + } } /* If we have passed a call instruction, and the pseudo-reg SRC is not @@ -1877,7 +1899,11 @@ fixup_match_1 (rtx insn, rtx set, rtx src, rtx src_subreg, rtx dst, } if (! success) - return 0; + { + if (num_changes_pending () > 0) + cancel_changes (0); + return 0; + } /* Remove the death note for DST from P. 
*/ remove_note (p, dst_note); diff --git a/gcc/regrename.c b/gcc/regrename.c index 74112c3ca53..df7fb63e456 100644 --- a/gcc/regrename.c +++ b/gcc/regrename.c @@ -1397,7 +1397,10 @@ replace_oldest_value_reg (rtx *loc, enum reg_class cl, rtx insn, fprintf (dump_file, "insn %u: replaced reg %u with %u\n", INSN_UID (insn), REGNO (*loc), REGNO (new)); - validate_change (insn, loc, new, 1); + if (DEBUG_INSN_P (insn)) + *loc = new; + else + validate_change (insn, loc, new, 1); return true; } return false; @@ -1421,6 +1424,9 @@ replace_oldest_value_addr (rtx *loc, enum reg_class cl, switch (code) { case PLUS: + if (DEBUG_INSN_P (insn)) + break; + { rtx orig_op0 = XEXP (x, 0); rtx orig_op1 = XEXP (x, 1); @@ -1555,9 +1561,14 @@ replace_oldest_value_addr (rtx *loc, enum reg_class cl, static bool replace_oldest_value_mem (rtx x, rtx insn, struct value_data *vd) { - return replace_oldest_value_addr (&XEXP (x, 0), - base_reg_class (GET_MODE (x), MEM, - SCRATCH), + enum reg_class cl; + + if (DEBUG_INSN_P (insn)) + cl = ALL_REGS; + else + cl = base_reg_class (GET_MODE (x), MEM, SCRATCH); + + return replace_oldest_value_addr (&XEXP (x, 0), cl, GET_MODE (x), insn, vd); } @@ -1566,7 +1577,7 @@ replace_oldest_value_mem (rtx x, rtx insn, struct value_data *vd) static bool copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd) { - bool changed = false; + bool anything_changed = false; rtx insn; for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn)) @@ -1575,8 +1586,22 @@ copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd) bool is_asm, any_replacements; rtx set; bool replaced[MAX_RECOG_OPERANDS]; + bool changed = false; + + if (DEBUG_INSN_P (insn)) + { + rtx loc = INSN_VAR_LOCATION_LOC (insn); + if (!VAR_LOC_UNKNOWN_P (loc) + && replace_oldest_value_addr (&INSN_VAR_LOCATION_LOC (insn), + ALL_REGS, GET_MODE (loc), + insn, vd)) + { + df_insn_rescan (insn); + anything_changed = true; + } + } - if (! INSN_P (insn)) + if (DEBUG_INSN_P (insn) || ! INSN_P (insn)) { if (insn == BB_END (bb)) break; @@ -1763,6 +1788,12 @@ copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd) } did_replacement: + if (changed) + { + df_insn_rescan (insn); + anything_changed = true; + } + /* Clobber call-clobbered registers. */ if (CALL_P (insn)) for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) @@ -1780,7 +1811,7 @@ copyprop_hardreg_forward_1 (basic_block bb, struct value_data *vd) break; } - return changed; + return anything_changed; } /* Main entry point for the forward copy propagation optimization. 
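The regmove and regrename hunks above share one policy for debug statements: when an optimization changes what a register holds, each debug location that mentions it is either rewritten in terms of something equivalent or reset to the unknown location, and the optimization itself is never blocked. A standalone sketch of that policy over a simplified debug-location record (the names here are illustrative, not GCC's):

#include <stdbool.h>
#include <stddef.h>

/* Simplified debug statement: a variable and its current location.  */
struct debug_loc { const char *var; int reg; bool known; };

/* REG is about to change meaning.  If HAVE_REPLACEMENT, REPL_REG
   still holds the same value, so retarget debug locations to it;
   otherwise mark them unknown, the analogue of storing
   gen_rtx_UNKNOWN_VAR_LOC.  Real insns are untouched, so code
   generation is unaffected either way.  */
static void
fix_debug_locs (struct debug_loc *locs, size_t n, int reg,
                int repl_reg, bool have_replacement)
{
  for (size_t i = 0; i < n; i++)
    {
      if (!locs[i].known || locs[i].reg != reg)
        continue;
      if (have_replacement)
        locs[i].reg = repl_reg;
      else
        locs[i].known = false;
    }
}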
*/ diff --git a/gcc/regstat.c b/gcc/regstat.c index 9f8e41f35af..7b7790f807f 100644 --- a/gcc/regstat.c +++ b/gcc/regstat.c @@ -62,11 +62,27 @@ regstat_init_n_sets_and_refs (void) regstat_n_sets_and_refs = xmalloc (max_regno * sizeof (struct regstat_n_sets_and_refs_t)); - for (i = 0; i < max_regno; i++) - { - SET_REG_N_SETS (i, DF_REG_DEF_COUNT (i)); - SET_REG_N_REFS (i, DF_REG_USE_COUNT (i) + REG_N_SETS (i)); - } + if (MAY_HAVE_DEBUG_INSNS) + for (i = 0; i < max_regno; i++) + { + int use_count; + struct df_ref *use; + + use_count = DF_REG_USE_COUNT (i); + for (use = DF_REG_USE_CHAIN (i); use; use = DF_REF_NEXT_REG (use)) + if (DF_REF_INSN (use) && DEBUG_INSN_P (DF_REF_INSN (use))) + use_count--; + + + SET_REG_N_SETS (i, DF_REG_DEF_COUNT (i)); + SET_REG_N_REFS (i, use_count + REG_N_SETS (i)); + } + else + for (i = 0; i < max_regno; i++) + { + SET_REG_N_SETS (i, DF_REG_DEF_COUNT (i)); + SET_REG_N_REFS (i, DF_REG_USE_COUNT (i) + REG_N_SETS (i)); + } timevar_pop (TV_REG_STATS); } @@ -150,7 +166,7 @@ regstat_bb_compute_ri (unsigned int bb_index, struct df_mw_hardreg **mws_rec; rtx link; - if (!INSN_P (insn)) + if (!INSN_P (insn) || DEBUG_INSN_P (insn)) continue; /* Increment the live_length for all of the registers that diff --git a/gcc/reload1.c b/gcc/reload1.c index 9021050c409..b3906b46757 100644 --- a/gcc/reload1.c +++ b/gcc/reload1.c @@ -791,7 +791,7 @@ reload (rtx first, int global) && GET_MODE (insn) != VOIDmode) PUT_MODE (insn, VOIDmode); - if (INSN_P (insn)) + if (INSN_P (insn) && !DEBUG_INSN_P (insn)) scan_paradoxical_subregs (PATTERN (insn)); if (set != 0 && REG_P (SET_DEST (set))) @@ -3071,7 +3071,8 @@ eliminate_regs_in_insn (rtx insn, int replace) || GET_CODE (PATTERN (insn)) == CLOBBER || GET_CODE (PATTERN (insn)) == ADDR_VEC || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC - || GET_CODE (PATTERN (insn)) == ASM_INPUT); + || GET_CODE (PATTERN (insn)) == ASM_INPUT + || DEBUG_INSN_P (insn)); return 0; } diff --git a/gcc/resource.c b/gcc/resource.c index 7cb4d2ae490..b578e23816c 100644 --- a/gcc/resource.c +++ b/gcc/resource.c @@ -996,6 +996,9 @@ mark_target_live_regs (rtx insns, rtx target, struct resources *res) rtx real_insn = insn; enum rtx_code code = GET_CODE (insn); + if (DEBUG_INSN_P (insn)) + continue; + /* If this insn is from the target of a branch, it isn't going to be used in the sequel. If it is used in both cases, this test will not be true. */ diff --git a/gcc/rtl.def b/gcc/rtl.def index 9dee20016e3..0799cecfc8f 100644 --- a/gcc/rtl.def +++ b/gcc/rtl.def @@ -111,6 +111,9 @@ DEF_RTL_EXPR(ADDRESS, "address", "e", RTX_MATCH) ---------------------------------------------------------------------- */ +/* An annotation for variable assignment tracking. */ +DEF_RTL_EXPR(DEBUG_INSN, "debug_insn", "iuuBieie", RTX_INSN) + /* An instruction that cannot jump. */ DEF_RTL_EXPR(INSN, "insn", "iuuBieie", RTX_INSN) diff --git a/gcc/rtl.h b/gcc/rtl.h index 579f8cdb2cb..c68662bbf3e 100644 --- a/gcc/rtl.h +++ b/gcc/rtl.h @@ -380,9 +380,15 @@ struct rtvec_def GTY(()) { /* Predicate yielding nonzero iff X is an insn that cannot jump. */ #define NONJUMP_INSN_P(X) (GET_CODE (X) == INSN) +/* Predicate yielding nonzero iff X is a debug note/insn. */ +#define DEBUG_INSN_P(X) (GET_CODE (X) == DEBUG_INSN) + +/* Nonzero if DEBUG_INSN_P may possibly hold. */ +#define MAY_HAVE_DEBUG_INSNS (flag_var_tracking_assignments) + /* Predicate yielding nonzero iff X is a real insn. 
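The regstat change above keeps register statistics identical with and without -fvar-tracking-assignments by subtracting uses that live in debug insns from the raw df use count. A sketch of just that subtraction, over a stand-in use chain:

#include <stdbool.h>

/* Simplified stand-in for a df use record on a register's chain.  */
struct use { struct use *next_reg_use; bool in_debug_insn; };

/* Number of uses of a register that appear in real insns: the raw
   count minus the chained uses that sit in debug insns.  */
static int
nondebug_use_count (int raw_count, const struct use *chain,
                    bool may_have_debug)
{
  if (!may_have_debug)
    return raw_count;

  for (const struct use *u = chain; u; u = u->next_reg_use)
    if (u->in_debug_insn)
      raw_count--;

  return raw_count;
}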
*/ #define INSN_P(X) \ - (NONJUMP_INSN_P (X) || JUMP_P (X) || CALL_P (X)) + (NONJUMP_INSN_P (X) || DEBUG_INSN_P (X) || JUMP_P (X) || CALL_P (X)) /* Predicate yielding nonzero iff X is a note insn. */ #define NOTE_P(X) (GET_CODE (X) == NOTE) @@ -753,12 +759,13 @@ extern void rtl_check_failed_flag (const char *, const_rtx, const char *, #define INSN_CODE(INSN) XINT (INSN, 6) #define RTX_FRAME_RELATED_P(RTX) \ - (RTL_FLAG_CHECK5("RTX_FRAME_RELATED_P", (RTX), INSN, CALL_INSN, \ - JUMP_INSN, BARRIER, SET)->frame_related) + (RTL_FLAG_CHECK6("RTX_FRAME_RELATED_P", (RTX), DEBUG_INSN, INSN, \ + CALL_INSN, JUMP_INSN, BARRIER, SET)->frame_related) /* 1 if RTX is an insn that has been deleted. */ #define INSN_DELETED_P(RTX) \ - (RTL_FLAG_CHECK6("INSN_DELETED_P", (RTX), INSN, CALL_INSN, JUMP_INSN, \ + (RTL_FLAG_CHECK7("INSN_DELETED_P", (RTX), DEBUG_INSN, INSN, \ + CALL_INSN, JUMP_INSN, \ CODE_LABEL, BARRIER, NOTE)->volatil) /* 1 if RTX is a call to a const or pure function. */ @@ -853,16 +860,40 @@ extern const char * const reg_note_name[]; && NOTE_KIND (INSN) == NOTE_INSN_BASIC_BLOCK) /* Variable declaration and the location of a variable. */ -#define NOTE_VAR_LOCATION_DECL(INSN) (XCTREE (XCEXP (INSN, 4, NOTE), \ - 0, VAR_LOCATION)) -#define NOTE_VAR_LOCATION_LOC(INSN) (XCEXP (XCEXP (INSN, 4, NOTE), \ - 1, VAR_LOCATION)) +#define PAT_VAR_LOCATION_DECL(PAT) (XCTREE ((PAT), 0, VAR_LOCATION)) +#define PAT_VAR_LOCATION_LOC(PAT) (XCEXP ((PAT), 1, VAR_LOCATION)) /* Initialization status of the variable in the location. Status can be unknown, uninitialized or initialized. See enumeration type below. */ -#define NOTE_VAR_LOCATION_STATUS(INSN) (XCINT (XCEXP (INSN, 4, NOTE), \ - 2, VAR_LOCATION)) +#define PAT_VAR_LOCATION_STATUS(PAT) (XCINT ((PAT), 2, VAR_LOCATION)) + +/* Accessors for a NOTE_INSN_VAR_LOCATION. */ +#define NOTE_VAR_LOCATION_DECL(NOTE) \ + PAT_VAR_LOCATION_DECL (NOTE_VAR_LOCATION (NOTE)) +#define NOTE_VAR_LOCATION_LOC(NOTE) \ + PAT_VAR_LOCATION_LOC (NOTE_VAR_LOCATION (NOTE)) +#define NOTE_VAR_LOCATION_STATUS(NOTE) \ + PAT_VAR_LOCATION_STATUS (NOTE_VAR_LOCATION (NOTE)) + +/* The VAR_LOCATION rtx in a DEBUG_INSN. */ +#define INSN_VAR_LOCATION(INSN) PATTERN (INSN) + +/* Accessors for a tree-expanded var location debug insn. */ +#define INSN_VAR_LOCATION_DECL(INSN) \ + PAT_VAR_LOCATION_DECL (INSN_VAR_LOCATION (INSN)) +#define INSN_VAR_LOCATION_LOC(INSN) \ + PAT_VAR_LOCATION_LOC (INSN_VAR_LOCATION (INSN)) +#define INSN_VAR_LOCATION_STATUS(INSN) \ + PAT_VAR_LOCATION_STATUS (INSN_VAR_LOCATION (INSN)) + +/* Expand to the RTL that denotes an unknown variable location in a + DEBUG_INSN. */ +#define gen_rtx_UNKNOWN_VAR_LOC(M) (gen_rtx_CLOBBER (M, const0_rtx)) + +/* Determine whether X is such an unknown location. */ +#define VAR_LOC_UNKNOWN_P(X) \ + (GET_CODE (X) == CLOBBER && XEXP ((X), 0) == const0_rtx) /* Possible initialization status of a variable. When requested by the user, this information is tracked and recorded in the DWARF @@ -1238,8 +1269,9 @@ do { \ /* During sched, 1 if RTX is an insn that must be scheduled together with the preceding insn. */ #define SCHED_GROUP_P(RTX) \ - (RTL_FLAG_CHECK3("SCHED_GROUP_P", (RTX), INSN, JUMP_INSN, CALL_INSN \ - )->in_struct) + (RTL_FLAG_CHECK4("SCHED_GROUP_P", (RTX), DEBUG_INSN, INSN, \ + JUMP_INSN, CALL_INSN \ + )->in_struct) /* For a SET rtx, SET_DEST is the place that is set and SET_SRC is the value it is set to. 
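The rtl.h hunk above introduces a distinguished "location unknown" value, a predicate for it, and pattern-level accessors that the NOTE and DEBUG_INSN accessors merely wrap. A compact C model of that layering; the patch encodes "unknown" as (clobber (const_int 0)), while this sketch uses a null pointer as the sentinel:

#include <stdbool.h>
#include <stddef.h>

struct location { int reg; };            /* stand-in for a location rtx */

#define UNKNOWN_VAR_LOC ((struct location *) NULL)

static inline bool
var_loc_unknown_p (const struct location *loc)
{
  return loc == UNKNOWN_VAR_LOC;
}

/* The pattern carries the decl, the location and the init status;
   note- and insn-level accessors are thin wrappers over it.  */
struct var_location_pat
{
  const char *decl;
  struct location *loc;
  int status;
};

static inline struct location *
pat_var_location_loc (struct var_location_pat *pat)
{
  return pat->loc;
}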
*/ @@ -1568,6 +1600,9 @@ extern rtx emit_jump_insn_before_setloc (rtx, rtx, int); extern rtx emit_call_insn_before (rtx, rtx); extern rtx emit_call_insn_before_noloc (rtx, rtx); extern rtx emit_call_insn_before_setloc (rtx, rtx, int); +extern rtx emit_debug_insn_before (rtx, rtx); +extern rtx emit_debug_insn_before_noloc (rtx, rtx); +extern rtx emit_debug_insn_before_setloc (rtx, rtx, int); extern rtx emit_barrier_before (rtx); extern rtx emit_label_before (rtx, rtx); extern rtx emit_note_before (enum insn_note, rtx); @@ -1580,10 +1615,14 @@ extern rtx emit_jump_insn_after_setloc (rtx, rtx, int); extern rtx emit_call_insn_after (rtx, rtx); extern rtx emit_call_insn_after_noloc (rtx, rtx); extern rtx emit_call_insn_after_setloc (rtx, rtx, int); +extern rtx emit_debug_insn_after (rtx, rtx); +extern rtx emit_debug_insn_after_noloc (rtx, rtx); +extern rtx emit_debug_insn_after_setloc (rtx, rtx, int); extern rtx emit_barrier_after (rtx); extern rtx emit_label_after (rtx, rtx); extern rtx emit_note_after (enum insn_note, rtx); extern rtx emit_insn (rtx); +extern rtx emit_debug_insn (rtx); extern rtx emit_jump_insn (rtx); extern rtx emit_call_insn (rtx); extern rtx emit_label (rtx); @@ -1591,6 +1630,7 @@ extern rtx emit_barrier (void); extern rtx emit_note (enum insn_note); extern rtx emit_note_copy (rtx); extern rtx make_insn_raw (rtx); +extern rtx make_debug_insn_raw (rtx); extern rtx make_jump_insn_raw (rtx); extern void add_function_usage_to (rtx, rtx); extern rtx last_call_insn (void); diff --git a/gcc/rtlanal.c b/gcc/rtlanal.c index da1ceb4adbd..03155e09e21 100644 --- a/gcc/rtlanal.c +++ b/gcc/rtlanal.c @@ -4620,7 +4620,11 @@ canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest, stop if it isn't a single set or if it has a REG_INC note because we don't want to bother dealing with it. */ - if ((prev = prev_nonnote_insn (prev)) == 0 + do + prev = prev_nonnote_insn (prev); + while (prev && DEBUG_INSN_P (prev)); + + if (prev == 0 || !NONJUMP_INSN_P (prev) || FIND_REG_INC_NOTE (prev, NULL_RTX) /* In cfglayout mode, there do not have to be labels at the diff --git a/gcc/sched-deps.c b/gcc/sched-deps.c index cdd055b646f..21a0b8132df 100644 --- a/gcc/sched-deps.c +++ b/gcc/sched-deps.c @@ -1729,6 +1729,12 @@ sched_analyze_2 (struct deps *deps, rtx x, rtx insn) rtx pending, pending_mem; rtx t = x; + if (DEBUG_INSN_P (insn)) + { + sched_analyze_2 (deps, XEXP (x, 0), insn); + return; + } + if (current_sched_info->use_cselib) { t = shallow_copy_rtx (t); @@ -1930,6 +1936,8 @@ sched_analyze_insn (struct deps *deps, rtx x, rtx insn) { rtx next; next = next_nonnote_insn (insn); + while (next && DEBUG_INSN_P (next)) + next = next_nonnote_insn (next); if (next && BARRIER_P (next)) reg_pending_barrier = TRUE_BARRIER; else @@ -1985,6 +1993,21 @@ sched_analyze_insn (struct deps *deps, rtx x, rtx insn) } } + if (DEBUG_INSN_P (insn)) + { + rtx head = BB_HEAD (BLOCK_FOR_INSN (insn)), prev; + + if (insn != head) + { + for (prev = PREV_INSN (insn); prev != head; prev = PREV_INSN (prev)) + if (INSN_P (prev)) + { + add_dependence (insn, prev, REG_DEP_TRUE); + break; + } + } + } + /* If this instruction can throw an exception, then moving it changes where block boundaries fall. This is mighty confusing elsewhere. Therefore, prevent such an instruction from being moved. 
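The sched_analyze_insn hunk above anchors each debug insn to the insn just before it in the block, so scheduling cannot move it ahead of the code it annotates. A standalone sketch of that backward walk, with a stubbed-out dependence recorder in place of add_dependence:

enum insn_kind { K_NOTE, K_DEBUG, K_REAL };
struct insn { enum insn_kind kind; struct insn *prev; };

/* Stub: in GCC this is add_dependence (con, pro, REG_DEP_TRUE).  */
static void
add_true_dependence (struct insn *con, struct insn *pro)
{
  (void) con; (void) pro;
}

/* Give DEBUG_INSN a true dependence on the nearest preceding insn in
   its block; HEAD is the block head and is never examined, matching
   the loop in the patch.  Since INSN_P now accepts debug insns,
   consecutive debug insns simply chain onto one another.  */
static void
anchor_debug_insn (struct insn *debug_insn, struct insn *head)
{
  if (debug_insn == head)
    return;

  for (struct insn *prev = debug_insn->prev; prev != head;
       prev = prev->prev)
    if (prev->kind != K_NOTE)
      {
        add_true_dependence (debug_insn, prev);
        break;
      }
}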
Same for @@ -2045,9 +2068,19 @@ sched_analyze_insn (struct deps *deps, rtx x, rtx insn) } else { + if (DEBUG_INSN_P (insn)) + { + EXECUTE_IF_SET_IN_REG_SET (reg_pending_uses, 0, i, rsi) + { + struct deps_reg *reg_last = &deps->reg_last[i]; + add_dependence_list (insn, reg_last->sets, 0, REG_DEP_TRUE); + add_dependence_list (insn, reg_last->clobbers, 0, REG_DEP_TRUE); + } + CLEAR_REG_SET (reg_pending_uses); + } /* If the current insn is conditional, we can't free any of the lists. */ - if (sched_get_condition (insn)) + else if (sched_get_condition (insn)) { EXECUTE_IF_SET_IN_REG_SET (reg_pending_uses, 0, i, rsi) { @@ -2248,7 +2281,7 @@ sched_analyze (struct deps *deps, rtx head, rtx tail) sd_init_insn (insn); } - if (NONJUMP_INSN_P (insn) || JUMP_P (insn)) + if (NONJUMP_INSN_P (insn) || DEBUG_INSN_P (insn) || JUMP_P (insn)) { /* Make each JUMP_INSN a scheduling barrier for memory references. */ diff --git a/gcc/sched-rgn.c b/gcc/sched-rgn.c index 0ca1a472fa3..87244cce10e 100644 --- a/gcc/sched-rgn.c +++ b/gcc/sched-rgn.c @@ -2247,6 +2247,9 @@ add_branch_dependences (rtx head, rtx tail) are not moved before reload because we can wind up with register allocation failures. */ + while (tail != head && DEBUG_INSN_P (tail)) + tail = PREV_INSN (tail); + insn = tail; last = 0; while (CALL_P (insn) @@ -2282,6 +2285,13 @@ add_branch_dependences (rtx head, rtx tail) break; insn = PREV_INSN (insn); + if (DEBUG_INSN_P (insn)) + { + if (last) + add_dependence (last, insn, REG_DEP_ANTI); + while (insn != head && DEBUG_INSN_P (insn)) + insn = PREV_INSN (insn); + } } /* Make sure these insns are scheduled last in their block. */ diff --git a/gcc/sched-vis.c b/gcc/sched-vis.c index 8f4597247e5..3a5894f7fa3 100644 --- a/gcc/sched-vis.c +++ b/gcc/sched-vis.c @@ -557,6 +557,10 @@ print_pattern (char *buf, const_rtx x, int verbose) print_value (t1, XEXP (x, 0), verbose); sprintf (buf, "use %s", t1); break; + case VAR_LOCATION: + print_value (t1, PAT_VAR_LOCATION_LOC (x), verbose); + sprintf (buf, "loc %s", t1); + break; case COND_EXEC: if (GET_CODE (COND_EXEC_TEST (x)) == NE && XEXP (COND_EXEC_TEST (x), 1) == const0_rtx) diff --git a/gcc/var-tracking.c b/gcc/var-tracking.c index 97986b66886..13a9dd20e86 100644 --- a/gcc/var-tracking.c +++ b/gcc/var-tracking.c @@ -112,6 +112,9 @@ enum micro_operation_type MO_USE, /* Use location (REG or MEM). */ MO_USE_NO_VAR,/* Use location which is not associated with a variable or the variable is not trackable. */ + MO_LOC_MAIN, /* Use location from the debug insn. */ + MO_LOC_USE, /* The location appears in a debug insn, but it's not + the location of the debug insn's decl. */ MO_SET, /* Set location. */ MO_COPY, /* Copy the same portion of a variable from one location to another. */ @@ -836,14 +839,12 @@ var_debug_decl (tree decl) return decl; } -/* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). */ +/* Set the register LOC to contain DECL, OFFSET. */ static void -var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized, - rtx set_src) +var_reg_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized, + tree decl, HOST_WIDE_INT offset, rtx set_src) { - tree decl = REG_EXPR (loc); - HOST_WIDE_INT offset = REG_OFFSET (loc); attrs node; decl = var_debug_decl (decl); @@ -856,6 +857,18 @@ var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized, set_variable_part (set, loc, decl, offset, initialized, set_src); } +/* Set the register to contain REG_EXPR (LOC), REG_OFFSET (LOC). 
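The var-tracking refactor starting above splits the old var_reg_set into a core helper that takes the decl and offset explicitly and a thin wrapper that still pulls them from the location's annotations; debug insns need the explicit form because the decl comes from the insn, not from REG_EXPR or MEM_EXPR. A sketch of that split with hypothetical names and a stubbed core:

typedef long hwi;                       /* HOST_WIDE_INT stand-in */
struct set;                             /* dataflow set, opaque here */
struct loc { const char *expr_decl; hwi expr_offset; };

/* Core helper: record that L holds DECL at OFFSET.  Stub body; the
   real code calls set_variable_part.  */
static void
var_loc_decl_set (struct set *s, struct loc *l, const char *decl, hwi offset)
{
  (void) s; (void) l; (void) decl; (void) offset;
}

/* Pre-patch style entry point: decl and offset come from the
   location's own annotations.  */
static void
var_loc_set (struct set *s, struct loc *l)
{
  var_loc_decl_set (s, l, l->expr_decl, l->expr_offset);
}

/* Debug-insn entry point: the decl is the one the debug insn names,
   not whatever annotation the register or MEM happens to carry.  */
static void
var_loc_set_for_debug_insn (struct set *s, struct loc *l,
                            const char *insn_decl)
{
  var_loc_decl_set (s, l, insn_decl, 0);
}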
*/ + +static void +var_reg_set (dataflow_set *set, rtx loc, enum var_init_status initialized, + rtx set_src) +{ + tree decl = REG_EXPR (loc); + HOST_WIDE_INT offset = REG_OFFSET (loc); + + var_reg_decl_set (set, loc, initialized, decl, offset, set_src); +} + static int get_init_value (dataflow_set *set, rtx loc, tree decl) { @@ -975,6 +988,17 @@ var_regno_delete (dataflow_set *set, int regno) *reg = NULL; } +/* Set the location of DECL, OFFSET as the MEM LOC. */ + +static void +var_mem_decl_set (dataflow_set *set, rtx loc, enum var_init_status initialized, + tree decl, HOST_WIDE_INT offset, rtx set_src) +{ + decl = var_debug_decl (decl); + + set_variable_part (set, loc, decl, offset, initialized, set_src); +} + /* Set the location part of variable MEM_EXPR (LOC) in dataflow set SET to LOC. Adjust the address first if it is stack pointer based. */ @@ -986,9 +1010,7 @@ var_mem_set (dataflow_set *set, rtx loc, enum var_init_status initialized, tree decl = MEM_EXPR (loc); HOST_WIDE_INT offset = MEM_OFFSET (loc) ? INTVAL (MEM_OFFSET (loc)) : 0; - decl = var_debug_decl (decl); - - set_variable_part (set, loc, decl, offset, initialized, set_src); + var_mem_decl_set (set, loc, initialized, decl, offset, set_src); } /* Delete and set the location part of variable MEM_EXPR (LOC) in @@ -1672,27 +1694,72 @@ same_variable_part_p (rtx loc, tree expr, HOST_WIDE_INT offset) return (expr == expr2 && offset == offset2); } +/* Determine what kind of micro operation to choose for a USE. Return + MO_CLOBBER if no micro operation is to be generated. */ -/* Count uses (register and memory references) LOC which will be tracked. - INSN is instruction which the LOC is part of. */ - -static int -count_uses (rtx *loc, void *insn) +static enum micro_operation_type +use_type (rtx *loc, rtx insn) { - basic_block bb = BLOCK_FOR_INSN ((rtx) insn); + tree expr; + + if (DEBUG_INSN_P (insn)) + { + if (!VAR_LOC_UNKNOWN_P (*loc) && !REG_P (*loc) && !MEM_P (*loc)) + return MO_CLOBBER; - if (REG_P (*loc)) + expr = INSN_VAR_LOCATION_DECL (insn); + + if (!track_expr_p (expr)) + return MO_CLOBBER; + + if (&INSN_VAR_LOCATION_LOC (insn) == loc) + return MO_LOC_MAIN; + else + return MO_LOC_USE; + } + else if (REG_P (*loc)) { gcc_assert (REGNO (*loc) < FIRST_PSEUDO_REGISTER); - VTI (bb)->n_mos++; + + expr = REG_EXPR (*loc); + + if (!expr) + return MO_USE_NO_VAR; + else if (var_debug_value_for_decl (expr)) + return MO_CLOBBER; + else if (track_expr_p (expr)) + return MO_USE; + else + return MO_USE_NO_VAR; } - else if (MEM_P (*loc) - && MEM_EXPR (*loc) - && track_expr_p (MEM_EXPR (*loc))) + else if (MEM_P (*loc)) { - VTI (bb)->n_mos++; + expr = MEM_EXPR (*loc); + + if (!expr) + return MO_CLOBBER; + else if (var_debug_value_for_decl (expr)) + return MO_CLOBBER; + else if (track_expr_p (expr)) + return MO_USE; + else + return MO_CLOBBER; } + return MO_CLOBBER; +} + +/* Count uses (register and memory references) LOC which will be tracked. + INSN is instruction which the LOC is part of. */ + +static int +count_uses (rtx *loc, void *data) +{ + rtx insn = (rtx) data; + + if (use_type (loc, insn) != MO_CLOBBER) + VTI (BLOCK_FOR_INSN (insn))->n_mos++; + return 0; } @@ -1717,28 +1784,19 @@ count_stores (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *insn) to VTI (bb)->mos. INSN is instruction which the LOC is part of. 
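The use_type function above centralizes the decision of which micro-operation a location inside an insn produces. A simplified standalone classification with the same shape: inside a debug insn, the slot that is the insn's main location becomes MO_LOC_MAIN, any other use MO_LOC_USE; ordinary insns keep the old MO_USE / MO_USE_NO_VAR split, and everything untrackable degrades to MO_CLOBBER. The structs here are assumptions standing in for rtx and insn state.

#include <stdbool.h>

enum micro_op { MO_CLOBBER, MO_USE, MO_USE_NO_VAR, MO_LOC_MAIN, MO_LOC_USE };

struct loc_ref
{
  bool is_reg_or_mem;
  bool has_trackable_decl;         /* track_expr_p on its annotation */
};

struct insn_info
{
  bool is_debug;
  bool decl_trackable;             /* track_expr_p on the insn's decl */
  const struct loc_ref *main_loc;  /* the insn's main location slot */
};

/* Classify the use at LOC inside INSN, roughly as use_type does.  */
static enum micro_op
classify_use (const struct loc_ref *loc, const struct insn_info *insn)
{
  if (insn->is_debug)
    {
      if (!insn->decl_trackable)
        return MO_CLOBBER;
      return loc == insn->main_loc ? MO_LOC_MAIN : MO_LOC_USE;
    }

  if (!loc->is_reg_or_mem)
    return MO_CLOBBER;

  return loc->has_trackable_decl ? MO_USE : MO_USE_NO_VAR;
}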
*/ static int -add_uses (rtx *loc, void *insn) +add_uses (rtx *loc, void *data) { - if (REG_P (*loc)) - { - basic_block bb = BLOCK_FOR_INSN ((rtx) insn); - micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++; + rtx insn = (rtx)data; + enum micro_operation_type type = use_type (loc, insn); - mo->type = ((REG_EXPR (*loc) && track_expr_p (REG_EXPR (*loc))) - ? MO_USE : MO_USE_NO_VAR); - mo->u.loc = *loc; - mo->insn = (rtx) insn; - } - else if (MEM_P (*loc) - && MEM_EXPR (*loc) - && track_expr_p (MEM_EXPR (*loc))) + if (type != MO_CLOBBER) { - basic_block bb = BLOCK_FOR_INSN ((rtx) insn); + basic_block bb = BLOCK_FOR_INSN (insn); micro_operation *mo = VTI (bb)->mos + VTI (bb)->n_mos++; - mo->type = MO_USE; + mo->type = type; mo->u.loc = *loc; - mo->insn = (rtx) insn; + mo->insn = insn; } return 0; @@ -1759,6 +1817,9 @@ add_uses_1 (rtx *x, void *insn) static void add_stores (rtx loc, const_rtx expr, void *insn) { + if (use_type (&loc, (rtx) insn) == MO_CLOBBER) + return; + if (REG_P (loc)) { basic_block bb = BLOCK_FOR_INSN ((rtx) insn); @@ -1944,6 +2005,32 @@ compute_bb_dataflow (basic_block bb) } break; + case MO_LOC_MAIN: + { + rtx loc = VTI (bb)->mos[i].u.loc; + rtx insn = VTI (bb)->mos[i].insn; + + if (VAR_LOC_UNKNOWN_P (loc)) + clobber_variable_part (out, NULL_RTX, + INSN_VAR_LOCATION_DECL (insn), 0, + NULL_RTX); + else if (REG_P (loc)) + var_reg_decl_set (out, loc, VAR_INIT_STATUS_INITIALIZED, + INSN_VAR_LOCATION_DECL (insn), 0, NULL_RTX); + else if (MEM_P (loc)) + var_mem_decl_set (out, loc, VAR_INIT_STATUS_INITIALIZED, + INSN_VAR_LOCATION_DECL (insn), 0, NULL_RTX); + } + break; + + case MO_LOC_USE: + { + /* ??? Note that this reg or mem is part of the value of + decl such that, when it's set, we know the variable + no longer holds its value. */ + } + break; + case MO_SET: { rtx loc = VTI (bb)->mos[i].u.loc; @@ -2867,8 +2954,8 @@ emit_notes_in_bb (basic_block bb) case MO_USE: { rtx loc = VTI (bb)->mos[i].u.loc; - enum var_init_status status = VAR_INIT_STATUS_UNINITIALIZED; + if (! flag_var_tracking_uninit) status = VAR_INIT_STATUS_INITIALIZED; if (GET_CODE (loc) == REG) @@ -2880,6 +2967,42 @@ emit_notes_in_bb (basic_block bb) } break; + case MO_LOC_MAIN: + { + rtx loc = VTI (bb)->mos[i].u.loc; + rtx insn = VTI (bb)->mos[i].insn, next; + + if (VAR_LOC_UNKNOWN_P (loc)) + clobber_variable_part (&set, NULL_RTX, + INSN_VAR_LOCATION_DECL (insn), 0, + NULL_RTX); + else if (REG_P (loc)) + var_reg_decl_set (&set, loc, VAR_INIT_STATUS_INITIALIZED, + INSN_VAR_LOCATION_DECL (insn), 0, NULL_RTX); + else if (MEM_P (loc)) + var_mem_decl_set (&set, loc, VAR_INIT_STATUS_INITIALIZED, + INSN_VAR_LOCATION_DECL (insn), 0, NULL_RTX); + + for (next = NEXT_INSN (insn); + next && BLOCK_FOR_INSN (insn) == BLOCK_FOR_INSN (next); + next = NEXT_INSN (next)) + if (DEBUG_INSN_P (next)) + insn = next; + else if (!NOTE_P (next)) + break; + + emit_notes_for_changes (insn, EMIT_NOTE_AFTER_INSN); + } + break; + + case MO_LOC_USE: + { + /* ??? Note that this reg or mem is part of the value of + decl such that, when it's set, we know the variable + no longer holds its value. */ + } + break; + case MO_SET: { rtx loc = VTI (bb)->mos[i].u.loc; @@ -3087,6 +3210,7 @@ vt_initialize (void) { rtx insn; HOST_WIDE_INT pre, post = 0; + int count; /* Count the number of micro operations. */ VTI (bb)->n_mos = 0; @@ -3110,6 +3234,8 @@ vt_initialize (void) } } + count = VTI (bb)->n_mos; + /* Add the micro-operations to the array. 
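vt_initialize above uses the classic two-pass pattern: count the micro-operations, size the array exactly, then fill it; the new count variable and gcc_assert catch any divergence between count_uses and add_uses now that both defer to use_type. A generic sketch of that pattern over a toy item array:

#include <assert.h>
#include <stdbool.h>
#include <stdlib.h>

struct item { bool interesting; };

/* First pass: count, so the array can be sized exactly.  */
static int
count_pass (const struct item *items, int n)
{
  int count = 0;
  for (int i = 0; i < n; i++)
    if (items[i].interesting)       /* must match the fill pass */
      count++;
  return count;
}

/* Second pass: fill, then check that both passes agreed, as the new
   assertion in vt_initialize does for n_mos.  */
static const struct item **
fill_pass (const struct item *items, int n, int expected)
{
  const struct item **vec = malloc ((expected ? expected : 1) * sizeof *vec);
  int filled = 0;

  if (!vec)
    abort ();

  for (int i = 0; i < n; i++)
    if (items[i].interesting)
      vec[filled++] = &items[i];

  assert (filled == expected);
  return vec;
}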
*/ VTI (bb)->mos = XNEWVEC (micro_operation, VTI (bb)->n_mos); VTI (bb)->n_mos = 0; @@ -3137,12 +3263,13 @@ vt_initialize (void) note_uses (&PATTERN (insn), add_uses_1, insn); n2 = VTI (bb)->n_mos - 1; - /* Order the MO_USEs to be before MO_USE_NO_VARs. */ + /* Order the MO_USEs to be before MO_USE_NO_VARs, + MO_LOC_MAIN and MO_LOC_USE. */ while (n1 < n2) { while (n1 < n2 && VTI (bb)->mos[n1].type == MO_USE) n1++; - while (n1 < n2 && VTI (bb)->mos[n2].type == MO_USE_NO_VAR) + while (n1 < n2 && VTI (bb)->mos[n2].type != MO_USE) n2--; if (n1 < n2) { @@ -3197,6 +3324,7 @@ vt_initialize (void) } } } + gcc_assert (count == VTI (bb)->n_mos); } /* Init the IN and OUT sets. */ @@ -3219,6 +3347,38 @@ vt_initialize (void) vt_add_function_parameters (); } +/* Get rid of all debug insns from the insn stream. */ + +static void +delete_debug_insns (void) +{ + basic_block bb; + rtx insn, next; + + if (!MAY_HAVE_DEBUG_INSNS) + return; + + FOR_EACH_BB (bb) + { + FOR_BB_INSNS_SAFE (bb, insn, next) + if (DEBUG_INSN_P (insn)) + delete_insn (insn); + } +} + +/* Run a fast, BB-local only version of var tracking, to take care of + information that we don't do global analysis on, such that not all + information is lost. If SKIPPED holds, we're skipping the global + pass entirely, so we should try to use information it would have + handled as well.. */ + +static void +vt_debug_insns_local (bool skipped ATTRIBUTE_UNUSED) +{ + /* ??? Just skip it all for now. */ + delete_debug_insns (); +} + /* Free the data structures needed for variable tracking. */ static void @@ -3249,7 +3409,10 @@ unsigned int variable_tracking_main (void) { if (n_basic_blocks > 500 && n_edges / n_basic_blocks >= 20) - return 0; + { + vt_debug_insns_local (true); + return 0; + } mark_dfs_back_edges (); vt_initialize (); @@ -3258,6 +3421,7 @@ variable_tracking_main (void) if (!vt_stack_adjustments ()) { vt_finalize (); + vt_debug_insns_local (true); return 0; } } @@ -3272,6 +3436,7 @@ variable_tracking_main (void) } vt_finalize (); + vt_debug_insns_local (false); return 0; } -- cgit v1.2.3
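Finally, delete_debug_insns above removes every debug insn before code generation continues, using the safe form of the block-insn walk because deletion unlinks the current insn. A standalone sketch of that deletion loop over a simplified singly linked chain; the unlink-via-pointer-to-pointer form shown here is an idiomatic equivalent of saving the next insn before deleting, as FOR_BB_INSNS_SAFE does:

#include <stdbool.h>
#include <stdlib.h>

struct insn { bool is_debug; struct insn *next; };
struct block { struct insn *head; };

/* Unlink and free every debug insn in BB, leaving real insns, and
   therefore generated code, untouched.  */
static void
delete_debug_insns_in (struct block *bb)
{
  struct insn **link = &bb->head;

  while (*link)
    {
      struct insn *insn = *link;

      if (insn->is_debug)
        {
          *link = insn->next;     /* take it out of the chain first */
          free (insn);
        }
      else
        link = &insn->next;
    }
}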