Diffstat (limited to 'gcc/loop.c')
-rw-r--r--  gcc/loop.c  71
1 file changed, 58 insertions, 13 deletions
diff --git a/gcc/loop.c b/gcc/loop.c
index 192461a934c..7acd727042e 100644
--- a/gcc/loop.c
+++ b/gcc/loop.c
@@ -2557,14 +2557,21 @@ verify_dominator (loop_number)
&& GET_CODE (PATTERN (insn)) != RETURN)
{
rtx label = JUMP_LABEL (insn);
- int label_luid = INSN_LUID (label);
-
- if (! condjump_p (insn)
- && ! condjump_in_parallel_p (insn))
+ int label_luid;
+
+ /* If it is not a jump we can easily understand or for
+ which we do not have jump target information in the JUMP_LABEL
+ field (consider ADDR_VEC and ADDR_DIFF_VEC insns), then clear
+ LOOP_NUMBER_CONT_DOMINATOR. */
+ if ((! condjump_p (insn)
+ && ! condjump_in_parallel_p (insn))
+ || label == NULL_RTX)
{
loop_number_cont_dominator[loop_number] = NULL_RTX;
return;
}
+
+ label_luid = INSN_LUID (label);
if (label_luid < INSN_LUID (loop_number_loop_cont[loop_number])
&& (label_luid
> INSN_LUID (loop_number_cont_dominator[loop_number])))
@@ -3669,6 +3676,9 @@ strength_reduce (scan_start, end, loop_top, insn_count,
/* This is 1 if current insn may be executed more than once for every
loop iteration. */
int maybe_multiple = 0;
+ /* This is 1 if we have passed a branch back to the top of the loop
+ (aka a loop latch). */
+ int past_loop_latch = 0;
/* Temporary list pointers for traversing loop_iv_list. */
struct iv_class *bl, **backbl;
/* Ratio of extra register life span we can justify
@@ -3836,16 +3846,30 @@ strength_reduce (scan_start, end, loop_top, insn_count,
loop_depth--;
}
+ /* Note if we pass a loop latch. If we do, then we can not clear
+ NOT_EVERY_ITERATION below when we pass the last CODE_LABEL in
+ a loop since a jump before the last CODE_LABEL may have started
+ a new loop iteration.
+
+ Note that LOOP_TOP is only set for rotated loops and we need
+ this check for all loops, so compare against the CODE_LABEL
+ which immediately follows LOOP_START. */
+ if (GET_CODE (p) == JUMP_INSN && JUMP_LABEL (p) == NEXT_INSN (loop_start))
+ past_loop_latch = 1;
+
/* Unlike in the code motion pass where MAYBE_NEVER indicates that
an insn may never be executed, NOT_EVERY_ITERATION indicates whether
or not an insn is known to be executed each iteration of the
loop, whether or not any iterations are known to occur.
Therefore, if we have just passed a label and have no more labels
- between here and the test insn of the loop, we know these insns
- will be executed each iteration. */
+ between here and the test insn of the loop, and we have not passed
+ a jump to the top of the loop, then we know these insns will be
+ executed each iteration. */
- if (not_every_iteration && GET_CODE (p) == CODE_LABEL
+ if (not_every_iteration
+ && ! past_loop_latch
+ && GET_CODE (p) == CODE_LABEL
&& no_labels_between_p (p, loop_end)
&& loop_insn_first_p (p, loop_cont))
not_every_iteration = 0;
@@ -4117,8 +4141,15 @@ strength_reduce (scan_start, end, loop_top, insn_count,
n_extra_increment += bl->biv_count - 1;
/* If the loop contains volatile memory references do not allow any
- replacements to take place, since this could loose the volatile markers. */
- if (n_extra_increment && ! loop_has_volatile)
+ replacements to take place, since this could lose the volatile
+ markers.
+
+ Disabled for the gcc-2.95 release. There are still some problems with
+ giv recombination. We have a patch from Joern which should fix those
+ problems. But the patch is fairly complex and not really suitable for
+ the gcc-2.95 branch at this stage. */
+ if (0 && n_extra_increment && ! loop_has_volatile)
+
{
int nregs = first_increment_giv + n_extra_increment;
@@ -4176,6 +4207,8 @@ strength_reduce (scan_start, end, loop_top, insn_count,
|| ! next->always_executed
|| next->maybe_multiple
|| ! CONSTANT_P (next->add_val)
+ || v->mult_val != const1_rtx
+ || next->mult_val != const1_rtx
|| ! (biv_dead_after_loop
|| no_jumps_between_p (v->insn, next->insn)))
{
@@ -4198,6 +4231,7 @@ strength_reduce (scan_start, end, loop_top, insn_count,
VARRAY_GROW (set_in_loop, nregs);
VARRAY_GROW (n_times_set, nregs);
VARRAY_GROW (may_not_optimize, nregs);
+ VARRAY_GROW (reg_single_usage, nregs);
}
if (! validate_change (next->insn, next->location, add_val, 0))
@@ -4720,7 +4754,13 @@ strength_reduce (scan_start, end, loop_top, insn_count,
VARRAY_GROW (reg_iv_type, nregs);
VARRAY_GROW (reg_iv_info, nregs);
}
+#if 0
+ /* Disabled for the gcc-2.95 release. There are still some problems with
+ giv recombination. We have a patch from Joern which should fix those
+ problems. But the patch is fairly complex and not really suitable for
+ the gcc-2.95 branch at this stage. */
recombine_givs (bl, loop_start, loop_end, unroll_p);
+#endif
/* Reduce each giv that we decided to reduce. */
@@ -7757,7 +7797,7 @@ check_dbra_loop (loop_end, insn_count, loop_start, loop_info)
reversible_mem_store
= (! unknown_address_altered
- && ! invariant_p (XEXP (loop_store_mems, 0)));
+ && ! invariant_p (XEXP (XEXP (loop_store_mems, 0), 0)));
/* If the store depends on a register that is set after the
store, it depends on the initial value, and is thus not
@@ -8179,11 +8219,16 @@ loop_insn_first_p (insn, reference)
if (p == reference || ! q)
return 1;
+ /* Either of P or Q might be a NOTE. Notes have the same LUID as the
+ previous insn, hence the <= comparison below does not work if
+ P is a note. */
if (INSN_UID (p) < max_uid_for_loop
- && INSN_UID (q) < max_uid_for_loop)
- return INSN_LUID (p) < INSN_LUID (q);
+ && INSN_UID (q) < max_uid_for_loop
+ && GET_CODE (p) != NOTE)
+ return INSN_LUID (p) <= INSN_LUID (q);
- if (INSN_UID (p) >= max_uid_for_loop)
+ if (INSN_UID (p) >= max_uid_for_loop
+ || GET_CODE (p) == NOTE)
p = NEXT_INSN (p);
if (INSN_UID (q) >= max_uid_for_loop)
q = NEXT_INSN (q);
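
As a rough illustration of what the final loop_insn_first_p hunk encodes: a NOTE carries the same LUID as the insn before it, so a strict '<' comparison can mis-order a NOTE relative to its neighbours. The standalone sketch below is not GCC code; the type and function names (fake_insn, first_p) are invented for this illustration, and it only models the "step past NOTEs, then compare with '<='" rule under that simplifying assumption.

#include <stdio.h>

/* Illustrative only -- these types and names are invented, not GCC's.  */
enum kind { REAL_INSN, NOTE_INSN };

struct fake_insn
{
  enum kind kind;
  int luid;                     /* NOTEs reuse the previous insn's LUID.  */
  struct fake_insn *next;
};

/* Return nonzero if A comes no later than B in the chain, mirroring the
   patched rule: step A past NOTEs (whose own LUID understates their
   position) and compare with '<=' so that an insn still counts as
   "first" against the NOTE that immediately follows it.  */
static int
first_p (struct fake_insn *a, struct fake_insn *b)
{
  while (a && a->kind == NOTE_INSN)
    a = a->next;
  if (a == 0)
    return 0;
  return a->luid <= b->luid;
}

int
main (void)
{
  struct fake_insn i2   = { REAL_INSN, 11, 0 };
  struct fake_insn note = { NOTE_INSN, 10, &i2 };  /* same LUID as i1 */
  struct fake_insn i1   = { REAL_INSN, 10, &note };

  /* i1 precedes the NOTE that shares its LUID; a strict '<' would say 0.  */
  printf ("%d\n", first_p (&i1, &note));   /* prints 1 */
  /* The NOTE follows i1; skipping it forward to i2 (LUID 11) gets the
     ordering right.  */
  printf ("%d\n", first_p (&note, &i1));   /* prints 0 */
  return 0;
}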