author     Richard Kenner <kenner@vlsi1.ultra.nyu.edu>  1999-04-04 17:50:59 +0000
committer  Richard Kenner <kenner@vlsi1.ultra.nyu.edu>  1999-04-04 17:50:59 +0000
commit     f4a7af050f2afacd94abff59fe9f9d6b632cecb3 (patch)
tree       01b923a66d33e8305dd4250bfa749e6824b898a0
parent     e5a4cfde6746180034a33accfb03c317d4bdbe2a (diff)
(optimize_minmax_comparison): New function.
(invert_truthvalue, case WITH_RECORD_EXPR): New case.
(fold): Use STRIP_SIGN_NOPS instead of STRIP_TYPE_NOPS.
(fold, case EQ_EXPR): Call optimize_minmax_comparison and add cases
with ABS_EXPR, NEGATE_EXPR, PLUS_EXPR, MINUS_EXPR, and widening
conversions.  Properly swap operands when one is constant.

git-svn-id: https://gcc.gnu.org/svn/gcc/branches/premerge-fsf-branch@26173 138bc75d-0d04-0410-961f-82ee72b054a4
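For orientation, here is a minimal C sketch, not part of this commit, of the comparisons that the new optimize_minmax_comparison folding targets.  The MAX/MIN macros and function names are illustrative, and whether fold actually sees MIN_EXPR/MAX_EXPR nodes for these conditionals depends on earlier folding of the ?: expressions.

#define MAX(a, b) ((a) > (b) ? (a) : (b))
#define MIN(a, b) ((a) < (b) ? (a) : (b))

/* Each comment shows the simplification the new code aims for.  */
int max_eq_zero (int x) { return MAX (x, 0) == 0; }   /* MAX (X, 0) == 0  -> X <= 0        */
int max_eq_five (int x) { return MAX (x, 0) == 5; }   /* MAX (X, 0) == 5  -> X == 5        */
int max_gt_neg  (int x) { return MAX (x, 0) > -1; }   /* MAX (X, 0) > -1  -> always true   */
int min_gt_five (int x) { return MIN (x, 0) > 5; }    /* MIN (X, 0) > 5   -> always false  */
int min_eq_neg  (int x) { return MIN (x, 0) == -1; }  /* MIN (X, 0) == -1 -> X == -1       */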
-rw-r--r--  gcc/fold-const.c  179
1 file changed, 176 insertions, 3 deletions
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index 7e9b91aabff..7513ec5fea8 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -91,6 +91,7 @@ static int merge_ranges PROTO((int *, tree *, tree *, int, tree, tree,
static tree fold_range_test PROTO((tree));
static tree unextend PROTO((tree, int, int, tree));
static tree fold_truthop PROTO((enum tree_code, tree, tree, tree));
+static tree optimize_minmax_comparison PROTO((tree));
static tree strip_compound_expr PROTO((tree, tree));
static int count_cond PROTO((tree, int));
static int multiple_of_p PROTO((tree, tree, tree));
@@ -2475,6 +2476,11 @@ invert_truthvalue (arg)
return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
invert_truthvalue (TREE_OPERAND (arg, 1)));
+ case WITH_RECORD_EXPR:
+ return build (WITH_RECORD_EXPR, type,
+ invert_truthvalue (TREE_OPERAND (arg, 0)),
+ TREE_OPERAND (arg, 1));
+
case NON_LVALUE_EXPR:
return invert_truthvalue (TREE_OPERAND (arg, 0));
@@ -3856,6 +3862,102 @@ fold_truthop (code, truth_type, lhs, rhs)
const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
}
+/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
+ constant. */
+
+static tree
+optimize_minmax_comparison (t)
+ tree t;
+{
+ tree type = TREE_TYPE (t);
+ tree arg0 = TREE_OPERAND (t, 0);
+ enum tree_code op_code;
+ tree comp_const = TREE_OPERAND (t, 1);
+ tree minmax_const;
+ int consts_equal, consts_lt;
+ tree inner, tem;
+
+ STRIP_SIGN_NOPS (arg0);
+
+ op_code = TREE_CODE (arg0);
+ minmax_const = TREE_OPERAND (arg0, 1);
+ consts_equal = tree_int_cst_equal (minmax_const, comp_const);
+ consts_lt = tree_int_cst_lt (minmax_const, comp_const);
+ inner = TREE_OPERAND (arg0, 0);
+
+ /* If something does not permit us to optimize, return the original tree. */
+ if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
+ || TREE_CODE (comp_const) != INTEGER_CST
+ || TREE_CONSTANT_OVERFLOW (comp_const)
+ || TREE_CODE (minmax_const) != INTEGER_CST
+ || TREE_CONSTANT_OVERFLOW (minmax_const))
+ return t;
+
+ /* Now handle all the various comparison codes. We only handle EQ_EXPR
+ and GT_EXPR, doing the rest with recursive calls using logical
+ simplifications. */
+ switch (TREE_CODE (t))
+ {
+ case NE_EXPR: case LT_EXPR: case LE_EXPR:
+ return
+ invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
+
+ case GE_EXPR:
+ return
+ fold (build (TRUTH_ORIF_EXPR, type,
+ optimize_minmax_comparison
+ (build (EQ_EXPR, type, arg0, comp_const)),
+ optimize_minmax_comparison
+ (build (GT_EXPR, type, arg0, comp_const))));
+
+ case EQ_EXPR:
+ if (op_code == MAX_EXPR && consts_equal)
+ /* MAX (X, 0) == 0 -> X <= 0 */
+ return fold (build (LE_EXPR, type, inner, comp_const));
+
+ else if (op_code == MAX_EXPR && consts_lt)
+ /* MAX (X, 0) == 5 -> X == 5 */
+ return fold (build (EQ_EXPR, type, inner, comp_const));
+
+ else if (op_code == MAX_EXPR)
+ /* MAX (X, 0) == -1 -> false */
+ return omit_one_operand (type, integer_zero_node, inner);
+
+ else if (consts_equal)
+ /* MIN (X, 0) == 0 -> X >= 0 */
+ return fold (build (GE_EXPR, type, inner, comp_const));
+
+ else if (consts_lt)
+ /* MIN (X, 0) == 5 -> false */
+ return omit_one_operand (type, integer_zero_node, inner);
+
+ else
+ /* MIN (X, 0) == -1 -> X == -1 */
+ return fold (build (EQ_EXPR, type, inner, comp_const));
+
+ case GT_EXPR:
+ if (op_code == MAX_EXPR && (consts_equal || consts_lt))
+ /* MAX (X, 0) > 0 -> X > 0
+ MAX (X, 0) > 5 -> X > 5 */
+ return fold (build (GT_EXPR, type, inner, comp_const));
+
+ else if (op_code == MAX_EXPR)
+ /* MAX (X, 0) > -1 -> true */
+ return omit_one_operand (type, integer_one_node, inner);
+
+ else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
+ /* MIN (X, 0) > 0 -> false
+ MIN (X, 0) > 5 -> false */
+ return omit_one_operand (type, integer_zero_node, inner);
+
+ else
+ /* MIN (X, 0) > -1 -> X < -1 */
+ return fold (build (LT_EXPR, type, inner, comp_const));
+ }
+
+ return t;
+}
+
/* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
S, a SAVE_EXPR, return the expression actually being evaluated. Note
that we may sometimes modify the tree. */
@@ -3963,7 +4065,7 @@ fold (expr)
/* Don't use STRIP_NOPS, because signedness of argument type matters. */
if (arg0 != 0)
- STRIP_TYPE_NOPS (arg0);
+ STRIP_SIGN_NOPS (arg0);
if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
subop = TREE_REALPART (arg0);
@@ -3997,7 +4099,7 @@ fold (expr)
{
/* Signedness matters here. Perhaps we can refine this
later. */
- STRIP_TYPE_NOPS (op);
+ STRIP_SIGN_NOPS (op);
}
else
{
@@ -5474,6 +5576,76 @@ fold (expr)
}
}
+ /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
+ a MINUS_EXPR of a constant, we can convert it into a comparison with
+ a revised constant as long as no overflow occurs. */
+ if ((code == EQ_EXPR || code == NE_EXPR)
+ && TREE_CODE (arg1) == INTEGER_CST
+ && (TREE_CODE (arg0) == PLUS_EXPR
+ || TREE_CODE (arg0) == MINUS_EXPR)
+ && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
+ && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
+ ? MINUS_EXPR : PLUS_EXPR,
+ arg1, TREE_OPERAND (arg0, 1), 0))
+ && ! TREE_CONSTANT_OVERFLOW (tem))
+ return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
+
+ /* Similarly for a NEGATE_EXPR. */
+ else if ((code == EQ_EXPR || code == NE_EXPR)
+ && TREE_CODE (arg0) == NEGATE_EXPR
+ && TREE_CODE (arg1) == INTEGER_CST
+ && 0 != (tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (arg1),
+ arg1)))
+ && TREE_CODE (tem) == INTEGER_CST
+ && ! TREE_CONSTANT_OVERFLOW (tem))
+ return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
+
+ /* If we have X - Y == 0, we can convert that to X == Y and similarly
+ for !=. Don't do this for ordered comparisons due to overflow. */
+ else if ((code == NE_EXPR || code == EQ_EXPR)
+ && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
+ return fold (build (code, type,
+ TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
+
+ /* If we are widening one operand of an integer comparison,
+ see if the other operand is similarly being widened. Perhaps we
+ can do the comparison in the narrower type. */
+ else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
+ && TREE_CODE (arg0) == NOP_EXPR
+ && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
+ && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
+ && (TREE_TYPE (t1) == TREE_TYPE (tem)
+ || (TREE_CODE (t1) == INTEGER_CST
+ && int_fits_type_p (t1, TREE_TYPE (tem)))))
+ return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
+
+ /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
+ constant, we can simplify it. */
+ else if (TREE_CODE (arg1) == INTEGER_CST
+ && (TREE_CODE (arg0) == MIN_EXPR
+ || TREE_CODE (arg0) == MAX_EXPR)
+ && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
+ return optimize_minmax_comparison (t);
+
+ /* If we are comparing an ABS_EXPR with a constant, we can
+ convert all the cases into explicit comparisons, but they may
+ well not be faster than doing the ABS and one comparison.
+ But ABS (X) <= C is a range comparison, which becomes a subtraction
+ and a comparison, and is probably faster. */
+ else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
+ && TREE_CODE (arg0) == ABS_EXPR
+ && ! TREE_SIDE_EFFECTS (arg0))
+ {
+ tree inner = TREE_OPERAND (arg0, 0);
+
+ tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (arg1), arg1));
+ if (TREE_CODE (tem) == INTEGER_CST
+ && ! TREE_CONSTANT_OVERFLOW (tem))
+ return fold (build (TRUTH_ANDIF_EXPR, type,
+ build (GE_EXPR, type, inner, tem),
+ build (LE_EXPR, type, inner, arg1)));
+ }
+
/* If this is an EQ or NE comparison with zero and ARG0 is
(1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
two operations, but the latter can be done in one less insn
@@ -5825,6 +5997,7 @@ fold (expr)
/* Note that it is safe to invert for real values here because we
will check below in the one case that it matters. */
+ t1 = NULL_TREE;
invert = 0;
if (code == NE_EXPR || code == GE_EXPR)
{
@@ -5925,7 +6098,7 @@ fold (expr)
t = build (code, type, tem,
TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
arg0 = tem;
- arg1 = TREE_OPERAND (t, 2);
+ arg1 = TREE_OPERAND (t, 1);
STRIP_NOPS (arg1);
}
}
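
For reference, a similar illustrative sketch, also not part of the commit, of the other comparison cases added to fold above.  The function names are arbitrary, the comments show the intended simplification, and each rewrite only applies when the conditions in the code hold: EQ/NE only for the subtraction case, no overflow in the adjusted constants, and an ABS_EXPR or widening NOP_EXPR actually present in the tree.

#include <stdlib.h>

int plus_cst (int x)            { return x + 3 == 10; }        /* -> x == 7 when the adjusted constant does not overflow      */
int negated (int x)             { return -x != 4; }            /* -> x != -4                                                  */
int diff_zero (int x, int y)    { return x - y == 0; }         /* -> x == y (EQ/NE only, not ordered comparisons)             */
int abs_range (int x)           { return abs (x) <= 5; }       /* -> x >= -5 && x <= 5, if abs is folded to an ABS_EXPR       */
int narrowed (short a, short b) { return (int) a == (int) b; } /* widening conversions stripped; compared in the narrower type */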