author     Jeff Law <law@redhat.com>    2015-05-07 13:18:07 +0000
committer  Jeff Law <law@redhat.com>    2015-05-07 13:18:07 +0000
commit     c7966d87b402e812e7910d9b37fe2664767c1171 (patch)
tree       5cc975e1ee19aa16d83a69b02b66bd72b50748e7
parent     d652551d5a7d8240a282e8f41b7963a430384ee1 (diff)
	* match.pd (bit_and (plus/minus (convert @0) (convert @1) mask): New
	simplifier to narrow arithmetic.
	* generic-match-head.c: (types_match, single_use): New functions.
	* gimple-match-head.c: (types_match, single_use): New functions.

	* gcc.dg/tree-ssa/shorten-1.c: New test.

git-svn-id: https://gcc.gnu.org/svn/gcc/trunk@222877 138bc75d-0d04-0410-961f-82ee72b054a4
-rw-r--r--	gcc/ChangeLog	7
-rw-r--r--	gcc/generic-match-head.c	16
-rw-r--r--	gcc/gimple-match-head.c	18
-rw-r--r--	gcc/match.pd	54
-rw-r--r--	gcc/testsuite/ChangeLog	4
-rw-r--r--	gcc/testsuite/gcc.dg/tree-ssa/shorten-1.c	79
6 files changed, 165 insertions, 13 deletions
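
As a quick illustration of what the new match.pd simplifier is after (a
hedged sketch assuming 8-bit char and 32-bit int; the array names are
made up and nothing below is taken from the patch itself):

    /* The C integer promotions widen both loads to int, so the source
       says "int arithmetic" even though only 8 bits matter.  */
    extern const unsigned char x_fbit[], x_ibit[];

    int
    masked_diff (int i)
    {
      return (x_fbit[i] - x_ibit[i]) & 3;
    }

Because the mask 3 fits in the 8-bit operand type, the new bit_and
pattern allows the subtraction and the mask to be carried out directly
in unsigned char, with a single widening of the masked result back to
int, instead of widening both operands first.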
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 50c430a75aa..78ba4b2eac3 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,10 @@
+2015-05-07 Jeff Law <law@redhat.com>
+
+ * match.pd (bit_and (plus/minus (convert @0) (convert @1) mask): New
+ simplifier to narrow arithmetic.
+ * generic-match-head.c: (types_match, single_use): New functions.
+ * gimple-match-head.c: (types_match, single_use): New functions.
+
2015-05-07 Venkataramanan Kumar <venkataramanan.kumar@amd.com>
* combine.c (make_compound_operation): Remove checks for PLUS/MINUS
diff --git a/gcc/generic-match-head.c b/gcc/generic-match-head.c
index daa56aa370e..efaa2d8b7ce 100644
--- a/gcc/generic-match-head.c
+++ b/gcc/generic-match-head.c
@@ -70,4 +70,20 @@ along with GCC; see the file COPYING3. If not see
#include "dumpfile.h"
#include "generic-match.h"
+/* Routine to determine if the types T1 and T2 are effectively
+ the same for GENERIC. */
+static inline bool
+types_match (tree t1, tree t2)
+{
+ return TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2);
+}
+
+/* Return if T has a single use. For GENERIC, we assume this is
+ always true. */
+
+static inline bool
+single_use (tree t ATTRIBUTE_UNUSED)
+{
+ return true;
+}
diff --git a/gcc/gimple-match-head.c b/gcc/gimple-match-head.c
index c7b2f957bac..7b9348b53ec 100644
--- a/gcc/gimple-match-head.c
+++ b/gcc/gimple-match-head.c
@@ -861,3 +861,21 @@ do_valueize (tree (*valueize)(tree), tree op)
return op;
}
+/* Routine to determine if the types T1 and T2 are effectively
+ the same for GIMPLE. */
+
+static inline bool
+types_match (tree t1, tree t2)
+{
+ return types_compatible_p (t1, t2);
+}
+
+/* Return if T has a single use. For GIMPLE, we also allow any
+ non-SSA_NAME (ie constants) and zero uses to cope with uses
+ that aren't linked up yet. */
+
+static inline bool
+single_use (tree t)
+{
+ return TREE_CODE (t) != SSA_NAME || has_zero_uses (t) || has_single_use (t);
+}
diff --git a/gcc/match.pd b/gcc/match.pd
index 87ecaf10140..51a950acaa9 100644
--- a/gcc/match.pd
+++ b/gcc/match.pd
@@ -289,8 +289,7 @@ along with GCC; see the file COPYING3. If not see
(if (((TREE_CODE (@1) == INTEGER_CST
&& INTEGRAL_TYPE_P (TREE_TYPE (@0))
&& int_fits_type_p (@1, TREE_TYPE (@0)))
- || (GIMPLE && types_compatible_p (TREE_TYPE (@0), TREE_TYPE (@1)))
- || (GENERIC && TREE_TYPE (@0) == TREE_TYPE (@1)))
+ || types_match (TREE_TYPE (@0), TREE_TYPE (@1)))
/* ??? This transform conflicts with fold-const.c doing
Convert (T)(x & c) into (T)x & (T)c, if c is an integer
constants (if x has signed type, the sign bit cannot be set
@@ -949,8 +948,7 @@ along with GCC; see the file COPYING3. If not see
/* Unordered tests if either argument is a NaN. */
(simplify
(bit_ior (unordered @0 @0) (unordered @1 @1))
- (if ((GIMPLE && types_compatible_p (TREE_TYPE (@0), TREE_TYPE (@1)))
- || (GENERIC && TREE_TYPE (@0) == TREE_TYPE (@1)))
+ (if (types_match (TREE_TYPE (@0), TREE_TYPE (@1)))
(unordered @0 @1)))
(simplify
(bit_ior:c (unordered @0 @0) (unordered:c@2 @0 @1))
@@ -1054,7 +1052,7 @@ along with GCC; see the file COPYING3. If not see
operation and convert the result to the desired type. */
(for op (plus minus)
(simplify
- (convert (op (convert@2 @0) (convert@3 @1)))
+ (convert (op@4 (convert@2 @0) (convert@3 @1)))
(if (INTEGRAL_TYPE_P (type)
/* We check for type compatibility between @0 and @1 below,
so there's no need to check that @1/@3 are integral types. */
@@ -1070,15 +1068,45 @@ along with GCC; see the file COPYING3. If not see
&& TYPE_PRECISION (type) == GET_MODE_PRECISION (TYPE_MODE (type))
/* The inner conversion must be a widening conversion. */
&& TYPE_PRECISION (TREE_TYPE (@2)) > TYPE_PRECISION (TREE_TYPE (@0))
- && ((GENERIC
- && (TYPE_MAIN_VARIANT (TREE_TYPE (@0))
- == TYPE_MAIN_VARIANT (TREE_TYPE (@1)))
- && (TYPE_MAIN_VARIANT (TREE_TYPE (@0))
- == TYPE_MAIN_VARIANT (type)))
- || (GIMPLE
- && types_compatible_p (TREE_TYPE (@0), TREE_TYPE (@1))
- && types_compatible_p (TREE_TYPE (@0), type))))
+ && types_match (TREE_TYPE (@0), TREE_TYPE (@1))
+ && types_match (TREE_TYPE (@0), type)
+ && single_use (@4))
(if (TYPE_OVERFLOW_WRAPS (TREE_TYPE (@0)))
(convert (op @0 @1)))
(with { tree utype = unsigned_type_for (TREE_TYPE (@0)); }
(convert (op (convert:utype @0) (convert:utype @1)))))))
+
+/* This is another case of narrowing, specifically when there's an outer
+ BIT_AND_EXPR which masks off bits outside the type of the innermost
+ operands. Like the previous case we have to convert the operands
+ to unsigned types to avoid introducing undefined behaviour for the
+ arithmetic operation. */
+(for op (minus plus)
+ (simplify
+ (bit_and (op@5 (convert@2 @0) (convert@3 @1)) INTEGER_CST@4)
+ (if (INTEGRAL_TYPE_P (type)
+ /* We check for type compatibility between @0 and @1 below,
+ so there's no need to check that @1/@3 are integral types. */
+ && INTEGRAL_TYPE_P (TREE_TYPE (@0))
+ && INTEGRAL_TYPE_P (TREE_TYPE (@2))
+ /* The precision of the type of each operand must match the
+ precision of the mode of each operand, similarly for the
+ result. */
+ && (TYPE_PRECISION (TREE_TYPE (@0))
+ == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (@0))))
+ && (TYPE_PRECISION (TREE_TYPE (@1))
+ == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (@1))))
+ && TYPE_PRECISION (type) == GET_MODE_PRECISION (TYPE_MODE (type))
+ /* The inner conversion must be a widening conversion. */
+ && TYPE_PRECISION (TREE_TYPE (@2)) > TYPE_PRECISION (TREE_TYPE (@0))
+ && types_match (TREE_TYPE (@0), TREE_TYPE (@1))
+ && (tree_int_cst_min_precision (@4, UNSIGNED)
+ <= TYPE_PRECISION (TREE_TYPE (@0)))
+ && single_use (@5))
+ (if (TYPE_OVERFLOW_WRAPS (TREE_TYPE (@0)))
+ (with { tree ntype = TREE_TYPE (@0); }
+ (convert (bit_and (op @0 @1) (convert:ntype @4)))))
+ (with { tree utype = unsigned_type_for (TREE_TYPE (@0)); }
+ (convert (bit_and (op (convert:utype @0) (convert:utype @1))
+ (convert:utype @4)))))))
+
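
One more hedged note, this time on the tree_int_cst_min_precision guard
in the new pattern (made-up example, assuming 8-bit char and 32-bit
int; not from the patch): the mask has to fit within the precision of
the narrow operand type, because bits of the widened result above that
precision would otherwise be dropped by narrowing.

    int
    mask_fits (signed char a, signed char b)
    {
      return (a - b) & 3;      /* mask needs 2 bits <= 8: narrowing is safe */
    }

    int
    mask_too_wide (signed char a, signed char b)
    {
      return (a - b) & 0x1ff;  /* mask needs 9 bits > 8: bit 8 of the
                                  int-wide difference would be lost, so
                                  the pattern must not fire here */
    }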
diff --git a/gcc/testsuite/ChangeLog b/gcc/testsuite/ChangeLog
index 69cb4ffd56c..1bf57d1dca6 100644
--- a/gcc/testsuite/ChangeLog
+++ b/gcc/testsuite/ChangeLog
@@ -1,3 +1,7 @@
+2015-05-07 Jeff Law <law@redhat.com>
+
+ * gcc.dg/tree-ssa/shorten-1.c: New test.
+
2015-05-07 Richard Biener <rguenther@suse.de>
PR tree-optimization/66002
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/shorten-1.c b/gcc/testsuite/gcc.dg/tree-ssa/shorten-1.c
new file mode 100644
index 00000000000..c9015c58aa6
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/tree-ssa/shorten-1.c
@@ -0,0 +1,79 @@
+/* { dg-do compile } */
+/* { dg-options "-O2 -fdump-tree-optimized" } */
+
+extern const unsigned char mode_ibit[];
+extern const unsigned char mode_fbit[];
+extern const signed char smode_ibit[];
+extern const signed char smode_fbit[];
+
+/* We use bit-and rather than modulo to ensure we're actually
+ testing the desired match.pd pattern. */
+unsigned char
+muufubar (int indx)
+{
+ int ret = (mode_fbit [indx] - mode_ibit [indx]) & 3;
+ return ret;
+}
+
+signed char
+msufubar (int indx)
+{
+ int ret = (mode_fbit [indx] - mode_ibit [indx]) & 3;
+ return ret;
+}
+
+unsigned char
+musfubar (int indx)
+{
+ int ret = (smode_fbit [indx] - smode_ibit [indx]) & 3;
+ return ret;
+}
+
+signed char
+mssfubar (int indx)
+{
+ int ret = (smode_fbit [indx] - smode_ibit [indx]) & 3;
+ return ret;
+}
+
+
+unsigned char
+puufubar (int indx)
+{
+ int ret = (mode_fbit [indx] + mode_ibit [indx]) & 3;
+ return ret;
+}
+
+signed char
+psufubar (int indx)
+{
+ int ret = (mode_fbit [indx] + mode_ibit [indx]) & 3;
+ return ret;
+}
+
+unsigned char
+pusfubar (int indx)
+{
+ int ret = (smode_fbit [indx] + smode_ibit [indx]) & 3;
+ return ret;
+}
+
+signed char
+pssfubar (int indx)
+{
+ int ret = (smode_fbit [indx] + smode_ibit [indx]) & 3;
+ return ret;
+}
+
+/* The shortening patterns in match.pd should arrange to do the
+ arithmetic in char modes and thus any casts to ints should
+ have been removed. */
+/* { dg-final {scan-tree-dump-not "\\(int\\)" "optimized"} } */
+
+/* We should have casted 4 operands from signed to unsigned char types. */
+/* { dg-final {scan-tree-dump-times "\\(unsigned char\\)" 8 "optimized" } } */
+
+/* And two return values should have been casted from unsigned char to
+ a normal char. */
+/* { dg-final {scan-tree-dump-times "\\(signed char\\)" 4 "optimized" } } */
+/* { dg-final { cleanup-tree-dump "optimized" } } */