Diffstat (limited to 'gcc/fold-const.c')
-rw-r--r--  gcc/fold-const.c  40
1 file changed, 23 insertions(+), 17 deletions(-)
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index 8e32c7222b5..f6d92abd250 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -1933,7 +1933,6 @@ fold_convertible_p (const_tree type, const_tree arg)
case REAL_TYPE:
case FIXED_POINT_TYPE:
- case COMPLEX_TYPE:
case VECTOR_TYPE:
case VOID_TYPE:
return TREE_CODE (type) == TREE_CODE (orig);
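For orientation only (this does not assert the motivation for the change): with the dropped case, a bare TREE_CODE match is no longer accepted here for complex types. The hedged snippet below, plain C with illustrative names, shows two source types that both map to that single tree code:

#include <complex.h>

/* Illustration only, not part of the patch: both local variables below
   have a type whose tree code is COMPLEX_TYPE, yet assigning one to the
   other converts each component (float -> double), so a TREE_CODE match
   alone does not distinguish them.  */
void
copy_complex (void)
{
  float _Complex cf = 1.0f + 2.0f * I;
  double _Complex cd = cf;   /* real and imaginary parts widened to double */
  (void) cd;                 /* silence unused-variable warnings */
}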
@@ -12746,11 +12745,15 @@ fold_binary_loc (location_t loc,
|| TREE_CODE (arg0) == BIT_IOR_EXPR
|| TREE_CODE (arg0) == BIT_XOR_EXPR)
&& TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
- return fold_build2_loc (loc, TREE_CODE (arg0), type,
- fold_build2_loc (loc, code, type,
- TREE_OPERAND (arg0, 0), arg1),
- fold_build2_loc (loc, code, type,
- TREE_OPERAND (arg0, 1), arg1));
+ {
+ tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
+ tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
+ return fold_build2_loc (loc, TREE_CODE (arg0), type,
+ fold_build2_loc (loc, code, type,
+ arg00, arg1),
+ fold_build2_loc (loc, code, type,
+ arg01, arg1));
+ }
/* Two consecutive rotates adding up to the precision of the
type can be ignored. */
@@ -12762,7 +12765,7 @@ fold_binary_loc (location_t loc,
&& ((TREE_INT_CST_LOW (arg1)
+ TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
== prec))
- return TREE_OPERAND (arg0, 0);
+ return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
/* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
(X & C2) >> C1 into (X >> C1) & (C2 >> C1)
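A minimal C-level sketch of the two rotate identities the hunks above touch, assuming a hypothetical helper rotr32 standing in for a right rotate on a 32-bit type; the visible change in the diff appears to be only that the rebuilt operands and the returned operand now go through fold_convert_loc to the outer type:

#include <assert.h>
#include <stdint.h>

/* Sketch only, not part of the patch; rotr32 is an illustrative helper.  */
static uint32_t
rotr32 (uint32_t x, unsigned n)
{
  return (x >> (n & 31)) | (x << (-n & 31));
}

int
main (void)
{
  uint32_t x = 0x12345678u, c = 0x00ff00ffu;

  /* (X & C) rotated by N equals (X rotated by N) & (C rotated by N):
     the rotate distributes over the bitwise operation, which is what
     the first hunk rebuilds (its operands are now converted to the
     outer type first).  */
  assert (rotr32 (x & c, 8) == (rotr32 (x, 8) & rotr32 (c, 8)));

  /* Two consecutive rotates in the same direction whose counts sum to
     the precision (32) cancel out; the second hunk returns the inner
     operand, now converted to the outer type.  */
  assert (rotr32 (rotr32 (x, 8), 24) == x);

  return 0;
}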
@@ -16738,17 +16741,20 @@ fold_indirect_ref_1 (location_t loc, tree type, tree op0)
if (TREE_CODE (op00type) == VECTOR_TYPE
&& type == TREE_TYPE (op00type))
{
- HOST_WIDE_INT offset = tree_to_shwi (op01);
tree part_width = TYPE_SIZE (type);
- unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
- unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
- tree index = bitsize_int (indexi);
-
- if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
- return fold_build3_loc (loc,
- BIT_FIELD_REF, type, op00,
- part_width, index);
-
+ unsigned HOST_WIDE_INT max_offset
+ = (tree_to_uhwi (part_width) / BITS_PER_UNIT
+ * TYPE_VECTOR_SUBPARTS (op00type));
+ if (tree_int_cst_sign_bit (op01) == 0
+ && compare_tree_int (op01, max_offset) == -1)
+ {
+ unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
+ unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
+ tree index = bitsize_int (indexi);
+ return fold_build3_loc (loc,
+ BIT_FIELD_REF, type, op00,
+ part_width, index);
+ }
}
/* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
else if (TREE_CODE (op00type) == COMPLEX_TYPE
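At the source level, the access pattern folded by the VECTOR_TYPE case in this final hunk (the part that gains the bounds check) looks roughly like the hedged sketch below; v4si and lane are illustrative names, and the vector_size attribute is the GCC extension that yields a VECTOR_TYPE:

/* Sketch only, not part of the patch; v4si and lane are illustrative.  */
typedef int v4si __attribute__ ((vector_size (16)));

int
lane (v4si v)
{
  /* ((int *) &v)[2] reads one 32-bit element of the vector.  The hunk
     above turns such an access into a BIT_FIELD_REF of v (a 32-bit
     field at bit offset 2 * 32), and it now does so only when the
     constant byte offset is non-negative and smaller than
     sizeof (v) -- 16 bytes here.  */
  return ((int *) &v)[2];
}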