-rw-r--r--  gcc/ChangeLog                      |  36
-rw-r--r--  gcc/config/rs6000/rs6000-protos.h  |   6
-rw-r--r--  gcc/config/rs6000/rs6000.c         | 407
-rw-r--r--  gcc/config/rs6000/rs6000.h         |  12
-rw-r--r--  gcc/config/rs6000/rs6000.md        | 287
5 files changed, 576 insertions, 172 deletions
diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 17a3bc49bdb..540409a74d4 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,5 +1,41 @@
2002-07-24 Alan Modra <amodra@bigpond.net.au>
+ * config/rs6000/rs6000-protos.h (mask_operand_wrap): Declare.
+ (mask64_2_operand): Declare.
+ (build_mask64_2_operands): Declare.
+ (and64_2_operand): Declare.
+ (extract_MB): Declare.
+ (extract_ME): Declare.
+ * config/rs6000/rs6000.c (mask64_operand): Allow all ones. Remove
+ CONST_DOUBLE code.
+ (mask_operand_wrap): New insn predicate.
+ (mask64_2_operand): Likewise.
+ (and64_2_operand): Likewise.
+ (build_mask64_2_operands): New function.
+ (extract_MB): New function.
+ (extract_ME): New function.
+ (print_operand <case m,M>): Use extract_MB and extract_ME.
+ (print_operand <case S>): Allow all ones. Remove CONST_DOUBLE support.
+ * config/rs6000/rs6000.h (EXTRA_CONSTRAINT): Add 't'.
+ (PREDICATE_CODES): Add and64_2_operand, mask_operand_wrap and
+ mask64_2_operand. Remove CONST_DOUBLE from mask64_operand.
+ * config/rs6000/rs6000.md (andsi3_internal3): New.
+ (andsi3_internal3+1): Enable split for powerpc64.
+ (andsi3_internal3+2): New split.
+ (andsi3_internal4): Renamed old andsi3_internal3.
+ (andsi3_internal5): New.
+ (andsi3_internal5+1): Enable split for powerpc64.
+ (andsi3_internal5+2): New split.
+ (andsi3_internal6, andsi3_internal7, andsi3_internal8): New.
+ (anddi3): Handle 't' constraint.
+ (anddi3+1): New split.
+ (anddi3_internal2): Handle 't' constraint.
+ (anddi3_internal2+1): New split.
+ (anddi3_internal3): Handle 't' constraint.
+ (anddi3_internal3+1): New split.
+
+2002-07-24 Alan Modra <amodra@bigpond.net.au>
+
* config/rs6000/rs6000.md: Remove scratch reg on insns using
addze and similar (plus (comparison r1 r2) r3) insns. Add
missing scratch reg in one case. Formatting fixes.
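
Before the file-by-file changes, here is a standalone sketch (not part of the patch; plain ISO C, with uint64_t standing in for HOST_WIDE_INT and an illustrative function name) of the single-transition test that the revised mask64_operand performs: isolate the lowest set bit with c & -c and check that every bit above it is 1.

#include <stdint.h>
#include <stdio.h>

/* Return 1 if C is a contiguous run of 1s anchored at one end of the
   64-bit word, i.e. has at most one 0<->1 transition.  */
static int
is_single_run_mask (uint64_t c)
{
  uint64_t lsb;

  if (c == 0)
    return 0;			/* all zeros rejected */
  if (c & 1)
    c = ~c;			/* inverting preserves the transition count */
  lsb = c & -c;			/* isolate the lowest set bit */
  return c == -lsb;		/* 1 iff every bit above it is also set */
}

int
main (void)
{
  printf ("%d\n", is_single_run_mask (0xffffffffffff0000ULL));	/* 1 */
  printf ("%d\n", is_single_run_mask (0x0000ffff00000000ULL));	/* 0 */
  printf ("%d\n", is_single_run_mask (~0ULL));	/* 1: all ones now allowed */
  return 0;
}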
diff --git a/gcc/config/rs6000/rs6000-protos.h b/gcc/config/rs6000/rs6000-protos.h
index 50a75c72e2c..652cfb9f40a 100644
--- a/gcc/config/rs6000/rs6000-protos.h
+++ b/gcc/config/rs6000/rs6000-protos.h
@@ -64,8 +64,12 @@ extern int non_add_cint_operand PARAMS ((rtx, enum machine_mode));
extern int non_logical_cint_operand PARAMS ((rtx, enum machine_mode));
extern int logical_operand PARAMS ((rtx, enum machine_mode));
extern int mask_operand PARAMS ((rtx, enum machine_mode));
+extern int mask_operand_wrap PARAMS ((rtx, enum machine_mode));
extern int mask64_operand PARAMS ((rtx, enum machine_mode));
+extern int mask64_2_operand PARAMS ((rtx, enum machine_mode));
+extern void build_mask64_2_operands PARAMS ((rtx, rtx *));
extern int and64_operand PARAMS ((rtx, enum machine_mode));
+extern int and64_2_operand PARAMS ((rtx, enum machine_mode));
extern int and_operand PARAMS ((rtx, enum machine_mode));
extern int count_register_operand PARAMS ((rtx, enum machine_mode));
extern int xer_operand PARAMS ((rtx, enum machine_mode));
@@ -98,6 +102,8 @@ extern int addrs_ok_for_quad_peep PARAMS ((rtx, rtx));
extern enum reg_class secondary_reload_class PARAMS ((enum reg_class,
enum machine_mode, rtx));
extern int ccr_bit PARAMS ((rtx, int));
+extern int extract_MB PARAMS ((rtx));
+extern int extract_ME PARAMS ((rtx));
extern void print_operand PARAMS ((FILE *, rtx, int));
extern void print_operand_address PARAMS ((FILE *, rtx));
extern enum rtx_code rs6000_reverse_condition PARAMS ((enum machine_mode,
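
The new mask64_2_operand declared above relaxes that test to allow up to three transitions, which is exactly what a pair of rldicl/rldicr rotate-and-mask insns can handle. A standalone sketch of the test implemented in rs6000.c below (plain C with uint64_t; the function name is illustrative):

#include <stdint.h>

/* Accept constants whose 64-bit pattern has at most three 0<->1
   transitions.  Each round isolates the lowest remaining transition
   with c & -c and then erases it.  */
static int
at_most_three_transitions (uint64_t c)
{
  uint64_t lsb;

  if (c == 0)
    return 0;			/* all zeros rejected */
  if (c & 1)
    c = ~c;			/* start with the low bit clear */

  lsb = c & -c;			/* first transition */
  c = ~c;
  c &= -lsb;			/* erase it */

  lsb = c & -c;			/* second transition */
  c = ~c;
  c &= -lsb;			/* erase it */

  lsb = c & -c;			/* third transition, if any */
  return c == -lsb;		/* rest must be all 1s (or c is now zero) */
}

/* For example, 0x00fff000000fffff (three transitions) is accepted,
   while 0x0f0f0f0f0f0f0f0f is rejected.  */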
diff --git a/gcc/config/rs6000/rs6000.c b/gcc/config/rs6000/rs6000.c
index 94b35ff16f3..dbb1dee253d 100644
--- a/gcc/config/rs6000/rs6000.c
+++ b/gcc/config/rs6000/rs6000.c
@@ -1496,64 +1496,185 @@ mask_operand (op, mode)
return c == -lsb;
}
+/* Return 1 for the PowerPC64 rlwinm corner case. */
+
+int
+mask_operand_wrap (op, mode)
+ rtx op;
+ enum machine_mode mode ATTRIBUTE_UNUSED;
+{
+ HOST_WIDE_INT c, lsb;
+
+ if (GET_CODE (op) != CONST_INT)
+ return 0;
+
+ c = INTVAL (op);
+
+ if ((c & 0x80000001) != 0x80000001)
+ return 0;
+
+ c = ~c;
+ if (c == 0)
+ return 0;
+
+ lsb = c & -c;
+ c = ~c;
+ c &= -lsb;
+ lsb = c & -c;
+ return c == -lsb;
+}
+
/* Return 1 if the operand is a constant that is a PowerPC64 mask.
It is if there are no more than one 1->0 or 0->1 transitions.
- Reject all ones and all zeros, since these should have been optimized
- away and confuse the making of MB and ME. */
+ Reject all zeros, since zero should have been optimized away and
+ confuses the making of MB and ME. */
int
mask64_operand (op, mode)
rtx op;
- enum machine_mode mode;
+ enum machine_mode mode ATTRIBUTE_UNUSED;
{
if (GET_CODE (op) == CONST_INT)
{
HOST_WIDE_INT c, lsb;
- /* We don't change the number of transitions by inverting,
- so make sure we start with the LS bit zero. */
c = INTVAL (op);
- if (c & 1)
- c = ~c;
- /* Reject all zeros or all ones. */
+ /* Reject all zeros. */
if (c == 0)
return 0;
+ /* We don't change the number of transitions by inverting,
+ so make sure we start with the LS bit zero. */
+ if (c & 1)
+ c = ~c;
+
/* Find the transition, and check that all bits above are 1's. */
lsb = c & -c;
return c == -lsb;
}
- else if (GET_CODE (op) == CONST_DOUBLE
- && (mode == VOIDmode || mode == DImode))
+ return 0;
+}
+
+/* Like mask64_operand, but allow up to three transitions. This
+ predicate is used by insn patterns that generate two rldicl or
+ rldicr machine insns. */
+
+int
+mask64_2_operand (op, mode)
+ rtx op;
+ enum machine_mode mode ATTRIBUTE_UNUSED;
+{
+ if (GET_CODE (op) == CONST_INT)
{
- HOST_WIDE_INT low, high, lsb;
+ HOST_WIDE_INT c, lsb;
- if (HOST_BITS_PER_WIDE_INT < 64)
- high = CONST_DOUBLE_HIGH (op);
+ c = INTVAL (op);
- low = CONST_DOUBLE_LOW (op);
- if (low & 1)
- {
- if (HOST_BITS_PER_WIDE_INT < 64)
- high = ~high;
- low = ~low;
- }
+ /* Disallow all zeros. */
+ if (c == 0)
+ return 0;
- if (low == 0)
- {
- if (HOST_BITS_PER_WIDE_INT >= 64 || high == 0)
- return 0;
+ /* We don't change the number of transitions by inverting,
+ so make sure we start with the LS bit zero. */
+ if (c & 1)
+ c = ~c;
- lsb = high & -high;
- return high == -lsb;
- }
+ /* Find the first transition. */
+ lsb = c & -c;
- lsb = low & -low;
- return low == -lsb && (HOST_BITS_PER_WIDE_INT >= 64 || high == ~0);
+ /* Invert to look for a second transition. */
+ c = ~c;
+
+ /* Erase first transition. */
+ c &= -lsb;
+
+ /* Find the second transition. */
+ lsb = c & -c;
+
+ /* Invert to look for a third transition. */
+ c = ~c;
+
+ /* Erase second transition. */
+ c &= -lsb;
+
+ /* Find the third transition (if any). */
+ lsb = c & -c;
+
+ /* Match if all the bits above are 1's (or c is zero). */
+ return c == -lsb;
+ }
+ return 0;
+}
+
+/* Generates shifts and masks for a pair of rldicl or rldicr insns to
+ implement ANDing by the mask IN. */
+void
+build_mask64_2_operands (in, out)
+ rtx in;
+ rtx *out;
+{
+#if HOST_BITS_PER_WIDE_INT >= 64
+ unsigned HOST_WIDE_INT c, lsb, m1, m2;
+ int shift;
+
+ if (GET_CODE (in) != CONST_INT)
+ abort ();
+
+ c = INTVAL (in);
+ if (c & 1)
+ {
+ /* Assume c initially something like 0x00fff000000fffff. The idea
+ is to rotate the word so that the middle ^^^^^^ group of zeros
+ is at the MS end and can be cleared with an rldicl mask. We then
+ rotate back and clear off the MS ^^ group of zeros with a
+ second rldicl. */
+ c = ~c; /* c == 0xff000ffffff00000 */
+ lsb = c & -c; /* lsb == 0x0000000000100000 */
+ m1 = -lsb; /* m1 == 0xfffffffffff00000 */
+ c = ~c; /* c == 0x00fff000000fffff */
+ c &= -lsb; /* c == 0x00fff00000000000 */
+ lsb = c & -c; /* lsb == 0x0000100000000000 */
+ c = ~c; /* c == 0xff000fffffffffff */
+ c &= -lsb; /* c == 0xff00000000000000 */
+ shift = 0;
+ while ((lsb >>= 1) != 0)
+ shift++; /* shift == 44 on exit from loop */
+ m1 <<= 64 - shift; /* m1 == 0xffffff0000000000 */
+ m1 = ~m1; /* m1 == 0x000000ffffffffff */
+ m2 = ~c; /* m2 == 0x00ffffffffffffff */
}
else
- return 0;
+ {
+ /* Assume c initially something like 0xff000f0000000000. The idea
+ is to rotate the word so that the ^^^ middle group of zeros
+ is at the LS end and can be cleared with an rldicr mask. We then
+ rotate back and clear off the LS group of ^^^^^^^^^^ zeros with
+ a second rldicr. */
+ lsb = c & -c; /* lsb == 0x0000010000000000 */
+ m2 = -lsb; /* m2 == 0xffffff0000000000 */
+ c = ~c; /* c == 0x00fff0ffffffffff */
+ c &= -lsb; /* c == 0x00fff00000000000 */
+ lsb = c & -c; /* lsb == 0x0000100000000000 */
+ c = ~c; /* c == 0xff000fffffffffff */
+ c &= -lsb; /* c == 0xff00000000000000 */
+ shift = 0;
+ while ((lsb >>= 1) != 0)
+ shift++; /* shift == 44 on exit from loop */
+ m1 = ~c; /* m1 == 0x00ffffffffffffff */
+ m1 >>= shift; /* m1 == 0x0000000000000fff */
+ m1 = ~m1; /* m1 == 0xfffffffffffff000 */
+ }
+
+ /* Note that when we only have two 0->1 and 1->0 transitions, one of the
+ masks will be all 1's. We are guaranteed more than one transition. */
+ out[0] = GEN_INT (64 - shift);
+ out[1] = GEN_INT (m1);
+ out[2] = GEN_INT (shift);
+ out[3] = GEN_INT (m2);
+#else
+ abort ();
+#endif
}
/* Return 1 if the operand is either a non-special register or a constant
@@ -1570,6 +1691,20 @@ and64_operand (op, mode)
return (logical_operand (op, mode) || mask64_operand (op, mode));
}
+/* Like the above, but also match constants that can be implemented
+ with two rldicl or rldicr insns. */
+
+int
+and64_2_operand (op, mode)
+ rtx op;
+ enum machine_mode mode;
+{
+ if (fixed_regs[CR0_REGNO]) /* CR0 not available, don't do andi./andis. */
+ return gpc_reg_operand (op, mode) || mask64_2_operand (op, mode);
+
+ return logical_operand (op, mode) || mask64_2_operand (op, mode);
+}
+
/* Return 1 if the operand is either a non-special register or a
constant that can be used as the operand of an RS/6000 logical AND insn. */
@@ -6056,6 +6191,83 @@ rs6000_init_machine_status ()
return ggc_alloc_cleared (sizeof (machine_function));
}
+/* These macros test for integers and extract the low-order bits. */
+#define INT_P(X) \
+((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
+ && GET_MODE (X) == VOIDmode)
+
+#define INT_LOWPART(X) \
+ (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
+
+int
+extract_MB (op)
+ rtx op;
+{
+ int i;
+ unsigned long val = INT_LOWPART (op);
+
+ /* If the high bit is zero, the value is the first 1 bit we find
+ from the left. */
+ if ((val & 0x80000000) == 0)
+ {
+ if ((val & 0xffffffff) == 0)
+ abort ();
+
+ i = 1;
+ while (((val <<= 1) & 0x80000000) == 0)
+ ++i;
+ return i;
+ }
+
+ /* If the high bit is set and the low bit is not, or the mask is all
+ 1's, the value is zero. */
+ if ((val & 1) == 0 || (val & 0xffffffff) == 0xffffffff)
+ return 0;
+
+ /* Otherwise we have a wrap-around mask. Look for the first 0 bit
+ from the right. */
+ i = 31;
+ while (((val >>= 1) & 1) != 0)
+ --i;
+
+ return i;
+}
+
+int
+extract_ME (op)
+ rtx op;
+{
+ int i;
+ unsigned long val = INT_LOWPART (op);
+
+ /* If the low bit is zero, the value is the first 1 bit we find from
+ the right. */
+ if ((val & 1) == 0)
+ {
+ if ((val & 0xffffffff) == 0)
+ abort ();
+
+ i = 30;
+ while (((val >>= 1) & 1) == 0)
+ --i;
+
+ return i;
+ }
+
+ /* If the low bit is set and the high bit is not, or the mask is all
+ 1's, the value is 31. */
+ if ((val & 0x80000000) == 0 || (val & 0xffffffff) == 0xffffffff)
+ return 31;
+
+ /* Otherwise we have a wrap-around mask. Look for the first 0 bit
+ from the left. */
+ i = 0;
+ while (((val <<= 1) & 0x80000000) != 0)
+ ++i;
+
+ return i;
+}
+
/* Print an operand. Recognize special options, documented below. */
#if TARGET_ELF
@@ -6074,14 +6286,7 @@ print_operand (file, x, code)
{
int i;
HOST_WIDE_INT val;
-
- /* These macros test for integers and extract the low-order bits. */
-#define INT_P(X) \
-((GET_CODE (X) == CONST_INT || GET_CODE (X) == CONST_DOUBLE) \
- && GET_MODE (X) == VOIDmode)
-
-#define INT_LOWPART(X) \
- (GET_CODE (X) == CONST_INT ? INTVAL (X) : CONST_DOUBLE_LOW (X))
+ unsigned HOST_WIDE_INT uval;
switch (code)
{
@@ -6297,34 +6502,7 @@ print_operand (file, x, code)
if (! mask_operand (x, SImode))
output_operand_lossage ("invalid %%m value");
- val = INT_LOWPART (x);
-
- /* If the high bit is set and the low bit is not, the value is zero.
- If the high bit is zero, the value is the first 1 bit we find from
- the left. */
- if ((val & 0x80000000) && ((val & 1) == 0))
- {
- putc ('0', file);
- return;
- }
- else if ((val & 0x80000000) == 0)
- {
- for (i = 1; i < 32; i++)
- if ((val <<= 1) & 0x80000000)
- break;
- fprintf (file, "%d", i);
- return;
- }
-
- /* Otherwise, look for the first 0 bit from the right. The result is its
- number plus 1. We know the low-order bit is one. */
- for (i = 0; i < 32; i++)
- if (((val >>= 1) & 1) == 0)
- break;
-
- /* If we ended in ...01, i would be 0. The correct value is 31, so
- we want 31 - i. */
- fprintf (file, "%d", 31 - i);
+ fprintf (file, "%d", extract_MB (x));
return;
case 'M':
@@ -6332,35 +6510,7 @@ print_operand (file, x, code)
if (! mask_operand (x, SImode))
output_operand_lossage ("invalid %%M value");
- val = INT_LOWPART (x);
-
- /* If the low bit is set and the high bit is not, the value is 31.
- If the low bit is zero, the value is the first 1 bit we find from
- the right. */
- if ((val & 1) && ((val & 0x80000000) == 0))
- {
- fputs ("31", file);
- return;
- }
- else if ((val & 1) == 0)
- {
- for (i = 0; i < 32; i++)
- if ((val >>= 1) & 1)
- break;
-
- /* If we had ....10, i would be 0. The result should be
- 30, so we need 30 - i. */
- fprintf (file, "%d", 30 - i);
- return;
- }
-
- /* Otherwise, look for the first 0 bit from the left. The result is its
- number minus 1. We know the high-order bit is one. */
- for (i = 0; i < 32; i++)
- if (((val <<= 1) & 0x80000000) == 0)
- break;
-
- fprintf (file, "%d", i);
+ fprintf (file, "%d", extract_ME (x));
return;
/* %n outputs the negative of its operand. */
@@ -6456,68 +6606,31 @@ print_operand (file, x, code)
return;
case 'S':
- /* PowerPC64 mask position. All 0's and all 1's are excluded.
+ /* PowerPC64 mask position. All 0's is excluded.
CONST_INT 32-bit mask is considered sign-extended so any
transition must occur within the CONST_INT, not on the boundary. */
if (! mask64_operand (x, DImode))
output_operand_lossage ("invalid %%S value");
- val = INT_LOWPART (x);
+ uval = INT_LOWPART (x);
- if (val & 1) /* Clear Left */
+ if (uval & 1) /* Clear Left */
{
- for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
- if (!((val >>= 1) & 1))
- break;
-
-#if HOST_BITS_PER_WIDE_INT == 32
- if (GET_CODE (x) == CONST_DOUBLE && i == 32)
- {
- val = CONST_DOUBLE_HIGH (x);
-
- if (val == 0)
- --i;
- else
- for (i = 32; i < 64; i++)
- if (!((val >>= 1) & 1))
- break;
- }
-#endif
- /* i = index of last set bit from right
- mask begins at 63 - i from left */
- if (i > 63)
- output_operand_lossage ("%%S computed all 1's mask");
-
- fprintf (file, "%d", 63 - i);
- return;
+ uval &= ((unsigned HOST_WIDE_INT) 1 << 63 << 1) - 1;
+ i = 64;
}
- else /* Clear Right */
+ else /* Clear Right */
{
- for (i = 0; i < HOST_BITS_PER_WIDE_INT; i++)
- if ((val >>= 1) & 1)
- break;
-
-#if HOST_BITS_PER_WIDE_INT == 32
- if (GET_CODE (x) == CONST_DOUBLE && i == 32)
- {
- val = CONST_DOUBLE_HIGH (x);
-
- if (val == (HOST_WIDE_INT) -1)
- --i;
- else
- for (i = 32; i < 64; i++)
- if ((val >>= 1) & 1)
- break;
- }
-#endif
- /* i = index of last clear bit from right
- mask ends at 62 - i from left */
- if (i > 62)
- output_operand_lossage ("%%S computed all 0's mask");
-
- fprintf (file, "%d", 62 - i);
- return;
+ uval = ~uval;
+ uval &= ((unsigned HOST_WIDE_INT) 1 << 63 << 1) - 1;
+ i = 63;
}
+ while (uval != 0)
+ --i, uval >>= 1;
+ if (i < 0)
+ abort ();
+ fprintf (file, "%d", i);
+ return;
case 'T':
/* Print the symbolic name of a branch target register. */
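
To see why the four constants produced by build_mask64_2_operands let the new anddi3 splits replace one AND with two rotate-and-mask steps, here is a standalone check (not GCC code; uint64_t replaces HOST_WIDE_INT, and rotl64/decompose are illustrative helpers) that mirrors the decomposition above and verifies that rotate/AND/rotate/AND matches a single AND with the original mask:

#include <stdint.h>
#include <stdio.h>

static uint64_t
rotl64 (uint64_t x, unsigned int n)
{
  n &= 63;
  return n ? (x << n) | (x >> (64 - n)) : x;
}

/* Mirror build_mask64_2_operands: OUT receives {rot1, m1, rot2, m2}.  */
static void
decompose (uint64_t c, uint64_t out[4])
{
  uint64_t lsb, m1, m2;
  int shift = 0;

  if (c & 1)
    {
      c = ~c;
      lsb = c & -c;
      m1 = -lsb;
      c = ~c;
      c &= -lsb;
      lsb = c & -c;
      c = ~c;
      c &= -lsb;
      while ((lsb >>= 1) != 0)
        shift++;
      m1 <<= 64 - shift;
      m1 = ~m1;
      m2 = ~c;
    }
  else
    {
      lsb = c & -c;
      m2 = -lsb;
      c = ~c;
      c &= -lsb;
      lsb = c & -c;
      c = ~c;
      c &= -lsb;
      while ((lsb >>= 1) != 0)
        shift++;
      m1 = ~c;
      m1 >>= shift;
      m1 = ~m1;
    }
  out[0] = 64 - shift;
  out[1] = m1;
  out[2] = shift;
  out[3] = m2;
}

int
main (void)
{
  uint64_t mask = 0x00fff000000fffffULL;	/* the example in the comments */
  uint64_t x = 0x123456789abcdef0ULL;
  uint64_t o[4], two_step;

  decompose (mask, o);
  two_step = rotl64 (rotl64 (x, o[0]) & o[1], o[2]) & o[3];
  printf ("%d\n", two_step == (x & mask));	/* prints 1 */
  return 0;
}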
diff --git a/gcc/config/rs6000/rs6000.h b/gcc/config/rs6000/rs6000.h
index 51170c0eab1..b63bb78649c 100644
--- a/gcc/config/rs6000/rs6000.h
+++ b/gcc/config/rs6000/rs6000.h
@@ -1178,7 +1178,8 @@ enum reg_class
'R' is for AIX TOC entries.
'S' is a constant that can be placed into a 64-bit mask operand
'T' is a constant that can be placed into a 32-bit mask operand
- 'U' is for V.4 small data references. */
+ 'U' is for V.4 small data references.
+ 't' is for AND masks that can be performed by two rldic{l,r} insns. */
#define EXTRA_CONSTRAINT(OP, C) \
((C) == 'Q' ? GET_CODE (OP) == MEM && GET_CODE (XEXP (OP, 0)) == REG \
@@ -1187,6 +1188,10 @@ enum reg_class
: (C) == 'T' ? mask_operand (OP, SImode) \
: (C) == 'U' ? (DEFAULT_ABI == ABI_V4 \
&& small_data_operand (OP, GET_MODE (OP))) \
+ : (C) == 't' ? (mask64_2_operand (OP, DImode) \
+ && (fixed_regs[CR0_REGNO] \
+ || !logical_operand (OP, DImode)) \
+ && !mask64_operand (OP, DImode)) \
: 0)
/* Given an rtx X being reloaded into a reg required to be
@@ -2749,10 +2754,13 @@ extern char rs6000_reg_names[][8]; /* register names (0 vs. %r0). */
{"non_add_cint_operand", {CONST_INT}}, \
{"and_operand", {SUBREG, REG, CONST_INT}}, \
{"and64_operand", {SUBREG, REG, CONST_INT, CONST_DOUBLE}}, \
+ {"and64_2_operand", {SUBREG, REG, CONST_INT}}, \
{"logical_operand", {SUBREG, REG, CONST_INT, CONST_DOUBLE}}, \
{"non_logical_cint_operand", {CONST_INT, CONST_DOUBLE}}, \
{"mask_operand", {CONST_INT}}, \
- {"mask64_operand", {CONST_INT, CONST_DOUBLE}}, \
+ {"mask_operand_wrap", {CONST_INT}}, \
+ {"mask64_operand", {CONST_INT}}, \
+ {"mask64_2_operand", {CONST_INT}}, \
{"count_register_operand", {REG}}, \
{"xer_operand", {REG}}, \
{"symbol_ref_operand", {SYMBOL_REF}}, \
diff --git a/gcc/config/rs6000/rs6000.md b/gcc/config/rs6000/rs6000.md
index 4439603c21e..5e2b5870098 100644
--- a/gcc/config/rs6000/rs6000.md
+++ b/gcc/config/rs6000/rs6000.md
@@ -2885,7 +2885,7 @@
{andiu.|andis.} %0,%1,%u2")
;; Note to set cr's other than cr0 we do the and immediate and then
-;; the test again -- this avoids a mcrf which on the higher end
+;; the test again -- this avoids a mfcr which on the higher end
;; machines causes an execution serialization
(define_insn "*andsi3_internal2"
@@ -2908,6 +2908,26 @@
[(set_attr "type" "compare,compare,compare,delayed_compare,compare,compare,compare,compare")
(set_attr "length" "4,4,4,4,8,8,8,8")])
+(define_insn "*andsi3_internal3"
+ [(set (match_operand:CC 0 "cc_reg_operand" "=x,x,x,x,?y,??y,??y,?y")
+ (compare:CC (and:SI (match_operand:SI 1 "gpc_reg_operand" "%r,r,r,r,r,r,r,r")
+ (match_operand:SI 2 "and_operand" "r,K,L,T,r,K,L,T"))
+ (const_int 0)))
+ (clobber (match_scratch:SI 3 "=r,r,r,r,r,r,r,r"))
+ (clobber (match_scratch:CC 4 "=X,X,X,X,X,x,x,X"))]
+ "TARGET_POWERPC64"
+ "@
+ #
+ {andil.|andi.} %3,%1,%b2
+ {andiu.|andis.} %3,%1,%u2
+ {rlinm.|rlwinm.} %3,%1,0,%m2,%M2
+ #
+ #
+ #
+ #"
+ [(set_attr "type" "compare,compare,compare,delayed_compare,compare,compare,compare,compare")
+ (set_attr "length" "8,4,4,4,8,8,8,8")])
+
(define_split
[(set (match_operand:CC 0 "cc_reg_not_cr0_operand" "")
(compare:CC (and:SI (match_operand:SI 1 "gpc_reg_operand" "")
@@ -2915,7 +2935,7 @@
(const_int 0)))
(clobber (match_scratch:SI 3 ""))
(clobber (match_scratch:CC 4 ""))]
- "! TARGET_POWERPC64 && reload_completed"
+ "reload_completed"
[(parallel [(set (match_dup 3)
(and:SI (match_dup 1)
(match_dup 2)))
@@ -2925,7 +2945,27 @@
(const_int 0)))]
"")
-(define_insn "*andsi3_internal3"
+;; We don't have a 32 bit "and. rt,ra,rb" for ppc64. cr is set from the
+;; whole 64 bit reg, and we don't know what is in the high 32 bits.
+
+(define_split
+ [(set (match_operand:CC 0 "cc_reg_operand" "")
+ (compare:CC (and:SI (match_operand:SI 1 "gpc_reg_operand" "")
+ (match_operand:SI 2 "gpc_reg_operand" ""))
+ (const_int 0)))
+ (clobber (match_scratch:SI 3 ""))
+ (clobber (match_scratch:CC 4 ""))]
+ "TARGET_POWERPC64 && reload_completed"
+ [(parallel [(set (match_dup 3)
+ (and:SI (match_dup 1)
+ (match_dup 2)))
+ (clobber (match_dup 4))])
+ (set (match_dup 0)
+ (compare:CC (match_dup 3)
+ (const_int 0)))]
+ "")
+
+(define_insn "*andsi3_internal4"
[(set (match_operand:CC 3 "cc_reg_operand" "=x,x,x,x,?y,??y,??y,?y")
(compare:CC (and:SI (match_operand:SI 1 "gpc_reg_operand" "%r,r,r,r,r,r,r,r")
(match_operand:SI 2 "and_operand" "r,K,L,T,r,K,L,T"))
@@ -2947,6 +2987,28 @@
[(set_attr "type" "compare,compare,compare,delayed_compare,compare,compare,compare,compare")
(set_attr "length" "4,4,4,4,8,8,8,8")])
+(define_insn "*andsi3_internal5"
+ [(set (match_operand:CC 3 "cc_reg_operand" "=x,x,x,x,?y,??y,??y,?y")
+ (compare:CC (and:SI (match_operand:SI 1 "gpc_reg_operand" "%r,r,r,r,r,r,r,r")
+ (match_operand:SI 2 "and_operand" "r,K,L,T,r,K,L,T"))
+ (const_int 0)))
+ (set (match_operand:SI 0 "gpc_reg_operand" "=r,r,r,r,r,r,r,r")
+ (and:SI (match_dup 1)
+ (match_dup 2)))
+ (clobber (match_scratch:CC 4 "=X,X,X,X,X,x,x,X"))]
+ "TARGET_POWERPC64"
+ "@
+ #
+ {andil.|andi.} %0,%1,%b2
+ {andiu.|andis.} %0,%1,%u2
+ {rlinm.|rlwinm.} %0,%1,0,%m2,%M2
+ #
+ #
+ #
+ #"
+ [(set_attr "type" "compare,compare,compare,delayed_compare,compare,compare,compare,compare")
+ (set_attr "length" "8,4,4,4,8,8,8,8")])
+
(define_split
[(set (match_operand:CC 3 "cc_reg_not_cr0_operand" "")
(compare:CC (and:SI (match_operand:SI 1 "gpc_reg_operand" "")
@@ -2956,7 +3018,7 @@
(and:SI (match_dup 1)
(match_dup 2)))
(clobber (match_scratch:CC 4 ""))]
- "! TARGET_POWERPC64 && reload_completed"
+ "reload_completed"
[(parallel [(set (match_dup 0)
(and:SI (match_dup 1)
(match_dup 2)))
@@ -2966,6 +3028,104 @@
(const_int 0)))]
"")
+(define_split
+ [(set (match_operand:CC 3 "cc_reg_operand" "")
+ (compare:CC (and:SI (match_operand:SI 1 "gpc_reg_operand" "")
+ (match_operand:SI 2 "gpc_reg_operand" ""))
+ (const_int 0)))
+ (set (match_operand:SI 0 "gpc_reg_operand" "")
+ (and:SI (match_dup 1)
+ (match_dup 2)))
+ (clobber (match_scratch:CC 4 ""))]
+ "TARGET_POWERPC64 && reload_completed"
+ [(parallel [(set (match_dup 0)
+ (and:SI (match_dup 1)
+ (match_dup 2)))
+ (clobber (match_dup 4))])
+ (set (match_dup 3)
+ (compare:CC (match_dup 0)
+ (const_int 0)))]
+ "")
+
+;; Handle the PowerPC64 rlwinm corner case
+
+(define_insn_and_split "*andsi3_internal6"
+ [(set (match_operand:SI 0 "gpc_reg_operand" "=r")
+ (and:SI (match_operand:SI 1 "gpc_reg_operand" "r")
+ (match_operand:SI 2 "mask_operand_wrap" "i")))]
+ "TARGET_POWERPC64"
+ "#"
+ "TARGET_POWERPC64"
+ [(set (match_dup 0)
+ (and:SI (rotate:SI (match_dup 1) (match_dup 3))
+ (match_dup 4)))
+ (set (match_dup 0)
+ (rotate:SI (match_dup 0) (match_dup 5)))]
+ "
+{
+ int mb = extract_MB (operands[2]);
+ int me = extract_ME (operands[2]);
+ operands[3] = GEN_INT (me + 1);
+ operands[5] = GEN_INT (32 - (me + 1));
+ operands[4] = GEN_INT (~((HOST_WIDE_INT) -1 << (33 + me - mb)));
+}"
+ [(set_attr "length" "8")])
+
+(define_insn_and_split "*andsi3_internal7"
+ [(set (match_operand:CC 2 "cc_reg_operand" "=x,?y")
+ (compare:CC (and:SI (match_operand:SI 0 "gpc_reg_operand" "r,r")
+ (match_operand:SI 1 "mask_operand_wrap" "i,i"))
+ (const_int 0)))
+ (clobber (match_scratch:SI 3 "=r,r"))]
+ "TARGET_POWERPC64"
+ "#"
+ "TARGET_POWERPC64"
+ [(parallel [(set (match_dup 2)
+ (compare:CC (and:SI (rotate:SI (match_dup 0) (match_dup 4))
+ (match_dup 5))
+ (const_int 0)))
+ (clobber (match_dup 3))])]
+ "
+{
+ int mb = extract_MB (operands[1]);
+ int me = extract_ME (operands[1]);
+ operands[4] = GEN_INT (me + 1);
+ operands[5] = GEN_INT (~((HOST_WIDE_INT) -1 << (33 + me - mb)));
+}"
+ [(set_attr "type" "delayed_compare,compare")
+ (set_attr "length" "4,8")])
+
+(define_insn_and_split "*andsi3_internal8"
+ [(set (match_operand:CC 3 "cc_reg_operand" "=x,??y")
+ (compare:CC (and:SI (match_operand:SI 1 "gpc_reg_operand" "r,r")
+ (match_operand:SI 2 "mask_operand_wrap" "i,i"))
+ (const_int 0)))
+ (set (match_operand:SI 0 "gpc_reg_operand" "=r,r")
+ (and:SI (match_dup 1)
+ (match_dup 2)))]
+ "TARGET_POWERPC64"
+ "#"
+ "TARGET_POWERPC64"
+ [(parallel [(set (match_dup 3)
+ (compare:CC (and:SI (rotate:SI (match_dup 1) (match_dup 4))
+ (match_dup 5))
+ (const_int 0)))
+ (set (match_dup 0)
+ (and:SI (rotate:SI (match_dup 1) (match_dup 4))
+ (match_dup 5)))])
+ (set (match_dup 0)
+ (rotate:SI (match_dup 0) (match_dup 6)))]
+ "
+{
+ int mb = extract_MB (operands[2]);
+ int me = extract_ME (operands[2]);
+ operands[4] = GEN_INT (me + 1);
+ operands[6] = GEN_INT (32 - (me + 1));
+ operands[5] = GEN_INT (~((HOST_WIDE_INT) -1 << (33 + me - mb)));
+}"
+ [(set_attr "type" "delayed_compare,compare")
+ (set_attr "length" "8,12")])
+
(define_expand "iorsi3"
[(set (match_operand:SI 0 "gpc_reg_operand" "")
(ior:SI (match_operand:SI 1 "gpc_reg_operand" "")
@@ -7235,24 +7395,47 @@
"")
(define_insn "anddi3"
- [(set (match_operand:DI 0 "gpc_reg_operand" "=r,r,r,r")
- (and:DI (match_operand:DI 1 "gpc_reg_operand" "%r,r,r,r")
- (match_operand:DI 2 "and64_operand" "?r,S,K,J")))
- (clobber (match_scratch:CC 3 "=X,X,x,x"))]
+ [(set (match_operand:DI 0 "gpc_reg_operand" "=r,r,r,r,r")
+ (and:DI (match_operand:DI 1 "gpc_reg_operand" "%r,r,r,r,r")
+ (match_operand:DI 2 "and64_2_operand" "?r,S,K,J,t")))
+ (clobber (match_scratch:CC 3 "=X,X,x,x,X"))]
"TARGET_POWERPC64"
"@
and %0,%1,%2
rldic%B2 %0,%1,0,%S2
andi. %0,%1,%b2
- andis. %0,%1,%u2")
+ andis. %0,%1,%u2
+ #"
+ [(set_attr "length" "4,4,4,4,8")])
+
+(define_split
+ [(set (match_operand:DI 0 "gpc_reg_operand" "")
+ (and:DI (match_operand:DI 1 "gpc_reg_operand" "")
+ (match_operand:DI 2 "mask64_2_operand" "")))
+ (clobber (match_scratch:CC 3 ""))]
+ "TARGET_POWERPC64
+ && (fixed_regs[CR0_REGNO] || !logical_operand (operands[2], DImode))
+ && !mask64_operand (operands[2], DImode)"
+ [(set (match_dup 0)
+ (and:DI (rotate:DI (match_dup 1)
+ (match_dup 4))
+ (match_dup 5)))
+ (set (match_dup 0)
+ (and:DI (rotate:DI (match_dup 0)
+ (match_dup 6))
+ (match_dup 7)))]
+ "
+{
+ build_mask64_2_operands (operands[2], &operands[4]);
+}")
(define_insn "*anddi3_internal2"
- [(set (match_operand:CC 0 "cc_reg_operand" "=x,x,x,x,?y,?y,??y,??y")
- (compare:CC (and:DI (match_operand:DI 1 "gpc_reg_operand" "%r,r,r,r,r,r,r,")
- (match_operand:DI 2 "and64_operand" "r,S,K,J,r,S,K,J"))
+ [(set (match_operand:CC 0 "cc_reg_operand" "=x,x,x,x,x,?y,?y,??y,??y,?y")
+ (compare:CC (and:DI (match_operand:DI 1 "gpc_reg_operand" "%r,r,r,r,r,r,r,r,r,r")
+ (match_operand:DI 2 "and64_2_operand" "r,S,K,J,t,r,S,K,J,t"))
(const_int 0)))
- (clobber (match_scratch:DI 3 "=r,r,r,r,r,r,r,r"))
- (clobber (match_scratch:CC 4 "=X,X,X,X,X,X,x,x"))]
+ (clobber (match_scratch:DI 3 "=r,r,r,r,r,r,r,r,r,r"))
+ (clobber (match_scratch:CC 4 "=X,X,X,X,X,X,X,x,x,X"))]
"TARGET_POWERPC64"
"@
and. %3,%1,%2
@@ -7262,9 +7445,11 @@
#
#
#
+ #
+ #
#"
- [(set_attr "type" "compare,delayed_compare,compare,compare,compare,delayed_compare,compare,compare")
- (set_attr "length" "4,4,4,4,8,8,8,8")])
+ [(set_attr "type" "compare,delayed_compare,compare,compare,delayed_compare,compare,compare,compare,compare,compare")
+ (set_attr "length" "4,4,4,4,8,8,8,8,8,12")])
(define_split
[(set (match_operand:CC 0 "cc_reg_not_cr0_operand" "")
@@ -7283,14 +7468,39 @@
(const_int 0)))]
"")
+(define_split
+ [(set (match_operand:CC 0 "cc_reg_operand" "")
+ (compare:CC (and:DI (match_operand:DI 1 "gpc_reg_operand" "")
+ (match_operand:DI 2 "mask64_2_operand" ""))
+ (const_int 0)))
+ (clobber (match_scratch:DI 3 ""))
+ (clobber (match_scratch:CC 4 ""))]
+ "TARGET_POWERPC64 && reload_completed
+ && (fixed_regs[CR0_REGNO] || !logical_operand (operands[2], DImode))
+ && !mask64_operand (operands[2], DImode)"
+ [(set (match_dup 3)
+ (and:DI (rotate:DI (match_dup 1)
+ (match_dup 5))
+ (match_dup 6)))
+ (parallel [(set (match_dup 0)
+ (compare:CC (and:DI (rotate:DI (match_dup 3)
+ (match_dup 7))
+ (match_dup 8))
+ (const_int 0)))
+ (clobber (match_dup 3))])]
+ "
+{
+ build_mask64_2_operands (operands[2], &operands[5]);
+}")
+
(define_insn "*anddi3_internal3"
- [(set (match_operand:CC 3 "cc_reg_operand" "=x,x,x,x,?y,?y,??y,??y")
- (compare:CC (and:DI (match_operand:DI 1 "gpc_reg_operand" "%r,r,r,r,r,r,r,r")
- (match_operand:DI 2 "and64_operand" "r,S,K,J,r,S,K,J"))
+ [(set (match_operand:CC 3 "cc_reg_operand" "=x,x,x,x,x,?y,?y,??y,??y,?y")
+ (compare:CC (and:DI (match_operand:DI 1 "gpc_reg_operand" "%r,r,r,r,r,r,r,r,r,r")
+ (match_operand:DI 2 "and64_2_operand" "r,S,K,J,t,r,S,K,J,t"))
(const_int 0)))
- (set (match_operand:DI 0 "gpc_reg_operand" "=r,r,r,r,r,r,r,r")
+ (set (match_operand:DI 0 "gpc_reg_operand" "=r,r,r,r,r,r,r,r,r,r")
(and:DI (match_dup 1) (match_dup 2)))
- (clobber (match_scratch:CC 4 "=X,X,X,X,X,X,x,x"))]
+ (clobber (match_scratch:CC 4 "=X,X,X,X,X,X,X,x,x,X"))]
"TARGET_POWERPC64"
"@
and. %0,%1,%2
@@ -7300,9 +7510,11 @@
#
#
#
+ #
+ #
#"
- [(set_attr "type" "compare,delayed_compare,compare,compare,compare,delayed_compare,compare,compare")
- (set_attr "length" "4,4,4,4,8,8,8,8")])
+ [(set_attr "type" "compare,delayed_compare,compare,compare,delayed_compare,compare,compare,compare,compare,compare")
+ (set_attr "length" "4,4,4,4,8,8,8,8,8,12")])
(define_split
[(set (match_operand:CC 3 "cc_reg_not_cr0_operand" "")
@@ -7321,6 +7533,35 @@
(const_int 0)))]
"")
+(define_split
+ [(set (match_operand:CC 3 "cc_reg_operand" "")
+ (compare:CC (and:DI (match_operand:DI 1 "gpc_reg_operand" "")
+ (match_operand:DI 2 "mask64_2_operand" ""))
+ (const_int 0)))
+ (set (match_operand:DI 0 "gpc_reg_operand" "")
+ (and:DI (match_dup 1) (match_dup 2)))
+ (clobber (match_scratch:CC 4 ""))]
+ "TARGET_POWERPC64 && reload_completed
+ && (fixed_regs[CR0_REGNO] || !logical_operand (operands[2], DImode))
+ && !mask64_operand (operands[2], DImode)"
+ [(set (match_dup 0)
+ (and:DI (rotate:DI (match_dup 1)
+ (match_dup 5))
+ (match_dup 6)))
+ (parallel [(set (match_dup 3)
+ (compare:CC (and:DI (rotate:DI (match_dup 0)
+ (match_dup 7))
+ (match_dup 8))
+ (const_int 0)))
+ (set (match_dup 0)
+ (and:DI (rotate:DI (match_dup 0)
+ (match_dup 7))
+ (match_dup 8)))])]
+ "
+{
+ build_mask64_2_operands (operands[2], &operands[5]);
+}")
+
(define_expand "iordi3"
[(set (match_operand:DI 0 "gpc_reg_operand" "")
(ior:DI (match_operand:DI 1 "gpc_reg_operand" "")
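
Finally, a standalone sketch (not GCC code; rotl32 and and_wrap_mask are illustrative helpers) of the transformation the new andsi3_internal6/7/8 splits perform for the rlwinm corner case: a mask whose encoding wraps (MB > ME) is applied as a rotate by ME+1, an AND with a contiguous low mask of 33+ME-MB bits, and a rotate back, exactly as the operands computed in the splits above describe:

#include <stdint.h>
#include <stdio.h>

static uint32_t
rotl32 (uint32_t x, unsigned int n)
{
  n &= 31;
  return n ? (x << n) | (x >> (32 - n)) : x;
}

/* AND with the wrap-around mask described by MB/ME (MB > ME), using the
   rotate / mask / rotate-back sequence the split emits.  */
static uint32_t
and_wrap_mask (uint32_t x, int mb, int me)
{
  uint32_t low = ~(~0u << (33 + me - mb));	/* 33+me-mb low-order 1s */

  return rotl32 (rotl32 (x, me + 1) & low, 32 - (me + 1));
}

int
main (void)
{
  uint32_t x = 0x12345678u;

  /* 0xff0000ff has MB 24, ME 7.  */
  printf ("%d\n", and_wrap_mask (x, 24, 7) == (x & 0xff0000ffu));	/* prints 1 */
  return 0;
}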