;; shift pair, instead using moves and sign extension for counts greater
;; than 31.
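+;;
+;; As a rough illustration of that constant-count strategy (a hedged C
+;; sketch with made-up names, not anything the patterns below emit): for a
+;; count n with 32 <= n <= 63 the shift reduces to a word move, a clear
+;; (xor) or a sign fill (sar $31), and at most one remaining 32-bit shift.
+;;
+;;   /* DImode value kept as two 32-bit halves, *lo and *hi.  */
+;;   void di_shl_ge32 (unsigned int *lo, unsigned int *hi, int n)
+;;   {
+;;     *hi = *lo;                /* movl %lo,%hi -- the "shift by 32"  */
+;;     *lo = 0;                  /* xorl %lo,%lo                       */
+;;     if (n > 32)
+;;       *hi <<= n - 32;         /* sall $n-32,%hi -- remaining shift  */
+;;   }
+;;
+;;   void di_ashr_ge32 (unsigned int *lo, int *hi, int n)
+;;   {
+;;     *lo = *hi;                /* movl %hi,%lo                       */
+;;     *hi >>= 31;               /* sarl $31,%hi -- sign extension     */
+;;     if (n > 32)               /* assumes >> on int is arithmetic    */
+;;       *lo = (int) *lo >> (n - 32);
+;;   }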
-(define_insn "ashldi3"
- [(set (match_operand:DI 0 "general_operand" "=&r")
- (ashift:DI (match_operand:DI 1 "general_operand" "0")
- (match_operand:QI 2 "general_operand" "cJ")))
- (clobber (match_dup 2))]
+(define_expand "ashldi3"
+ [(set (match_operand:DI 0 "register_operand" "")
+ (ashift:DI (match_operand:DI 1 "register_operand" "")
+ (match_operand:QI 2 "nonmemory_operand" "")))]
+ ""
+ "
+{
+ if (GET_CODE (operands[2]) != CONST_INT
+ || ! CONST_OK_FOR_LETTER_P (INTVAL (operands[2]), 'J'))
+ {
+ operands[2] = copy_to_mode_reg (QImode, operands[2]);
+ emit_insn (gen_ashldi3_non_const_int (operands[0], operands[1],
+ operands[2]));
+ }
+ else
+ emit_insn (gen_ashldi3_const_int (operands[0], operands[1], operands[2]));
+
+ DONE;
+}")
+
+(define_insn "ashldi3_const_int"
+ [(set (match_operand:DI 0 "register_operand" "=&r")
+ (ashift:DI (match_operand:DI 1 "register_operand" "0")
+ (match_operand:QI 2 "const_int_operand" "J")))]
""
"*
{
xops[2] = low[0];
xops[3] = high[0];
- if (REG_P (xops[0])) /* If shift count in %cl */
+ if (INTVAL (xops[0]) > 31)
{
- output_asm_insn (AS2 (ror%B0,%1,%0), xops); /* shift count / 2 */
+ output_asm_insn (AS2 (mov%L3,%2,%3), xops); /* Fast shift by 32 */
+ output_asm_insn (AS2 (xor%L2,%2,%2), xops);
- output_asm_insn (AS2 (shld%L3,%2,%3), xops);
- output_asm_insn (AS2 (sal%L2,%0,%2), xops);
- output_asm_insn (AS2 (shld%L3,%2,%3), xops);
+ if (INTVAL (xops[0]) > 32)
+ {
+ xops[0] = gen_rtx (CONST_INT, VOIDmode, INTVAL (xops[0]) - 32);
+
+ output_asm_insn (AS2 (sal%L3,%0,%3), xops); /* Remaining shift */
+ }
+ }
+ else
+ {
+ output_asm_insn (AS3 (shld%L3,%0,%2,%3), xops);
output_asm_insn (AS2 (sal%L2,%0,%2), xops);
+ }
+ RET;
+}")
- xops[1] = gen_rtx (CONST_INT, VOIDmode, 7); /* shift count & 1 */
+(define_insn "ashldi3_non_const_int"
+ [(set (match_operand:DI 0 "register_operand" "=&r")
+ (ashift:DI (match_operand:DI 1 "register_operand" "0")
+ (match_operand:QI 2 "register_operand" "c")))
+ (clobber (match_dup 2))]
+ ""
+ "*
+{
+ rtx xops[4], low[1], high[1];
- output_asm_insn (AS2 (shr%B0,%1,%0), xops);
+ CC_STATUS_INIT;
- output_asm_insn (AS2 (shld%L3,%2,%3), xops);
- output_asm_insn (AS2 (sal%L2,%0,%2), xops);
- }
- else if (GET_CODE (xops[0]) == CONST_INT)
- {
- if (INTVAL (xops[0]) > 31)
- {
- output_asm_insn (AS2 (mov%L3,%2,%3), xops); /* Fast shift by 32 */
- output_asm_insn (AS2 (xor%L2,%2,%2), xops);
+ split_di (operands, 1, low, high);
+ xops[0] = operands[2];
+ xops[1] = const1_rtx;
+ xops[2] = low[0];
+ xops[3] = high[0];
- if (INTVAL (xops[0]) > 32)
- {
- xops[0] = gen_rtx (CONST_INT, VOIDmode, INTVAL (xops[0]) - 32);
+ output_asm_insn (AS2 (ror%B0,%1,%0), xops); /* shift count / 2 */
+
+ output_asm_insn (AS3_SHIFT_DOUBLE (shld%L3,%0,%2,%3), xops);
+ output_asm_insn (AS2 (sal%L2,%0,%2), xops);
+ output_asm_insn (AS3_SHIFT_DOUBLE (shld%L3,%0,%2,%3), xops);
+ output_asm_insn (AS2 (sal%L2,%0,%2), xops);
+
+ xops[1] = gen_rtx (CONST_INT, VOIDmode, 7); /* shift count & 1 */
+
+ output_asm_insn (AS2 (shr%B0,%1,%0), xops);
+
+ output_asm_insn (AS3_SHIFT_DOUBLE (shld%L3,%0,%2,%3), xops);
+ output_asm_insn (AS2 (sal%L2,%0,%2), xops);
- output_asm_insn (AS2 (sal%L3,%0,%3), xops); /* Remaining shift */
- }
- }
- else
- {
- output_asm_insn (AS3 (shld%L3,%0,%2,%3), xops);
- output_asm_insn (AS2 (sal%L2,%0,%2), xops);
- }
- }
RET;
}")
(define_insn "ashlsi3"
[(set (match_operand:SI 0 "general_operand" "=r,rm")
(ashift:SI (match_operand:SI 1 "general_operand" "r,0")
- (match_operand:SI 2 "general_operand" "M,cI")))]
+ (match_operand:SI 2 "nonmemory_operand" "M,cI")))]
""
"*
{
(define_insn "ashlhi3"
[(set (match_operand:HI 0 "general_operand" "=rm")
(ashift:HI (match_operand:HI 1 "general_operand" "0")
- (match_operand:HI 2 "general_operand" "cI")))]
+ (match_operand:HI 2 "nonmemory_operand" "cI")))]
""
"*
{
(define_insn "ashlqi3"
[(set (match_operand:QI 0 "general_operand" "=qm")
(ashift:QI (match_operand:QI 1 "general_operand" "0")
- (match_operand:QI 2 "general_operand" "cI")))]
+ (match_operand:QI 2 "nonmemory_operand" "cI")))]
""
"*
{
;; See comment above `ashldi3' about how this works.
-(define_insn "ashrdi3"
- [(set (match_operand:DI 0 "general_operand" "=&r")
- (ashiftrt:DI (match_operand:DI 1 "general_operand" "0")
- (match_operand:QI 2 "general_operand" "cJ")))
- (clobber (match_dup 2))]
+(define_expand "ashrdi3"
+ [(set (match_operand:DI 0 "register_operand" "")
+ (ashiftrt:DI (match_operand:DI 1 "register_operand" "")
+ (match_operand:QI 2 "nonmemory_operand" "")))]
+ ""
+ "
+{
+ if (GET_CODE (operands[2]) != CONST_INT
+ || ! CONST_OK_FOR_LETTER_P (INTVAL (operands[2]), 'J'))
+ {
+ operands[2] = copy_to_mode_reg (QImode, operands[2]);
+ emit_insn (gen_ashrdi3_non_const_int (operands[0], operands[1],
+ operands[2]));
+ }
+ else
+ emit_insn (gen_ashrdi3_const_int (operands[0], operands[1], operands[2]));
+
+ DONE;
+}")
+
+(define_insn "ashrdi3_const_int"
+ [(set (match_operand:DI 0 "register_operand" "=&r")
+ (ashiftrt:DI (match_operand:DI 1 "register_operand" "0")
+ (match_operand:QI 2 "const_int_operand" "J")))]
""
"*
{
- rtx xops[5], low[1], high[1];
+ rtx xops[4], low[1], high[1];
CC_STATUS_INIT;
xops[2] = low[0];
xops[3] = high[0];
- if (REG_P (xops[0])) /* If shift count in %cl */
+ if (INTVAL (xops[0]) > 31)
{
- output_asm_insn (AS2 (ror%B0,%1,%0), xops); /* shift count / 2 */
+ xops[1] = gen_rtx (CONST_INT, VOIDmode, 31);
+ output_asm_insn (AS2 (mov%L2,%3,%2), xops);
+ output_asm_insn (AS2 (sar%L3,%1,%3), xops); /* shift by 32 */
- output_asm_insn (AS2 (shrd%L2,%3,%2), xops);
- output_asm_insn (AS2 (sar%L3,%0,%3), xops);
- output_asm_insn (AS2 (shrd%L2,%3,%2), xops);
+ if (INTVAL (xops[0]) > 32)
+ {
+ xops[0] = gen_rtx (CONST_INT, VOIDmode, INTVAL (xops[0]) - 32);
+
+ output_asm_insn (AS2 (sar%L2,%0,%2), xops); /* Remaining shift */
+ }
+ }
+ else
+ {
+ output_asm_insn (AS3 (shrd%L2,%0,%3,%2), xops);
output_asm_insn (AS2 (sar%L3,%0,%3), xops);
+ }
- xops[1] = gen_rtx (CONST_INT, VOIDmode, 7); /* shift count & 1 */
+ RET;
+}")
- output_asm_insn (AS2 (shr%B0,%1,%0), xops);
+(define_insn "ashrdi3_non_const_int"
+ [(set (match_operand:DI 0 "register_operand" "=&r")
+ (ashiftrt:DI (match_operand:DI 1 "register_operand" "0")
+ (match_operand:QI 2 "register_operand" "c")))
+ (clobber (match_dup 2))]
+ ""
+ "*
+{
+ rtx xops[4], low[1], high[1];
- output_asm_insn (AS2 (shrd%L2,%3,%2), xops);
- output_asm_insn (AS2 (sar%L3,%0,%3), xops);
- }
- else if (GET_CODE (xops[0]) == CONST_INT)
- {
- if (INTVAL (xops[0]) > 31)
- {
- xops[1] = gen_rtx (CONST_INT, VOIDmode, 31);
- output_asm_insn (AS2 (mov%L2,%3,%2), xops);
- output_asm_insn (AS2 (sar%L3,%1,%3), xops); /* shift by 32 */
+ CC_STATUS_INIT;
- if (INTVAL (xops[0]) > 32)
- {
- xops[0] = gen_rtx (CONST_INT, VOIDmode, INTVAL (xops[0]) - 32);
+ split_di (operands, 1, low, high);
+ xops[0] = operands[2];
+ xops[1] = const1_rtx;
+ xops[2] = low[0];
+ xops[3] = high[0];
+
+ output_asm_insn (AS2 (ror%B0,%1,%0), xops); /* shift count / 2 */
+
+ output_asm_insn (AS3_SHIFT_DOUBLE (shrd%L2,%0,%3,%2), xops);
+ output_asm_insn (AS2 (sar%L3,%0,%3), xops);
+ output_asm_insn (AS3_SHIFT_DOUBLE (shrd%L2,%0,%3,%2), xops);
+ output_asm_insn (AS2 (sar%L3,%0,%3), xops);
+
+ xops[1] = gen_rtx (CONST_INT, VOIDmode, 7); /* shift count & 1 */
+
+ output_asm_insn (AS2 (shr%B0,%1,%0), xops);
+
+ output_asm_insn (AS3_SHIFT_DOUBLE (shrd%L2,%0,%3,%2), xops);
+ output_asm_insn (AS2 (sar%L3,%0,%3), xops);
- output_asm_insn (AS2 (sar%L2,%0,%2), xops); /* Remaining shift */
- }
- }
- else
- {
- output_asm_insn (AS3 (shrd%L2,%0,%3,%2), xops);
- output_asm_insn (AS2 (sar%L3,%0,%3), xops);
- }
- }
RET;
}")
(define_insn "ashrsi3"
[(set (match_operand:SI 0 "general_operand" "=rm")
(ashiftrt:SI (match_operand:SI 1 "general_operand" "0")
- (match_operand:SI 2 "general_operand" "cI")))]
+ (match_operand:SI 2 "nonmemory_operand" "cI")))]
""
"*
{
(define_insn "ashrhi3"
[(set (match_operand:HI 0 "general_operand" "=rm")
(ashiftrt:HI (match_operand:HI 1 "general_operand" "0")
- (match_operand:HI 2 "general_operand" "cI")))]
+ (match_operand:HI 2 "nonmemory_operand" "cI")))]
""
"*
{
(define_insn "ashrqi3"
[(set (match_operand:QI 0 "general_operand" "=qm")
(ashiftrt:QI (match_operand:QI 1 "general_operand" "0")
- (match_operand:QI 2 "general_operand" "cI")))]
+ (match_operand:QI 2 "nonmemory_operand" "cI")))]
""
"*
{
;; See comment above `ashldi3' about how this works.
-(define_insn "lshrdi3"
- [(set (match_operand:DI 0 "general_operand" "=&r")
- (lshiftrt:DI (match_operand:DI 1 "general_operand" "0")
- (match_operand:QI 2 "general_operand" "cJ")))
- (clobber (match_dup 2))]
+(define_expand "lshrdi3"
+ [(set (match_operand:DI 0 "register_operand" "")
+ (lshiftrt:DI (match_operand:DI 1 "register_operand" "")
+ (match_operand:QI 2 "nonmemory_operand" "")))]
+ ""
+ "
+{
+ if (GET_CODE (operands[2]) != CONST_INT
+ || ! CONST_OK_FOR_LETTER_P (INTVAL (operands[2]), 'J'))
+ {
+ operands[2] = copy_to_mode_reg (QImode, operands[2]);
+ emit_insn (gen_lshrdi3_non_const_int (operands[0], operands[1],
+ operands[2]));
+ }
+ else
+ emit_insn (gen_lshrdi3_const_int (operands[0], operands[1], operands[2]));
+
+ DONE;
+}")
+
+(define_insn "lshrdi3_const_int"
+ [(set (match_operand:DI 0 "register_operand" "=&r")
+ (lshiftrt:DI (match_operand:DI 1 "register_operand" "0")
+ (match_operand:QI 2 "const_int_operand" "J")))]
""
"*
{
- rtx xops[5], low[1], high[1];
+ rtx xops[4], low[1], high[1];
CC_STATUS_INIT;
xops[2] = low[0];
xops[3] = high[0];
- if (REG_P (xops[0])) /* If shift count in %cl */
+ if (INTVAL (xops[0]) > 31)
{
- output_asm_insn (AS2 (ror%B0,%1,%0), xops); /* shift count / 2 */
+ output_asm_insn (AS2 (mov%L2,%3,%2), xops); /* Fast shift by 32 */
+ output_asm_insn (AS2 (xor%L3,%3,%3), xops);
- output_asm_insn (AS2 (shrd%L2,%3,%2), xops);
- output_asm_insn (AS2 (shr%L3,%0,%3), xops);
- output_asm_insn (AS2 (shrd%L2,%3,%2), xops);
+ if (INTVAL (xops[0]) > 32)
+ {
+ xops[0] = gen_rtx (CONST_INT, VOIDmode, INTVAL (xops[0]) - 32);
+
+ output_asm_insn (AS2 (shr%L2,%0,%2), xops); /* Remaining shift */
+ }
+ }
+ else
+ {
+ output_asm_insn (AS3 (shrd%L2,%0,%3,%2), xops);
output_asm_insn (AS2 (shr%L3,%0,%3), xops);
+ }
- xops[1] = gen_rtx (CONST_INT, VOIDmode, 7); /* shift count & 1 */
+ RET;
+}")
- output_asm_insn (AS2 (shr%B0,%1,%0), xops);
+(define_insn "lshrdi3_non_const_int"
+ [(set (match_operand:DI 0 "register_operand" "=&r")
+ (lshiftrt:DI (match_operand:DI 1 "register_operand" "0")
+ (match_operand:QI 2 "register_operand" "c")))
+ (clobber (match_dup 2))]
+ ""
+ "*
+{
+ rtx xops[4], low[1], high[1];
- output_asm_insn (AS2 (shrd%L2,%3,%2), xops);
- output_asm_insn (AS2 (shr%L3,%0,%3), xops);
- }
- else if (GET_CODE (xops[0]) == CONST_INT)
- {
- if (INTVAL (xops[0]) > 31)
- {
- output_asm_insn (AS2 (mov%L2,%3,%2), xops); /* Fast shift by 32 */
- output_asm_insn (AS2 (xor%L3,%3,%3), xops);
+ CC_STATUS_INIT;
- if (INTVAL (xops[0]) > 32)
- {
- xops[0] = gen_rtx (CONST_INT, VOIDmode, INTVAL (xops[0]) - 32);
+ split_di (operands, 1, low, high);
+ xops[0] = operands[2];
+ xops[1] = const1_rtx;
+ xops[2] = low[0];
+ xops[3] = high[0];
+
+ output_asm_insn (AS2 (ror%B0,%1,%0), xops); /* shift count / 2 */
+
+ output_asm_insn (AS3_SHIFT_DOUBLE (shrd%L2,%0,%3,%2), xops);
+ output_asm_insn (AS2 (shr%L3,%0,%3), xops);
+ output_asm_insn (AS3_SHIFT_DOUBLE (shrd%L2,%0,%3,%2), xops);
+ output_asm_insn (AS2 (shr%L3,%0,%3), xops);
+
+ xops[1] = gen_rtx (CONST_INT, VOIDmode, 7); /* shift count & 1 */
+
+ output_asm_insn (AS2 (shr%B0,%1,%0), xops);
+
+ output_asm_insn (AS3_SHIFT_DOUBLE (shrd%L2,%0,%3,%2), xops);
+ output_asm_insn (AS2 (shr%L3,%0,%3), xops);
- output_asm_insn (AS2 (shr%L2,%0,%2), xops); /* Remaining shift */
- }
- }
- else
- {
- output_asm_insn (AS3 (shrd%L2,%0,%3,%2), xops);
- output_asm_insn (AS2 (shr%L3,%0,%3), xops);
- }
- }
RET;
}")
(define_insn "lshrsi3"
[(set (match_operand:SI 0 "general_operand" "=rm")
(lshiftrt:SI (match_operand:SI 1 "general_operand" "0")
- (match_operand:SI 2 "general_operand" "cI")))]
+ (match_operand:SI 2 "nonmemory_operand" "cI")))]
""
"*
{
(define_insn "lshrhi3"
[(set (match_operand:HI 0 "general_operand" "=rm")
(lshiftrt:HI (match_operand:HI 1 "general_operand" "0")
- (match_operand:HI 2 "general_operand" "cI")))]
+ (match_operand:HI 2 "nonmemory_operand" "cI")))]
""
"*
{
(define_insn "lshrqi3"
[(set (match_operand:QI 0 "general_operand" "=qm")
(lshiftrt:QI (match_operand:QI 1 "general_operand" "0")
- (match_operand:QI 2 "general_operand" "cI")))]
+ (match_operand:QI 2 "nonmemory_operand" "cI")))]
""
"*
{
(define_insn "rotlsi3"
[(set (match_operand:SI 0 "general_operand" "=rm")
(rotate:SI (match_operand:SI 1 "general_operand" "0")
- (match_operand:SI 2 "general_operand" "cI")))]
+ (match_operand:SI 2 "nonmemory_operand" "cI")))]
""
"*
{
(define_insn "rotlhi3"
[(set (match_operand:HI 0 "general_operand" "=rm")
(rotate:HI (match_operand:HI 1 "general_operand" "0")
- (match_operand:HI 2 "general_operand" "cI")))]
+ (match_operand:HI 2 "nonmemory_operand" "cI")))]
""
"*
{
(define_insn "rotlqi3"
[(set (match_operand:QI 0 "general_operand" "=qm")
(rotate:QI (match_operand:QI 1 "general_operand" "0")
- (match_operand:QI 2 "general_operand" "cI")))]
+ (match_operand:QI 2 "nonmemory_operand" "cI")))]
""
"*
{
(define_insn "rotrsi3"
[(set (match_operand:SI 0 "general_operand" "=rm")
(rotatert:SI (match_operand:SI 1 "general_operand" "0")
- (match_operand:SI 2 "general_operand" "cI")))]
+ (match_operand:SI 2 "nonmemory_operand" "cI")))]
""
"*
{
(define_insn "rotrhi3"
[(set (match_operand:HI 0 "general_operand" "=rm")
(rotatert:HI (match_operand:HI 1 "general_operand" "0")
- (match_operand:HI 2 "general_operand" "cI")))]
+ (match_operand:HI 2 "nonmemory_operand" "cI")))]
""
"*
{
(define_insn "rotrqi3"
[(set (match_operand:QI 0 "general_operand" "=qm")
(rotatert:QI (match_operand:QI 1 "general_operand" "0")
- (match_operand:QI 2 "general_operand" "cI")))]
+ (match_operand:QI 2 "nonmemory_operand" "cI")))]
""
"*
{
[(set (zero_extract:SI (match_operand:SI 0 "general_operand" "+rm")
(const_int 1)
(match_operand:SI 2 "general_operand" "r"))
- (match_operand:SI 3 "immediate_operand" "i"))]
+ (match_operand:SI 3 "const_int_operand" "n"))]
"! TARGET_486 && GET_CODE (operands[2]) != CONST_INT"
"*
{
"nop")
(define_expand "movstrsi"
- [(parallel [(set (mem:BLK (match_operand:BLK 0 "general_operand" ""))
- (mem:BLK (match_operand:BLK 1 "general_operand" "")))
- (use (match_operand:SI 2 "immediate_operand" ""))
- (use (match_operand:SI 3 "immediate_operand" ""))
- (set (match_operand:SI 4 "register_operand" "")
- (const_int 0))
- (set (match_dup 0)
- (plus:SI (match_dup 0)
- (match_dup 2)))
- (set (match_dup 1)
- (plus:SI (match_dup 1)
- (match_dup 2)))])]
+ [(parallel [(set (mem:BLK (match_operand:BLK 0 "address_operand" ""))
+ (mem:BLK (match_operand:BLK 1 "address_operand" "")))
+ (use (match_operand:SI 2 "const_int_operand" ""))
+ (use (match_operand:SI 3 "const_int_operand" ""))
+ (clobber (match_scratch:SI 4 ""))
+ (clobber (match_dup 0))
+ (clobber (match_dup 1))])]
""
"
{
FAIL;
operands[0] = copy_to_mode_reg (SImode, XEXP (operands[0], 0));
operands[1] = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
- operands[4] = gen_reg_rtx (SImode);
}")
+;; It might seem that operands 0 & 1 could use predicate register_operand.
+;; But strength reduction might offset the MEM expression. So we let
+;; reload put the address into %edi & %esi.
+
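+;; The output template below loads the byte count divided by four into the
+;; scratch register (constrained to %ecx), which suggests a `rep movsl' of
+;; the doubleword part; the handling of the trailing 1-3 bytes is not shown
+;; in this excerpt.  So the following is only a hedged C model of a typical
+;; constant-count block move (made-up name, byte-wise tail assumed):
+;;
+;;   void blk_move (unsigned char *dst, const unsigned char *src,
+;;                  unsigned int count)
+;;   {
+;;     unsigned int dwords = count >> 2;   /* loaded into %ecx           */
+;;     while (dwords--)                    /* rep movsl                  */
+;;       {
+;;         *dst++ = *src++; *dst++ = *src++;
+;;         *dst++ = *src++; *dst++ = *src++;
+;;       }
+;;     count &= 3;                         /* remaining 0-3 bytes        */
+;;     while (count--)
+;;       *dst++ = *src++;
+;;   }
+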
(define_insn ""
- [(set (mem:BLK (match_operand:SI 0 "register_operand" "D"))
- (mem:BLK (match_operand:SI 1 "register_operand" "S")))
- (use (match_operand:SI 2 "immediate_operand" "n"))
+ [(set (mem:BLK (match_operand:SI 0 "address_operand" "D"))
+ (mem:BLK (match_operand:SI 1 "address_operand" "S")))
+ (use (match_operand:SI 2 "const_int_operand" "n"))
(use (match_operand:SI 3 "immediate_operand" "i"))
- (set (match_operand:SI 4 "register_operand" "c")
- (const_int 0))
- (set (match_operand:SI 5 "register_operand" "=0")
- (plus:SI (match_dup 0)
- (match_dup 2)))
- (set (match_operand:SI 7 "register_operand" "=1")
- (plus:SI (match_dup 1)
- (match_dup 2)))]
+ (clobber (match_scratch:SI 4 "=&c"))
+ (clobber (match_dup 0))
+ (clobber (match_dup 1))]
""
"*
{
if (INTVAL (operands[2]) & ~0x03)
{
xops[0] = gen_rtx (CONST_INT, VOIDmode, INTVAL (operands[2]) >> 2);
- xops[1] = gen_rtx (REG, SImode, 2);
+ xops[1] = operands[4];
output_asm_insn (AS2 (mov%L1,%0,%1), xops);
#ifdef INTEL_SYNTAX
(define_expand "cmpstrsi"
[(parallel [(set (match_operand:QI 0 "general_operand" "")
(compare:CC
- (mem:BLK (match_operand:BLK 1 "general_operand" ""))
- (mem:BLK (match_operand:BLK 2 "general_operand" ""))))
+ (mem:BLK (match_operand:BLK 1 "address_operand" ""))
+ (mem:BLK (match_operand:BLK 2 "address_operand" ""))))
(use (match_operand:SI 3 "general_operand" ""))
(use (match_operand:SI 4 "immediate_operand" ""))
(clobber (match_dup 1))
;; memcmp recognizers. The `cmpsb' opcode does nothing if the count is
;; zero. Emit extra code to make sure that a zero-length compare is EQ.
+;; It might seem that operands 0 & 1 could use predicate register_operand.
+;; But strength reduction might offset the MEM expression. So we let
+;; reload put the address into %edi & %esi.
+
;; ??? Most comparisons have a constant length, and it's therefore
;; possible to know that the length is non-zero, and to avoid the extra
;; code to handle zero-length compares.
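+;; Since the repeated `cmpsb' (count in %ecx) performs no comparison when
+;; the count is zero, the patterns must force an EQ result for zero-length
+;; compares.  A hedged C model of the required result -- purely
+;; illustrative, with a made-up name, not the code these patterns emit:
+;;
+;;   int blk_cmp (const unsigned char *s1, const unsigned char *s2,
+;;                unsigned int count)
+;;   {
+;;     if (count == 0)
+;;       return 0;                 /* zero length compares equal */
+;;     while (count--)             /* repz cmpsb                 */
+;;       {
+;;         if (*s1 != *s2)
+;;           return *s1 < *s2 ? -1 : 1;
+;;         s1++, s2++;
+;;       }
+;;     return 0;
+;;   }
+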
(define_insn ""
[(set (match_operand:QI 0 "general_operand" "=&q")
- (compare:CC (mem:BLK (match_operand:SI 1 "general_operand" "S"))
- (mem:BLK (match_operand:SI 2 "general_operand" "D"))))
- (use (match_operand:SI 3 "general_operand" "c"))
+ (compare:CC (mem:BLK (match_operand:SI 1 "address_operand" "S"))
+ (mem:BLK (match_operand:SI 2 "address_operand" "D"))))
+ (use (match_operand:SI 3 "register_operand" "c"))
(use (match_operand:SI 4 "immediate_operand" "i"))
(clobber (match_dup 1))
(clobber (match_dup 2))
(define_insn ""
[(set (cc0)
- (compare:CC (mem:BLK (match_operand:SI 0 "general_operand" "S"))
- (mem:BLK (match_operand:SI 1 "general_operand" "D"))))
- (use (match_operand:SI 2 "general_operand" "c"))
+ (compare:CC (mem:BLK (match_operand:SI 0 "address_operand" "S"))
+ (mem:BLK (match_operand:SI 1 "address_operand" "D"))))
+ (use (match_operand:SI 2 "register_operand" "c"))
(use (match_operand:SI 3 "immediate_operand" "i"))
(clobber (match_dup 0))
(clobber (match_dup 1))
operands[5] = gen_reg_rtx (SImode);
}")
+;; It might seem that operands 0 & 1 could use predicate register_operand.
+;; But strength reduction might offset the MEM expression. So we let
+;; reload put the address into %edi & %esi.
+
(define_insn ""
[(set (match_operand:SI 0 "register_operand" "=&c")
- (unspec:SI [(mem:BLK (match_operand:SI 1 "register_operand" "D"))
+ (unspec:SI [(mem:BLK (match_operand:SI 1 "address_operand" "D"))
(match_operand:QI 2 "register_operand" "a")
(match_operand:SI 3 "immediate_operand" "i")] 0))
(clobber (match_dup 1))]