This is the mail archive of the gcc-patches@gcc.gnu.org mailing list for the GCC project.


[PATCH 4/n, i386]: Merge and rewrite MMX move patterns using x64 and nox64 isa attribute


Hello!

The attached patch merges the _rex64 MMX move patterns with the base MMX
move patterns using the x64 and nox64 isa attributes. Additionally, the
patch rewrites the MMX move patterns to look like the DImode move pattern
(keeping all decorations of the various alternatives), introducing all
recent improvements. The patch also handles MMX/SSE interunit moves
correctly for non-TARGET_INTER_UNIT_MOVES targets (this is the reason
for the testsuite change).

A follow-up patch will merge the V2SF move patterns.

2013-03-22  Uros Bizjak  <ubizjak@gmail.com>

	* config/i386/mmx.md (*mov<mode>_internal): Merge with
	*mov<mode>_internal_rex64.  Use x64 and nox64 isa attributes.
	Emit insn template depending on type attribute.  Use
	HAVE_AS_IX86_INTERUNIT_MOVQ to handle broken assemblers that require
	movd instead of movq mnemonic for interunit moves.  Rewrite mode
	attribute calculation.  Remove unit attribute calculation.
	Set prefix attribute to maybe_vex for sselog1 and ssemov types.
	Set prefix_data16 attribute for DImode ssemov types.
	Use Ym instead of y for SSE-MMX conversion alternatives.
	Reorder operand constraints.
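
As an aside (also not part of the patch), the SSE <-> MMX conversion
alternatives that now use the Ym constraint correspond to code along the
lines of the following sketch; the function name is arbitrary:

#include <emmintrin.h>

/* The low quadword of an SSE register is moved to an MMX register,
   doubled with an MMX add so it has to live in an %mm register, and
   moved back.  Depending on tuning, the compiler may use the
   movdq2q/movq2dq (ssecvt) alternatives or go through memory.  */
__m128i
low_qword_double (__m128i x)
{
  __m64 t = _mm_movepi64_pi64 (x);   /* SSE -> MMX */
  t = _mm_add_pi32 (t, t);
  return _mm_movpi64_epi64 (t);      /* MMX -> SSE */
}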

testsuite/ChangeLog:

2013-03-22  Uros Bizjak  <ubizjak@gmail.com>

	* gcc.target/i386/pr22152.c (dg-options): Add -mtune=core2.

The patch was tested on x86_64-pc-linux-gnu {,-m32} and committed to mainline.

Uros.
Index: config/i386/mmx.md
===================================================================
--- config/i386/mmx.md	(revision 196970)
+++ config/i386/mmx.md	(working copy)
@@ -76,129 +76,127 @@
   DONE;
 })
 
-;; movd instead of movq is required to handle broken assemblers.
-(define_insn "*mov<mode>_internal_rex64"
+(define_insn "*mov<mode>_internal"
   [(set (match_operand:MMXMODEI8 0 "nonimmediate_operand"
-	 "=rm,r,!?y,!y,!?y,m  ,!y ,*x,x,x ,m,r ,Yi")
+	 "=r ,o ,r,r ,m ,!?y,!y,!?y,m  ,x,x,x,m,*x,*x,*x,m ,r ,Yi,!Ym,*Yi")
 	(match_operand:MMXMODEI8 1 "vector_move_operand"
-	 "Cr ,m,C  ,!y,m  ,!?y,*x,!y ,C,xm,x,Yi,r"))]
-  "TARGET_64BIT && TARGET_MMX
+	 "rCo,rC,C,rm,rC,C  ,!y,m  ,!?y,C,x,m,x,C ,*x,m ,*x,Yi,r ,*Yi,!Ym"))]
+  "TARGET_MMX
    && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
-  "@
-    mov{q}\t{%1, %0|%0, %1}
-    mov{q}\t{%1, %0|%0, %1}
-    pxor\t%0, %0
-    movq\t{%1, %0|%0, %1}
-    movq\t{%1, %0|%0, %1}
-    movq\t{%1, %0|%0, %1}
-    movdq2q\t{%1, %0|%0, %1}
-    movq2dq\t{%1, %0|%0, %1}
-    %vpxor\t%0, %d0
-    %vmovq\t{%1, %0|%0, %1}
-    %vmovq\t{%1, %0|%0, %1}
-    %vmovd\t{%1, %0|%0, %1}
-    %vmovd\t{%1, %0|%0, %1}"
-  [(set (attr "type")
+{
+  switch (get_attr_type (insn))
+    {
+    case TYPE_MULTI:
+      return "#";
+
+    case TYPE_IMOV:
+      if (get_attr_mode (insn) == MODE_SI)
+	return "mov{l}\t{%1, %k0|%k0, %1}";
+      else
+	return "mov{q}\t{%1, %0|%0, %1}";
+
+    case TYPE_MMX:
+      return "pxor\t%0, %0";
+
+    case TYPE_MMXMOV:
+#ifndef HAVE_AS_IX86_INTERUNIT_MOVQ
+      /* Handle broken assemblers that require movd instead of movq.  */
+      if (GENERAL_REG_P (operands[0]) || GENERAL_REG_P (operands[1]))
+	return "movd\t{%1, %0|%0, %1}";
+#endif
+      return "movq\t{%1, %0|%0, %1}";
+
+    case TYPE_SSECVT:
+      if (SSE_REG_P (operands[0]))
+	return "movq2dq\t{%1, %0|%0, %1}";
+      else
+	return "movdq2q\t{%1, %0|%0, %1}";
+
+    case TYPE_SSELOG1:
+      return standard_sse_constant_opcode (insn, operands[1]);
+
+    case TYPE_SSEMOV:
+      switch (get_attr_mode (insn))
+	{
+	case MODE_DI:
+#ifndef HAVE_AS_IX86_INTERUNIT_MOVQ
+	  /* Handle broken assemblers that require movd instead of movq.  */
+	  if (GENERAL_REG_P (operands[0]) || GENERAL_REG_P (operands[1]))
+	    return "%vmovd\t{%1, %0|%0, %1}";
+#endif
+	  return "%vmovq\t{%1, %0|%0, %1}";
+	case MODE_TI:
+	  return "%vmovdqa\t{%1, %0|%0, %1}";
+
+	case MODE_V2SF:
+	  gcc_assert (!TARGET_AVX);
+	  return "movlps\t{%1, %0|%0, %1}";
+	case MODE_V4SF:
+	  return "%vmovaps\t{%1, %0|%0, %1}";
+
+	default:
+	  gcc_unreachable ();
+	}
+
+    default:
+      gcc_unreachable ();
+    }
+}
+  [(set (attr "isa")
      (cond [(eq_attr "alternative" "0,1")
+	      (const_string "nox64")
+	    (eq_attr "alternative" "2,3,4,9,10,11,12,17,18")
+	      (const_string "x64")
+	   ]
+	   (const_string "*")))
+   (set (attr "type")
+     (cond [(eq_attr "alternative" "0,1")
+	      (const_string "multi")
+	    (eq_attr "alternative" "2,3,4")
 	      (const_string "imov")
-	    (eq_attr "alternative" "2")
+	    (eq_attr "alternative" "5")
 	      (const_string "mmx")
-	    (eq_attr "alternative" "3,4,5")
+	    (eq_attr "alternative" "6,7,8")
 	      (const_string "mmxmov")
-	    (eq_attr "alternative" "6,7")
+	    (eq_attr "alternative" "9,13")
+	      (const_string "sselog1")
+	    (eq_attr "alternative" "19,20")
 	      (const_string "ssecvt")
-	    (eq_attr "alternative" "8")
-	      (const_string "sselog1")
 	   ]
 	   (const_string "ssemov")))
-   (set (attr "unit")
-     (if_then_else (eq_attr "alternative" "6,7")
-       (const_string "mmx")
-       (const_string "*")))
-   (set (attr "prefix_rep")
-     (if_then_else (eq_attr "alternative" "6,7,9")
+   (set (attr "prefix_rex")
+     (if_then_else (eq_attr "alternative" "17,18")
        (const_string "1")
        (const_string "*")))
-   (set (attr "prefix_data16")
-     (if_then_else (eq_attr "alternative" "10,11,12")
-       (const_string "1")
-       (const_string "*")))
-   (set (attr "prefix_rex")
-     (if_then_else (eq_attr "alternative" "9,10")
-       (symbol_ref "x86_extended_reg_mentioned_p (insn)")
-       (const_string "*")))
    (set (attr "prefix")
-     (if_then_else (eq_attr "alternative" "8,9,10,11,12")
+     (if_then_else (eq_attr "type" "sselog1,ssemov")
        (const_string "maybe_vex")
        (const_string "orig")))
-   (set_attr "mode" "DI")])
-
-(define_insn "*mov<mode>_internal"
-  [(set (match_operand:MMXMODEI8 0 "nonimmediate_operand"
-	 "=!?y,!y,!?y,m  ,!y,*x,*x,*x ,m ,*x,*x,*x,m ,r  ,m")
-	(match_operand:MMXMODEI8 1 "vector_move_operand"
-	 "C   ,!y,m  ,!?y,*x,!y,C ,*xm,*x,C ,*x,m ,*x,irm,r"))]
-  "!TARGET_64BIT && TARGET_MMX
-   && !(MEM_P (operands[0]) && MEM_P (operands[1]))"
-  "@
-    pxor\t%0, %0
-    movq\t{%1, %0|%0, %1}
-    movq\t{%1, %0|%0, %1}
-    movq\t{%1, %0|%0, %1}
-    movdq2q\t{%1, %0|%0, %1}
-    movq2dq\t{%1, %0|%0, %1}
-    %vpxor\t%0, %d0
-    %vmovq\t{%1, %0|%0, %1}
-    %vmovq\t{%1, %0|%0, %1}
-    xorps\t%0, %0
-    movaps\t{%1, %0|%0, %1}
-    movlps\t{%1, %0|%0, %1}
-    movlps\t{%1, %0|%0, %1}
-    #
-    #"
-  [(set (attr "isa")
-     (cond [(eq_attr "alternative" "4,5,6,7,8")
-	      (const_string "sse2")
-	    (eq_attr "alternative" "9,10,11,12")
-	      (const_string "noavx")
-	   ]
-           (const_string "*")))
-   (set (attr "type")
-     (cond [(eq_attr "alternative" "0")
-	      (const_string "mmx")
-	    (eq_attr "alternative" "1,2,3")
-	      (const_string "mmxmov")
-	    (eq_attr "alternative" "4,5")
-	      (const_string "ssecvt")
-	    (eq_attr "alternative" "6,9")
-	      (const_string "sselog1")
-	    (eq_attr "alternative" "13,14")
-	      (const_string "multi")
-	   ]
-	   (const_string "ssemov")))
-   (set (attr "unit")
-     (if_then_else (eq_attr "alternative" "4,5")
-       (const_string "mmx")
-       (const_string "*")))
-   (set (attr "prefix_rep")
-     (if_then_else
-       (ior (eq_attr "alternative" "4,5")
-	    (and (eq_attr "alternative" "7")
-		 (not (match_test "TARGET_AVX"))))
-       (const_string "1")
-       (const_string "*")))
    (set (attr "prefix_data16")
      (if_then_else
-       (and (eq_attr "alternative" "8")
-	    (not (match_test "TARGET_AVX")))
+       (and (eq_attr "type" "ssemov") (eq_attr "mode" "DI"))
        (const_string "1")
        (const_string "*")))
-   (set (attr "prefix")
-     (if_then_else (eq_attr "alternative" "6,7,8")
-       (const_string "maybe_vex")
-       (const_string "orig")))
-   (set_attr "mode" "DI,DI,DI,DI,DI,DI,TI,DI,DI,V4SF,V4SF,V2SF,V2SF,DI,DI")])
+   (set (attr "mode")
+     (cond [(eq_attr "alternative" "2")
+	      (const_string "SI")
+	    (eq_attr "alternative" "9,10,13,14")
+	      (cond [(ior (not (match_test "TARGET_SSE2"))
+			  (match_test "TARGET_SSE_PACKED_SINGLE_INSN_OPTIMAL"))
+		       (const_string "V4SF")
+		     (match_test "TARGET_AVX")
+		       (const_string "TI")
+		     (match_test "optimize_function_for_size_p (cfun)")
+		       (const_string "V4SF")
+		    ]
+		    (const_string "TI"))
 
+	    (and (eq_attr "alternative" "11,12,15,16")
+		 (not (match_test "TARGET_SSE2")))
+	      (const_string "V2SF")
+	   ]
+	   (const_string "DI")))])
+
 (define_expand "movv2sf"
   [(set (match_operand:V2SF 0 "nonimmediate_operand")
 	(match_operand:V2SF 1 "nonimmediate_operand"))]
Index: testsuite/gcc.target/i386/pr22152.c
===================================================================
--- testsuite/gcc.target/i386/pr22152.c	(revision 196970)
+++ testsuite/gcc.target/i386/pr22152.c	(working copy)
@@ -1,6 +1,6 @@
 /* { dg-do compile } */
-/* { dg-options "-O2 -msse2" } */
-/* { dg-options "-O2 -msse2 -mno-vect8-ret-in-mem" { target i?86-*-solaris2.9 *-*-vxworks* } } */
+/* { dg-options "-O2 -msse2 -mtune=core2" } */
+/* { dg-additional-options "-mno-vect8-ret-in-mem" { target i?86-*-solaris2.9 *-*-vxworks* } } */
 /* { dg-additional-options "-mabi=sysv" { target x86_64-*-mingw* } } */
 
 #include <mmintrin.h>
