[PATCH, rs6000] Fix PR83789: __builtin_altivec_lvx fails for powerpc for altivec-4.c

Peter Bergner bergner@vnet.ibm.com
Sun Mar 11 15:23:00 GMT 2018


PR83789 shows a problem where the builtin expansion code does not call the
correct define_insn for the current mode (32-bit versus 64-bit).  One could add
checks in this code to call the correct pattern, but it's easier to create
a common define_expand which everyone can call and which does the right thing.
This allows us to clean up all the callers, making for much simpler code.
It also addresses Segher's point that this needs fixing for multiple other
vector modes, not just the one mentioned in the bugzilla.

This passed bootstrap and regtesting on powerpc64le-linux and powerpc64-linux
(running testsuite in both 32-bit and 64-bit modes) with no regressions.
Ok for trunk?

I was not able to reproduce the failure reported in the bugzilla, but Kaushik
confirmed that this patch fixes the ICE.

P.S.  I will be away on vacation for the next week, so if this is ok, I won't
      be able to commit the patch until I return.  Unless you want to commit
      it yourself, Segher, and watch for fallout.  It's up to you.

Peter

	PR target/83789
	* config/rs6000/altivec.md (altivec_lvx_<mode>_2op): Delete define_insn.
	(altivec_lvx_<mode>_1op): Likewise.
	(altivec_stvx_<mode>_2op): Likewise.
	(altivec_stvx_<mode>_1op): Likewise.
	(altivec_lvx_<VM2:mode>): New define_expand.
	(altivec_stvx_<VM2:mode>): Likewise.
	(altivec_lvx_<VM2:mode>_2op_<P:mptrsize>): New define_insn.
	(altivec_lvx_<VM2:mode>_1op_<P:mptrsize>): Likewise.
	(altivec_stvx_<VM2:mode>_2op_<P:mptrsize>): Likewise.
	(altivec_stvx_<VM2:mode>_1op_<P:mptrsize>): Likewise.
	* config/rs6000/rs6000-p8swap.c (rs6000_gen_stvx): Use new expanders.
	(rs6000_gen_lvx): Likewise.
	* config/rs6000/rs6000.c (altivec_expand_lv_builtin): Likewise.
	(altivec_expand_stv_builtin): Likewise.
	(altivec_expand_builtin): Likewise.
	* config/rs6000/vector.md: Likewise.

Index: gcc/config/rs6000/altivec.md
===================================================================
--- gcc/config/rs6000/altivec.md	(revision 258348)
+++ gcc/config/rs6000/altivec.md	(working copy)
@@ -2747,39 +2747,47 @@ (define_insn "altivec_lvx_<mode>_interna
   "lvx %0,%y1"
   [(set_attr "type" "vecload")])
 
-; The next two patterns embody what lvx should usually look like.
-(define_insn "altivec_lvx_<mode>_2op"
-  [(set (match_operand:VM2 0 "register_operand" "=v")
-        (mem:VM2 (and:DI (plus:DI (match_operand:DI 1 "register_operand" "b")
-                                  (match_operand:DI 2 "register_operand" "r"))
-		         (const_int -16))))]
-  "TARGET_ALTIVEC && TARGET_64BIT"
-  "lvx %0,%1,%2"
-  [(set_attr "type" "vecload")])
-
-(define_insn "altivec_lvx_<mode>_1op"
-  [(set (match_operand:VM2 0 "register_operand" "=v")
-        (mem:VM2 (and:DI (match_operand:DI 1 "register_operand" "r")
-			 (const_int -16))))]
-  "TARGET_ALTIVEC && TARGET_64BIT"
-  "lvx %0,0,%1"
-  [(set_attr "type" "vecload")])
+; The following patterns embody what lvx should usually look like.
+(define_expand "altivec_lvx_<VM2:mode>"
+  [(set (match_operand:VM2 0 "register_operand" "")
+	(match_operand:VM2 1 "altivec_indexed_or_indirect_operand" ""))]
+  "TARGET_ALTIVEC"
+{
+  rtx addr = XEXP (operand1, 0);
+  if (rs6000_sum_of_two_registers_p (addr))
+    {
+      rtx op1 = XEXP (addr, 0);
+      rtx op2 = XEXP (addr, 1);
+      if (TARGET_64BIT)
+	emit_insn (gen_altivec_lvx_<VM2:mode>_2op_di (operand0, op1, op2));
+      else
+	emit_insn (gen_altivec_lvx_<VM2:mode>_2op_si (operand0, op1, op2));
+    }
+  else
+    {
+      if (TARGET_64BIT)
+	emit_insn (gen_altivec_lvx_<VM2:mode>_1op_di (operand0, addr));
+      else
+	emit_insn (gen_altivec_lvx_<VM2:mode>_1op_si (operand0, addr));
+    }
+  DONE;
+})
 
-; 32-bit versions of the above.
-(define_insn "altivec_lvx_<mode>_2op_si"
+; The next two patterns embody what lvx should usually look like.
+(define_insn "altivec_lvx_<VM2:mode>_2op_<P:mptrsize>"
   [(set (match_operand:VM2 0 "register_operand" "=v")
-        (mem:VM2 (and:SI (plus:SI (match_operand:SI 1 "register_operand" "b")
-                                  (match_operand:SI 2 "register_operand" "r"))
-		         (const_int -16))))]
-  "TARGET_ALTIVEC && TARGET_32BIT"
+	(mem:VM2 (and:P (plus:P (match_operand:P 1 "register_operand" "b")
+				(match_operand:P 2 "register_operand" "r"))
+			(const_int -16))))]
+  "TARGET_ALTIVEC"
   "lvx %0,%1,%2"
   [(set_attr "type" "vecload")])
 
-(define_insn "altivec_lvx_<mode>_1op_si"
+(define_insn "altivec_lvx_<VM2:mode>_1op_<P:mptrsize>"
   [(set (match_operand:VM2 0 "register_operand" "=v")
-        (mem:VM2 (and:SI (match_operand:SI 1 "register_operand" "r")
-			 (const_int -16))))]
-  "TARGET_ALTIVEC && TARGET_32BIT"
+	(mem:VM2 (and:P (match_operand:P 1 "register_operand" "r")
+			(const_int -16))))]
+  "TARGET_ALTIVEC"
   "lvx %0,0,%1"
   [(set_attr "type" "vecload")])
 
@@ -2795,39 +2803,47 @@ (define_insn "altivec_stvx_<mode>_intern
   "stvx %1,%y0"
   [(set_attr "type" "vecstore")])
 
-; The next two patterns embody what stvx should usually look like.
-(define_insn "altivec_stvx_<mode>_2op"
-  [(set (mem:VM2 (and:DI (plus:DI (match_operand:DI 1 "register_operand" "b")
-  	                          (match_operand:DI 2 "register_operand" "r"))
-	                 (const_int -16)))
-        (match_operand:VM2 0 "register_operand" "v"))]
-  "TARGET_ALTIVEC && TARGET_64BIT"
-  "stvx %0,%1,%2"
-  [(set_attr "type" "vecstore")])
-
-(define_insn "altivec_stvx_<mode>_1op"
-  [(set (mem:VM2 (and:DI (match_operand:DI 1 "register_operand" "r")
-	                 (const_int -16)))
-        (match_operand:VM2 0 "register_operand" "v"))]
-  "TARGET_ALTIVEC && TARGET_64BIT"
-  "stvx %0,0,%1"
-  [(set_attr "type" "vecstore")])
+; The following patterns embody what stvx should usually look like.
+(define_expand "altivec_stvx_<VM2:mode>"
+  [(set (match_operand:VM2 1 "altivec_indexed_or_indirect_operand" "")
+	(match_operand:VM2 0 "register_operand" ""))]
+  "TARGET_ALTIVEC"
+{
+  rtx addr = XEXP (operand1, 0);
+  if (rs6000_sum_of_two_registers_p (addr))
+    {
+      rtx op1 = XEXP (addr, 0);
+      rtx op2 = XEXP (addr, 1);
+      if (TARGET_64BIT)
+	emit_insn (gen_altivec_stvx_<VM2:mode>_2op_di (operand0, op1, op2));
+      else
+	emit_insn (gen_altivec_stvx_<VM2:mode>_2op_si (operand0, op1, op2));
+    }
+  else
+    {
+      if (TARGET_64BIT)
+	emit_insn (gen_altivec_stvx_<VM2:mode>_1op_di (operand0, addr));
+      else
+	emit_insn (gen_altivec_stvx_<VM2:mode>_1op_si (operand0, addr));
+    }
+  DONE;
+})
 
-; 32-bit versions of the above.
-(define_insn "altivec_stvx_<mode>_2op_si"
-  [(set (mem:VM2 (and:SI (plus:SI (match_operand:SI 1 "register_operand" "b")
-  	                          (match_operand:SI 2 "register_operand" "r"))
-	                 (const_int -16)))
-        (match_operand:VM2 0 "register_operand" "v"))]
-  "TARGET_ALTIVEC && TARGET_32BIT"
+; The next two patterns embody what stvx should usually look like.
+(define_insn "altivec_stvx_<VM2:mode>_2op_<P:mptrsize>"
+  [(set (mem:VM2 (and:P (plus:P (match_operand:P 1 "register_operand" "b")
+				(match_operand:P 2 "register_operand" "r"))
+			(const_int -16)))
+	(match_operand:VM2 0 "register_operand" "v"))]
+  "TARGET_ALTIVEC"
   "stvx %0,%1,%2"
   [(set_attr "type" "vecstore")])
 
-(define_insn "altivec_stvx_<mode>_1op_si"
-  [(set (mem:VM2 (and:SI (match_operand:SI 1 "register_operand" "r")
-	                 (const_int -16)))
-        (match_operand:VM2 0 "register_operand" "v"))]
-  "TARGET_ALTIVEC && TARGET_32BIT"
+(define_insn "altivec_stvx_<VM2:mode>_1op_<P:mptrsize>"
+  [(set (mem:VM2 (and:P (match_operand:P 1 "register_operand" "r")
+			(const_int -16)))
+	(match_operand:VM2 0 "register_operand" "v"))]
+  "TARGET_ALTIVEC"
   "stvx %0,0,%1"
   [(set_attr "type" "vecstore")])
 
Index: gcc/config/rs6000/rs6000-p8swap.c
===================================================================
--- gcc/config/rs6000/rs6000-p8swap.c	(revision 258348)
+++ gcc/config/rs6000/rs6000-p8swap.c	(working copy)
@@ -1547,94 +1547,31 @@ mimic_memory_attributes_and_flags (rtx n
 rtx
 rs6000_gen_stvx (enum machine_mode mode, rtx dest_exp, rtx src_exp)
 {
-  rtx memory_address = XEXP (dest_exp, 0);
   rtx stvx;
 
-  if (rs6000_sum_of_two_registers_p (memory_address))
-    {
-      rtx op1, op2;
-      op1 = XEXP (memory_address, 0);
-      op2 = XEXP (memory_address, 1);
-      if (mode == V16QImode)
-	stvx = TARGET_64BIT
-	  ? gen_altivec_stvx_v16qi_2op (src_exp, op1, op2)
-	  : gen_altivec_stvx_v16qi_2op_si (src_exp, op1, op2);
-      else if (mode == V8HImode)
-	stvx = TARGET_64BIT
-	  ? gen_altivec_stvx_v8hi_2op (src_exp, op1, op2)
-	  : gen_altivec_stvx_v8hi_2op_si (src_exp, op1, op2);
-#ifdef HAVE_V8HFmode
-      else if (mode == V8HFmode)
-	stvx = TARGET_64BIT
-	  ? gen_altivec_stvx_v8hf_2op (src_exp, op1, op2)
-	  : gen_altivec_stvx_v8hf_2op_si (src_exp, op1, op2);
-#endif
-      else if (mode == V4SImode)
-	stvx = TARGET_64BIT
-	  ? gen_altivec_stvx_v4si_2op (src_exp, op1, op2)
-	  : gen_altivec_stvx_v4si_2op_si (src_exp, op1, op2);
-      else if (mode == V4SFmode)
-	stvx = TARGET_64BIT
-	  ? gen_altivec_stvx_v4sf_2op (src_exp, op1, op2)
-	  : gen_altivec_stvx_v4sf_2op_si (src_exp, op1, op2);
-      else if (mode == V2DImode)
-	stvx = TARGET_64BIT
-	  ? gen_altivec_stvx_v2di_2op (src_exp, op1, op2)
-	  : gen_altivec_stvx_v2di_2op_si (src_exp, op1, op2);
-      else if (mode == V2DFmode)
-	stvx = TARGET_64BIT
-	  ? gen_altivec_stvx_v2df_2op (src_exp, op1, op2)
-	  : gen_altivec_stvx_v2df_2op_si (src_exp, op1, op2);
-      else if (mode == V1TImode)
-	stvx = TARGET_64BIT
-	  ? gen_altivec_stvx_v1ti_2op (src_exp, op1, op2)
-	  : gen_altivec_stvx_v1ti_2op_si (src_exp, op1, op2);
-      else
-	/* KFmode, TFmode, other modes not expected in this context.  */
-	gcc_unreachable ();
-    }
-  else				/* REG_P (memory_address) */
-    {
-      if (mode == V16QImode)
-	stvx = TARGET_64BIT
-	  ? gen_altivec_stvx_v16qi_1op (src_exp, memory_address)
-	  : gen_altivec_stvx_v16qi_1op_si (src_exp, memory_address);
-      else if (mode == V8HImode)
-	stvx = TARGET_64BIT
-	  ? gen_altivec_stvx_v8hi_1op (src_exp, memory_address)
-	  : gen_altivec_stvx_v8hi_1op_si (src_exp, memory_address);
+  if (mode == V16QImode)
+    stvx = gen_altivec_stvx_v16qi (src_exp, dest_exp);
+  else if (mode == V8HImode)
+    stvx = gen_altivec_stvx_v8hi (src_exp, dest_exp);
 #ifdef HAVE_V8HFmode
-      else if (mode == V8HFmode)
-	stvx = TARGET_64BIT
-	  ? gen_altivec_stvx_v8hf_1op (src_exp, memory_address)
-	  : gen_altivec_stvx_v8hf_1op_si (src_exp, memory_address);
+  else if (mode == V8HFmode)
+    stvx = gen_altivec_stvx_v8hf (src_exp, dest_exp);
 #endif
-      else if (mode == V4SImode)
-	stvx =TARGET_64BIT
-	  ? gen_altivec_stvx_v4si_1op (src_exp, memory_address)
-	  : gen_altivec_stvx_v4si_1op_si (src_exp, memory_address);
-      else if (mode == V4SFmode)
-	stvx = TARGET_64BIT
-	  ? gen_altivec_stvx_v4sf_1op (src_exp, memory_address)
-	  : gen_altivec_stvx_v4sf_1op_si (src_exp, memory_address);
-      else if (mode == V2DImode)
-	stvx = TARGET_64BIT
-	  ? gen_altivec_stvx_v2di_1op (src_exp, memory_address)
-	  : gen_altivec_stvx_v2di_1op_si (src_exp, memory_address);
-      else if (mode == V2DFmode)
-	stvx = TARGET_64BIT
-	  ? gen_altivec_stvx_v2df_1op (src_exp, memory_address)
-	  : gen_altivec_stvx_v2df_1op_si (src_exp, memory_address);
-      else if (mode == V1TImode)
-	stvx = TARGET_64BIT
-	  ? gen_altivec_stvx_v1ti_1op (src_exp, memory_address)
-	  : gen_altivec_stvx_v1ti_1op_si (src_exp, memory_address);
-      else
-	/* KFmode, TFmode, other modes not expected in this context.  */
-	gcc_unreachable ();
-    }
+  else if (mode == V4SImode)
+    stvx = gen_altivec_stvx_v4si (src_exp, dest_exp);
+  else if (mode == V4SFmode)
+    stvx = gen_altivec_stvx_v4sf (src_exp, dest_exp);
+  else if (mode == V2DImode)
+    stvx = gen_altivec_stvx_v2di (src_exp, dest_exp);
+  else if (mode == V2DFmode)
+    stvx = gen_altivec_stvx_v2df (src_exp, dest_exp);
+  else if (mode == V1TImode)
+    stvx = gen_altivec_stvx_v1ti (src_exp, dest_exp);
+  else
+    /* KFmode, TFmode, other modes not expected in this context.  */
+    gcc_unreachable ();
 
-  rtx new_mem_exp = SET_DEST (stvx);
+  rtx new_mem_exp = SET_DEST (PATTERN (stvx));
   mimic_memory_attributes_and_flags (new_mem_exp, dest_exp);
   return stvx;
 }
@@ -1726,95 +1663,31 @@ replace_swapped_aligned_store (swap_web_
 rtx
 rs6000_gen_lvx (enum machine_mode mode, rtx dest_exp, rtx src_exp)
 {
-  rtx memory_address = XEXP (src_exp, 0);
   rtx lvx;
 
-  if (rs6000_sum_of_two_registers_p (memory_address))
-    {
-      rtx op1, op2;
-      op1 = XEXP (memory_address, 0);
-      op2 = XEXP (memory_address, 1);
-
-      if (mode == V16QImode)
-	lvx = TARGET_64BIT
-	  ? gen_altivec_lvx_v16qi_2op (dest_exp, op1, op2)
-	  : gen_altivec_lvx_v16qi_2op_si (dest_exp, op1, op2);
-      else if (mode == V8HImode)
-	lvx = TARGET_64BIT
-	  ? gen_altivec_lvx_v8hi_2op (dest_exp, op1, op2)
-	  : gen_altivec_lvx_v8hi_2op_si (dest_exp, op1, op2);
-#ifdef HAVE_V8HFmode
-      else if (mode == V8HFmode)
-	lvx = TARGET_64BIT
-	  ? gen_altivec_lvx_v8hf_2op (dest_exp, op1, op2)
-	  : gen_altivec_lvx_v8hf_2op_si (dest_exp, op1, op2);
-#endif
-      else if (mode == V4SImode)
-	lvx = TARGET_64BIT
-	  ? gen_altivec_lvx_v4si_2op (dest_exp, op1, op2)
-	  : gen_altivec_lvx_v4si_2op_si (dest_exp, op1, op2);
-      else if (mode == V4SFmode)
-	lvx = TARGET_64BIT
-	  ? gen_altivec_lvx_v4sf_2op (dest_exp, op1, op2)
-	  : gen_altivec_lvx_v4sf_2op_si (dest_exp, op1, op2);
-      else if (mode == V2DImode)
-	lvx = TARGET_64BIT
-	  ? gen_altivec_lvx_v2di_2op (dest_exp, op1, op2)
-	  : gen_altivec_lvx_v2di_2op_si (dest_exp, op1, op2);
-      else if (mode == V2DFmode)
-	lvx = TARGET_64BIT
-	  ? gen_altivec_lvx_v2df_2op (dest_exp, op1, op2)
-	  : gen_altivec_lvx_v2df_2op_si (dest_exp, op1, op2);
-      else if (mode == V1TImode)
-	lvx = TARGET_64BIT
-	  ? gen_altivec_lvx_v1ti_2op (dest_exp, op1, op2)
-	  : gen_altivec_lvx_v1ti_2op_si (dest_exp, op1, op2);
-      else
-	/* KFmode, TFmode, other modes not expected in this context.  */
-	gcc_unreachable ();
-    }
-  else				/* REG_P (memory_address) */
-    {
-      if (mode == V16QImode)
-	lvx = TARGET_64BIT
-	  ? gen_altivec_lvx_v16qi_1op (dest_exp, memory_address)
-	  : gen_altivec_lvx_v16qi_1op_si (dest_exp, memory_address);
-      else if (mode == V8HImode)
-	lvx = TARGET_64BIT
-	  ? gen_altivec_lvx_v8hi_1op (dest_exp, memory_address)
-	  : gen_altivec_lvx_v8hi_1op_si (dest_exp, memory_address);
+  if (mode == V16QImode)
+    lvx = gen_altivec_lvx_v16qi (dest_exp, src_exp);
+  else if (mode == V8HImode)
+    lvx = gen_altivec_lvx_v8hi (dest_exp, src_exp);
 #ifdef HAVE_V8HFmode
-      else if (mode == V8HFmode)
-	lvx = TARGET_64BIT
-	  ? gen_altivec_lvx_v8hf_1op (dest_exp, memory_address)
-	  : gen_altivec_lvx_v8hf_1op_si (dest_exp, memory_address);
+  else if (mode == V8HFmode)
+    lvx = gen_altivec_lvx_v8hf (dest_exp, src_exp);
 #endif
-      else if (mode == V4SImode)
-	lvx = TARGET_64BIT
-	  ? gen_altivec_lvx_v4si_1op (dest_exp, memory_address)
-	  : gen_altivec_lvx_v4si_1op_si (dest_exp, memory_address);
-      else if (mode == V4SFmode)
-	lvx = TARGET_64BIT
-	  ? gen_altivec_lvx_v4sf_1op (dest_exp, memory_address)
-	  : gen_altivec_lvx_v4sf_1op_si (dest_exp, memory_address);
-      else if (mode == V2DImode)
-	lvx = TARGET_64BIT
-	  ? gen_altivec_lvx_v2di_1op (dest_exp, memory_address)
-	  : gen_altivec_lvx_v2di_1op_si (dest_exp, memory_address);
-      else if (mode == V2DFmode)
-	lvx = TARGET_64BIT
-	  ? gen_altivec_lvx_v2df_1op (dest_exp, memory_address)
-	  : gen_altivec_lvx_v2df_1op_si (dest_exp, memory_address);
-      else if (mode == V1TImode)
-	lvx = TARGET_64BIT
-	  ? gen_altivec_lvx_v1ti_1op (dest_exp, memory_address)
-	  : gen_altivec_lvx_v1ti_1op_si (dest_exp, memory_address);
-      else
-	/* KFmode, TFmode, other modes not expected in this context.  */
-	gcc_unreachable ();
-    }
+  else if (mode == V4SImode)
+    lvx = gen_altivec_lvx_v4si (dest_exp, src_exp);
+  else if (mode == V4SFmode)
+    lvx = gen_altivec_lvx_v4sf (dest_exp, src_exp);
+  else if (mode == V2DImode)
+    lvx = gen_altivec_lvx_v2di (dest_exp, src_exp);
+  else if (mode == V2DFmode)
+    lvx = gen_altivec_lvx_v2df (dest_exp, src_exp);
+  else if (mode == V1TImode)
+    lvx = gen_altivec_lvx_v1ti (dest_exp, src_exp);
+  else
+    /* KFmode, TFmode, other modes not expected in this context.  */
+    gcc_unreachable ();
 
-  rtx new_mem_exp = SET_SRC (lvx);
+  rtx new_mem_exp = SET_SRC (PATTERN (lvx));
   mimic_memory_attributes_and_flags (new_mem_exp, src_exp);
 
   return lvx;
Index: gcc/config/rs6000/rs6000.c
===================================================================
--- gcc/config/rs6000/rs6000.c	(revision 258348)
+++ gcc/config/rs6000/rs6000.c	(working copy)
@@ -14451,12 +14451,12 @@ altivec_expand_lv_builtin (enum insn_cod
   /* For LVX, express the RTL accurately by ANDing the address with -16.
      LVXL and LVE*X expand to use UNSPECs to hide their special behavior,
      so the raw address is fine.  */
-  if (icode == CODE_FOR_altivec_lvx_v2df_2op
-      || icode == CODE_FOR_altivec_lvx_v2di_2op
-      || icode == CODE_FOR_altivec_lvx_v4sf_2op
-      || icode == CODE_FOR_altivec_lvx_v4si_2op
-      || icode == CODE_FOR_altivec_lvx_v8hi_2op
-      || icode == CODE_FOR_altivec_lvx_v16qi_2op)
+  if (icode == CODE_FOR_altivec_lvx_v2df
+      || icode == CODE_FOR_altivec_lvx_v2di
+      || icode == CODE_FOR_altivec_lvx_v4sf
+      || icode == CODE_FOR_altivec_lvx_v4si
+      || icode == CODE_FOR_altivec_lvx_v8hi
+      || icode == CODE_FOR_altivec_lvx_v16qi)
     {
       rtx rawaddr;
       if (op0 == const0_rtx)
@@ -14609,12 +14609,12 @@ altivec_expand_stv_builtin (enum insn_co
   /* For STVX, express the RTL accurately by ANDing the address with -16.
      STVXL and STVE*X expand to use UNSPECs to hide their special behavior,
      so the raw address is fine.  */
-  if (icode == CODE_FOR_altivec_stvx_v2df_2op
-      || icode == CODE_FOR_altivec_stvx_v2di_2op
-      || icode == CODE_FOR_altivec_stvx_v4sf_2op
-      || icode == CODE_FOR_altivec_stvx_v4si_2op
-      || icode == CODE_FOR_altivec_stvx_v8hi_2op
-      || icode == CODE_FOR_altivec_stvx_v16qi_2op)
+  if (icode == CODE_FOR_altivec_stvx_v2df
+      || icode == CODE_FOR_altivec_stvx_v2di
+      || icode == CODE_FOR_altivec_stvx_v4sf
+      || icode == CODE_FOR_altivec_stvx_v4si
+      || icode == CODE_FOR_altivec_stvx_v8hi
+      || icode == CODE_FOR_altivec_stvx_v16qi)
     {
       if (op1 == const0_rtx)
 	rawaddr = op2;
@@ -15524,18 +15524,18 @@ altivec_expand_builtin (tree exp, rtx ta
   switch (fcode)
     {
     case ALTIVEC_BUILTIN_STVX_V2DF:
-      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v2df_2op, exp);
+      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v2df, exp);
     case ALTIVEC_BUILTIN_STVX_V2DI:
-      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v2di_2op, exp);
+      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v2di, exp);
     case ALTIVEC_BUILTIN_STVX_V4SF:
-      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v4sf_2op, exp);
+      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v4sf, exp);
     case ALTIVEC_BUILTIN_STVX:
     case ALTIVEC_BUILTIN_STVX_V4SI:
-      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v4si_2op, exp);
+      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v4si, exp);
     case ALTIVEC_BUILTIN_STVX_V8HI:
-      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v8hi_2op, exp);
+      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v8hi, exp);
     case ALTIVEC_BUILTIN_STVX_V16QI:
-      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v16qi_2op, exp);
+      return altivec_expand_stv_builtin (CODE_FOR_altivec_stvx_v16qi, exp);
     case ALTIVEC_BUILTIN_STVEBX:
       return altivec_expand_stv_builtin (CODE_FOR_altivec_stvebx, exp);
     case ALTIVEC_BUILTIN_STVEHX:
@@ -15806,23 +15806,23 @@ altivec_expand_builtin (tree exp, rtx ta
       return altivec_expand_lv_builtin (CODE_FOR_altivec_lvxl_v16qi,
 					exp, target, false);
     case ALTIVEC_BUILTIN_LVX_V2DF:
-      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v2df_2op,
+      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v2df,
 					exp, target, false);
     case ALTIVEC_BUILTIN_LVX_V2DI:
-      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v2di_2op,
+      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v2di,
 					exp, target, false);
     case ALTIVEC_BUILTIN_LVX_V4SF:
-      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v4sf_2op,
+      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v4sf,
 					exp, target, false);
     case ALTIVEC_BUILTIN_LVX:
     case ALTIVEC_BUILTIN_LVX_V4SI:
-      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v4si_2op,
+      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v4si,
 					exp, target, false);
     case ALTIVEC_BUILTIN_LVX_V8HI:
-      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v8hi_2op,
+      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v8hi,
 					exp, target, false);
     case ALTIVEC_BUILTIN_LVX_V16QI:
-      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v16qi_2op,
+      return altivec_expand_lv_builtin (CODE_FOR_altivec_lvx_v16qi,
 					exp, target, false);
     case ALTIVEC_BUILTIN_LVLX:
       return altivec_expand_lv_builtin (CODE_FOR_altivec_lvlx,
Index: gcc/config/rs6000/vector.md
===================================================================
--- gcc/config/rs6000/vector.md	(revision 258348)
+++ gcc/config/rs6000/vector.md	(working copy)
@@ -196,12 +196,7 @@ (define_expand "vector_altivec_load_<mod
       operands[1] = rs6000_address_for_altivec (operands[1]);
       rtx and_op = XEXP (operands[1], 0);
       gcc_assert (GET_CODE (and_op) == AND);
-      rtx addr = XEXP (and_op, 0);
-      if (GET_CODE (addr) == PLUS)
-        emit_insn (gen_altivec_lvx_<mode>_2op (operands[0], XEXP (addr, 0),
-	                                       XEXP (addr, 1)));
-      else
-        emit_insn (gen_altivec_lvx_<mode>_1op (operands[0], operands[1]));
+      emit_insn (gen_altivec_lvx_<mode> (operands[0], operands[1]));
       DONE;
     }
 })
@@ -218,12 +213,7 @@ (define_expand "vector_altivec_store_<mo
       operands[0] = rs6000_address_for_altivec (operands[0]);
       rtx and_op = XEXP (operands[0], 0);
       gcc_assert (GET_CODE (and_op) == AND);
-      rtx addr = XEXP (and_op, 0);
-      if (GET_CODE (addr) == PLUS)
-        emit_insn (gen_altivec_stvx_<mode>_2op (operands[1], XEXP (addr, 0),
-	                                        XEXP (addr, 1)));
-      else
-        emit_insn (gen_altivec_stvx_<mode>_1op (operands[1], operands[0]));
+      emit_insn (gen_altivec_stvx_<mode> (operands[1], operands[0]));
       DONE;
     }
 })


