--- /dev/null
+/* { dg-do compile } */
+/* { dg-require-effective-target vect_int } */
+
+_Bool arr[16];
+
+void foo(char *q)
+{
+  char *p = __builtin_assume_aligned (q, 16);
+  _Bool b0, b1, b2, b3, b4, b5, b6, b7, b8, b9, b10, b11, b12, b13, b14, b15;
+  b0 = p[0] != 0;
+  b1 = p[1] != 0;
+  b2 = p[2] != 0;
+  b3 = p[3] != 0;
+  b4 = p[4] != 0;
+  b5 = p[5] != 0;
+  b6 = p[6] != 0;
+  b7 = p[7] != 0;
+  b8 = p[8] != 0;
+  b9 = p[9] != 0;
+  b10 = p[10] != 0;
+  b11 = p[11] != 0;
+  b12 = p[12] != 0;
+  b13 = p[13] != 0;
+  b14 = p[14] != 0;
+  b15 = p[15] != 0;
+  arr[0] = b0;
+  arr[1] = b1;
+  arr[2] = b2;
+  arr[3] = b3;
+  arr[4] = b4;
+  arr[5] = b5;
+  arr[6] = b6;
+  arr[7] = b7;
+  arr[8] = b8;
+  arr[9] = b9;
+  arr[10] = b10;
+  arr[11] = b11;
+  arr[12] = b12;
+  arr[13] = b13;
+  arr[14] = b14;
+  arr[15] = b15;
+}
+
+/* { dg-final { scan-tree-dump "transform load" "slp2" } } */
+/* { dg-final { scan-tree-dump "optimized: basic block" "slp2" } } */
   stmt_vec_info last_access_info = vect_find_last_scalar_stmt_in_slp (node);
   for (unsigned k = 0; k < SLP_TREE_SCALAR_STMTS (node).length (); ++k)
     {
-      stmt_vec_info access_info = SLP_TREE_SCALAR_STMTS (node)[k];
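+      /* The SLP scalar stmts can be pattern stmts whose data reference
+         lives on the original stmt; use that for the dependence check.  */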
+      stmt_vec_info access_info
+        = vect_orig_stmt (SLP_TREE_SCALAR_STMTS (node)[k]);
       if (access_info == last_access_info)
         continue;
       data_reference *dr_a = STMT_VINFO_DATA_REF (access_info);
   stmt_vec_info first_access_info
     = vect_find_first_scalar_stmt_in_slp (node);
   for (unsigned k = 0; k < SLP_TREE_SCALAR_STMTS (node).length (); ++k)
     {
-      stmt_vec_info access_info = SLP_TREE_SCALAR_STMTS (node)[k];
+      stmt_vec_info access_info
+        = vect_orig_stmt (SLP_TREE_SCALAR_STMTS (node)[k]);
       if (access_info == first_access_info)
         continue;
       data_reference *dr_a = STMT_VINFO_DATA_REF (access_info);
       /* For creating the data-ref pointer we need alignment of the
          first element as well.  */
-      first_stmt_info = vect_find_first_scalar_stmt_in_slp (node);
+      first_stmt_info
+        = vect_stmt_to_vectorize (vect_find_first_scalar_stmt_in_slp (node));
       if (first_stmt_info != SLP_TREE_SCALAR_STMTS (node)[0])
         {
           first_dr_info = STMT_VINFO_DR_INFO (first_stmt_info);
         }
     }
+/* Walk the grouped store chains and replace entries with their
+ pattern variant if any. */
+
+static void
+vect_fixup_store_groups_with_patterns (vec_info *vinfo)
+{
+  stmt_vec_info first_element;
+  unsigned i;
+
+  FOR_EACH_VEC_ELT (vinfo->grouped_stores, i, first_element)
+    {
+      /* We also have CTORs in this array.  */
+      if (!STMT_VINFO_GROUPED_ACCESS (first_element))
+        continue;
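+      /* If the group leader was replaced by a pattern stmt, move the
+         group info from the original stmt to the pattern stmt and
+         make it the new leader.  */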
+      if (STMT_VINFO_IN_PATTERN_P (first_element))
+        {
+          stmt_vec_info orig = first_element;
+          first_element = STMT_VINFO_RELATED_STMT (first_element);
+          DR_GROUP_FIRST_ELEMENT (first_element) = first_element;
+          DR_GROUP_SIZE (first_element) = DR_GROUP_SIZE (orig);
+          DR_GROUP_GAP (first_element) = DR_GROUP_GAP (orig);
+          DR_GROUP_NEXT_ELEMENT (first_element) = DR_GROUP_NEXT_ELEMENT (orig);
+          vinfo->grouped_stores[i] = first_element;
+        }
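+      /* Walk the rest of the chain, splicing in pattern stmts and
+         making every element point to the (possibly new) leader.  */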
+      stmt_vec_info prev = first_element;
+      while (DR_GROUP_NEXT_ELEMENT (prev))
+        {
+          stmt_vec_info elt = DR_GROUP_NEXT_ELEMENT (prev);
+          if (STMT_VINFO_IN_PATTERN_P (elt))
+            {
+              stmt_vec_info orig = elt;
+              elt = STMT_VINFO_RELATED_STMT (elt);
+              DR_GROUP_NEXT_ELEMENT (prev) = elt;
+              DR_GROUP_GAP (elt) = DR_GROUP_GAP (orig);
+              DR_GROUP_NEXT_ELEMENT (elt) = DR_GROUP_NEXT_ELEMENT (orig);
+            }
+          DR_GROUP_FIRST_ELEMENT (elt) = first_element;
+          prev = elt;
+        }
+    }
+}
+
 /* Check if the region described by BB_VINFO can be vectorized, returning
    true if so.  When returning false, set FATAL to true if the same failure
    would prevent vectorization at other vector sizes, false if it is still
    worth trying other sizes.  */
   vect_pattern_recog (bb_vinfo);
+  /* Update store groups from pattern processing.  */
+  vect_fixup_store_groups_with_patterns (bb_vinfo);
+
   /* Check the SLP opportunities in the basic block, analyze and build SLP
      trees.  */
   if (!vect_analyze_slp (bb_vinfo, n_stmts))