...and also make vect_find_last_scalar_stmt_in_slp return a stmt_vec_info.
2018-07-31 Richard Sandiford <richard.sandiford@arm.com>
gcc/
* tree-vectorizer.h (get_earlier_stmt, get_later_stmt): Take and
return stmt_vec_infos rather than gimple stmts. Do not accept
null arguments.
(vect_find_last_scalar_stmt_in_slp): Return a stmt_vec_info instead
of a gimple stmt.
* tree-vect-slp.c (vect_find_last_scalar_stmt_in_slp): Likewise.
Update use of get_later_stmt.
(vect_get_constant_vectors): Update call accordingly.
(vect_schedule_slp_instance): Likewise.
* tree-vect-data-refs.c (vect_slp_analyze_node_dependences): Likewise.
(vect_slp_analyze_instance_dependence): Likewise.
(vect_preserves_scalar_order_p): Update use of get_earlier_stmt.
From-SVN: r263140
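In outline, the three affected interfaces now traffic in stmt_vec_infos instead of gimple stmts. A rough sketch of the new signatures, assembled from the tree-vectorizer.h hunks below (bodies elided):

    static inline stmt_vec_info get_earlier_stmt (stmt_vec_info, stmt_vec_info);
    static inline stmt_vec_info get_later_stmt (stmt_vec_info, stmt_vec_info);
    extern stmt_vec_info vect_find_last_scalar_stmt_in_slp (slp_tree);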
+2018-07-31 Richard Sandiford <richard.sandiford@arm.com>
+
+ * tree-vectorizer.h (get_earlier_stmt, get_later_stmt): Take and
+ return stmt_vec_infos rather than gimple stmts. Do not accept
+ null arguments.
+ (vect_find_last_scalar_stmt_in_slp): Return a stmt_vec_info instead
+ of a gimple stmt.
+ * tree-vect-slp.c (vect_find_last_scalar_stmt_in_slp): Likewise.
+ Update use of get_later_stmt.
+ (vect_get_constant_vectors): Update call accordingly.
+ (vect_schedule_slp_instance): Likewise.
+ * tree-vect-data-refs.c (vect_slp_analyze_node_dependences): Likewise.
+ (vect_slp_analyze_instance_dependence): Likewise.
+ (vect_preserves_scalar_order_p): Update use of get_earlier_stmt.
+
2018-07-31 Richard Sandiford <richard.sandiford@arm.com>
* tree-vectorizer.h (stmt_info_for_cost::stmt): Replace with...
stmtinfo_a = STMT_VINFO_RELATED_STMT (stmtinfo_a);
if (is_pattern_stmt_p (stmtinfo_b))
stmtinfo_b = STMT_VINFO_RELATED_STMT (stmtinfo_b);
- gimple *earlier_stmt = get_earlier_stmt (stmtinfo_a, stmtinfo_b);
- return !DR_IS_WRITE (STMT_VINFO_DATA_REF (vinfo_for_stmt (earlier_stmt)));
+ stmt_vec_info earlier_stmt_info = get_earlier_stmt (stmtinfo_a, stmtinfo_b);
+ return !DR_IS_WRITE (STMT_VINFO_DATA_REF (earlier_stmt_info));
}
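For context, the tail of vect_preserves_scalar_order_p now reads roughly as follows. This is a sketch assembled from the hunk above; the if (is_pattern_stmt_p (stmtinfo_a)) guard is assumed by symmetry with the quoted stmtinfo_b guard.

      /* Sketch of the updated tail of vect_preserves_scalar_order_p.  */
      if (is_pattern_stmt_p (stmtinfo_a))
        stmtinfo_a = STMT_VINFO_RELATED_STMT (stmtinfo_a);
      if (is_pattern_stmt_p (stmtinfo_b))
        stmtinfo_b = STMT_VINFO_RELATED_STMT (stmtinfo_b);
      stmt_vec_info earlier_stmt_info = get_earlier_stmt (stmtinfo_a, stmtinfo_b);
      return !DR_IS_WRITE (STMT_VINFO_DATA_REF (earlier_stmt_info));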
/* A subroutine of vect_analyze_data_ref_dependence. Handle
/* This walks over all stmts involved in the SLP load/store done
in NODE verifying we can sink them up to the last stmt in the
group. */
- gimple *last_access = vect_find_last_scalar_stmt_in_slp (node);
+ stmt_vec_info last_access_info = vect_find_last_scalar_stmt_in_slp (node);
for (unsigned k = 0; k < SLP_INSTANCE_GROUP_SIZE (instance); ++k)
{
stmt_vec_info access_info = SLP_TREE_SCALAR_STMTS (node)[k];
- if (access_info == last_access)
+ if (access_info == last_access_info)
continue;
data_reference *dr_a = STMT_VINFO_DATA_REF (access_info);
ao_ref ref;
bool ref_initialized_p = false;
for (gimple_stmt_iterator gsi = gsi_for_stmt (access_info->stmt);
- gsi_stmt (gsi) != last_access; gsi_next (&gsi))
+ gsi_stmt (gsi) != last_access_info->stmt; gsi_next (&gsi))
{
gimple *stmt = gsi_stmt (gsi);
if (! gimple_vuse (stmt)
store = NULL;
/* Verify we can sink stores to the vectorized stmt insert location. */
- gimple *last_store = NULL;
+ stmt_vec_info last_store_info = NULL;
if (store)
{
if (! vect_slp_analyze_node_dependences (instance, store, vNULL, NULL))
return false;
/* Mark stores in this instance and remember the last one. */
- last_store = vect_find_last_scalar_stmt_in_slp (store);
+ last_store_info = vect_find_last_scalar_stmt_in_slp (store);
for (unsigned k = 0; k < SLP_INSTANCE_GROUP_SIZE (instance); ++k)
gimple_set_visited (SLP_TREE_SCALAR_STMTS (store)[k]->stmt, true);
}
if (! vect_slp_analyze_node_dependences (instance, load,
store
? SLP_TREE_SCALAR_STMTS (store)
+ : vNULL, last_store_info))
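Putting the hunks above together, the store handling in vect_slp_analyze_instance_dependence now remembers the last scalar store as a stmt_vec_info and threads it through to the load analysis. A sketch assembled from the quoted lines; the full signature of vect_slp_analyze_node_dependences is not visible here, and the failure handling after the load call is assumed.

      /* Sketch of the updated store/load dependence analysis.  */
      stmt_vec_info last_store_info = NULL;
      if (store)
        {
          if (! vect_slp_analyze_node_dependences (instance, store, vNULL, NULL))
            return false;
          /* Mark stores in this instance and remember the last one.  */
          last_store_info = vect_find_last_scalar_stmt_in_slp (store);
          for (unsigned k = 0; k < SLP_INSTANCE_GROUP_SIZE (instance); ++k)
            gimple_set_visited (SLP_TREE_SCALAR_STMTS (store)[k]->stmt, true);
        }
      if (! vect_slp_analyze_node_dependences (instance, load,
                                               store
                                               ? SLP_TREE_SCALAR_STMTS (store)
                                               : vNULL, last_store_info))
        return false;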
/* Find the last store in SLP INSTANCE. */
vect_find_last_scalar_stmt_in_slp (slp_tree node)
{
+ stmt_vec_info last = NULL;
stmt_vec_info stmt_vinfo;
for (int i = 0; SLP_TREE_SCALAR_STMTS (node).iterate (i, &stmt_vinfo); i++)
{
if (is_pattern_stmt_p (stmt_vinfo))
- last = get_later_stmt (STMT_VINFO_RELATED_STMT (stmt_vinfo), last);
- else
- last = get_later_stmt (stmt_vinfo, last);
+ stmt_vinfo = STMT_VINFO_RELATED_STMT (stmt_vinfo);
+ last = last ? get_later_stmt (stmt_vinfo, last) : stmt_vinfo;
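For readability, here is the rewritten function as a whole, a sketch assembled from the hunk above; the closing return of last is assumed from the unquoted context.

    /* Find the last store in SLP INSTANCE.  */

    stmt_vec_info
    vect_find_last_scalar_stmt_in_slp (slp_tree node)
    {
      stmt_vec_info last = NULL;
      stmt_vec_info stmt_vinfo;

      for (int i = 0; SLP_TREE_SCALAR_STMTS (node).iterate (i, &stmt_vinfo); i++)
        {
          /* Compare the pattern stmt's position when the scalar stmt was
             replaced by a pattern.  */
          if (is_pattern_stmt_p (stmt_vinfo))
            stmt_vinfo = STMT_VINFO_RELATED_STMT (stmt_vinfo);
          last = last ? get_later_stmt (stmt_vinfo, last) : stmt_vinfo;
        }

      return last;
    }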
gimple_stmt_iterator gsi;
if (place_after_defs)
{
- gsi = gsi_for_stmt
- (vect_find_last_scalar_stmt_in_slp (slp_node));
+ stmt_vec_info last_stmt_info
+ = vect_find_last_scalar_stmt_in_slp (slp_node);
+ gsi = gsi_for_stmt (last_stmt_info->stmt);
init = vect_init_vector (stmt_vinfo, vec_cst, vector_type,
&gsi);
}
/* Vectorized stmts go before the last scalar stmt which is where
all uses are ready. */
- si = gsi_for_stmt (vect_find_last_scalar_stmt_in_slp (node));
+ stmt_vec_info last_stmt_info = vect_find_last_scalar_stmt_in_slp (node);
+ si = gsi_for_stmt (last_stmt_info->stmt);
/* Mark the first element of the reduction chain as reduction to properly
transform the node. In the analysis phase only the last element of the
-/* Return the earlier statement between STMT1 and STMT2. */
+/* Return the earlier statement between STMT1_INFO and STMT2_INFO. */
-static inline gimple *
-get_earlier_stmt (gimple *stmt1, gimple *stmt2)
+static inline stmt_vec_info
+get_earlier_stmt (stmt_vec_info stmt1_info, stmt_vec_info stmt2_info)
- unsigned int uid1, uid2;
-
- if (stmt1 == NULL)
- return stmt2;
-
- if (stmt2 == NULL)
- return stmt1;
-
- uid1 = gimple_uid (stmt1);
- uid2 = gimple_uid (stmt2);
-
- if (uid1 == 0 || uid2 == 0)
- return NULL;
-
- gcc_assert (uid1 <= stmt_vec_info_vec->length ()
- && uid2 <= stmt_vec_info_vec->length ());
- gcc_checking_assert ((STMT_VINFO_IN_PATTERN_P (vinfo_for_stmt (stmt1))
- || !STMT_VINFO_RELATED_STMT (vinfo_for_stmt (stmt1)))
- && (STMT_VINFO_IN_PATTERN_P (vinfo_for_stmt (stmt2))
- || !STMT_VINFO_RELATED_STMT (vinfo_for_stmt (stmt2))));
+ gcc_checking_assert ((STMT_VINFO_IN_PATTERN_P (stmt1_info)
+ || !STMT_VINFO_RELATED_STMT (stmt1_info))
+ && (STMT_VINFO_IN_PATTERN_P (stmt2_info)
+ || !STMT_VINFO_RELATED_STMT (stmt2_info)));
- if (uid1 < uid2)
- return stmt1;
+ if (gimple_uid (stmt1_info->stmt) < gimple_uid (stmt2_info->stmt))
+ return stmt1_info;
-/* Return the later statement between STMT1 and STMT2. */
+/* Return the later statement between STMT1_INFO and STMT2_INFO. */
-static inline gimple *
-get_later_stmt (gimple *stmt1, gimple *stmt2)
+static inline stmt_vec_info
+get_later_stmt (stmt_vec_info stmt1_info, stmt_vec_info stmt2_info)
- unsigned int uid1, uid2;
-
- if (stmt1 == NULL)
- return stmt2;
-
- if (stmt2 == NULL)
- return stmt1;
-
- uid1 = gimple_uid (stmt1);
- uid2 = gimple_uid (stmt2);
-
- if (uid1 == 0 || uid2 == 0)
- return NULL;
-
- gcc_assert (uid1 <= stmt_vec_info_vec->length ()
- && uid2 <= stmt_vec_info_vec->length ());
- gcc_checking_assert ((STMT_VINFO_IN_PATTERN_P (vinfo_for_stmt (stmt1))
- || !STMT_VINFO_RELATED_STMT (vinfo_for_stmt (stmt1)))
- && (STMT_VINFO_IN_PATTERN_P (vinfo_for_stmt (stmt2))
- || !STMT_VINFO_RELATED_STMT (vinfo_for_stmt (stmt2))));
+ gcc_checking_assert ((STMT_VINFO_IN_PATTERN_P (stmt1_info)
+ || !STMT_VINFO_RELATED_STMT (stmt1_info))
+ && (STMT_VINFO_IN_PATTERN_P (stmt2_info)
+ || !STMT_VINFO_RELATED_STMT (stmt2_info)));
- if (uid1 > uid2)
- return stmt1;
+ if (gimple_uid (stmt1_info->stmt) > gimple_uid (stmt2_info->stmt))
+ return stmt1_info;
}
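Assembled from the hunks above, the simplified helper now compares gimple_uids directly on the stmt_vec_infos' statements; get_earlier_stmt has the same shape with the comparison reversed. The trailing return of stmt2_info is assumed from the unquoted context.

    /* Return the later statement between STMT1_INFO and STMT2_INFO.  */

    static inline stmt_vec_info
    get_later_stmt (stmt_vec_info stmt1_info, stmt_vec_info stmt2_info)
    {
      /* Both statements must either be pattern stmts or original stmts
         that have not been replaced by a pattern.  */
      gcc_checking_assert ((STMT_VINFO_IN_PATTERN_P (stmt1_info)
                            || !STMT_VINFO_RELATED_STMT (stmt1_info))
                           && (STMT_VINFO_IN_PATTERN_P (stmt2_info)
                               || !STMT_VINFO_RELATED_STMT (stmt2_info)));

      if (gimple_uid (stmt1_info->stmt) > gimple_uid (stmt2_info->stmt))
        return stmt1_info;
      else
        return stmt2_info;
    }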
/* Return TRUE if a statement represented by STMT_INFO is a part of a
extern void vect_detect_hybrid_slp (loop_vec_info);
extern void vect_get_slp_defs (vec<tree> , slp_tree, vec<vec<tree> > *);
extern bool vect_slp_bb (basic_block);
-extern gimple *vect_find_last_scalar_stmt_in_slp (slp_tree);
+extern stmt_vec_info vect_find_last_scalar_stmt_in_slp (slp_tree);
extern bool is_simple_and_all_uses_invariant (gimple *, loop_vec_info);
extern bool can_duplicate_and_interleave_p (unsigned int, machine_mode,
unsigned int * = NULL,