As discussed on IRC, the
r13-2299-g68c61c2daa1f bug was only missed
because dump_printf_loc had an incorrect format attribute, and therefore
almost no -Wformat=* checking was performed on it.
3, 0 is suitable for a function with (whatever, whatever, const char *, va_list)
arguments, not for (whatever, whatever, const char *, ...); the latter should
use 3, 4.
The following patch fixes that and adjusts all spots to fix warnings.
In many cases the fix is just an ugly cast (for %G, casts to gimple *
from gassign */gphi * and the like; for %p, casts to void * from slp_node
etc.).
There are 3 spots where the mismatch was worse, though: two using %u or %d
for an unsigned HOST_WIDE_INT argument, and one using %T for an enum argument
(promoted to int).
2022-09-01 Jakub Jelinek <jakub@redhat.com>
PR other/106782
* dumpfile.h (dump_printf_loc): Use ATTRIBUTE_GCC_DUMP_PRINTF (3, 4)
instead of ATTRIBUTE_GCC_DUMP_PRINTF (3, 0).
* tree-parloops.cc (parloops_is_slp_reduction): Cast pointers to
derived types of gimple to gimple * to avoid -Wformat warnings.
* tree-vect-loop-manip.cc (vect_set_loop_condition,
vect_update_ivs_after_vectorizer): Likewise.
* tree-vect-stmts.cc (vectorizable_load): Likewise.
* tree-vect-patterns.cc (vect_split_statement,
vect_recog_mulhs_pattern, vect_recog_average_pattern,
vect_determine_precisions_from_range,
vect_determine_precisions_from_users): Likewise.
* gimple-loop-versioning.cc
(loop_versioning::analyze_term_using_scevs): Likewise.
* tree-vect-slp.cc (vect_build_slp_tree_1): Likewise.
(vect_build_slp_tree): Cast slp_tree to void * to avoid
-Wformat warnings.
(optimize_load_redistribution_1, vect_match_slp_patterns,
vect_build_slp_instance, vect_optimize_slp_pass::materialize,
vect_optimize_slp_pass::dump, vect_slp_convert_to_external,
vect_slp_analyze_node_operations, vect_bb_partition_graph): Likewise.
(vect_print_slp_tree): Likewise. Also use
HOST_WIDE_INT_PRINT_UNSIGNED instead of %u.
* tree-vect-loop.cc (vect_determine_vectorization_factor,
vect_analyze_scalar_cycles_1, vect_analyze_loop_operations,
vectorizable_induction, vect_transform_loop): Cast pointers to derived
types of gimple to gimple * to avoid -Wformat warnings.
(vect_analyze_loop_2): Cast slp_tree to void * to avoid
-Wformat warnings.
(vect_estimate_min_profitable_iters): Use HOST_WIDE_INT_PRINT_UNSIGNED
instead of %d.
* tree-vect-slp-patterns.cc (vect_pattern_validate_optab): Use %G
instead of %T and STMT_VINFO_STMT (SLP_TREE_REPRESENTATIVE (node))
instead of SLP_TREE_DEF_TYPE (node).
extern void dump_printf_loc (const dump_metadata_t &, const dump_user_location_t &,
const char *, ...)
- ATTRIBUTE_GCC_DUMP_PRINTF (3, 0);
+ ATTRIBUTE_GCC_DUMP_PRINTF (3, 4);
extern void dump_function (int phase, tree fn);
extern void dump_basic_block (dump_flags_t, basic_block, int);
extern void dump_generic_expr_loc (const dump_metadata_t &,
{
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, address.stmt,
- "looking through %G", assign);
+ "looking through %G", (gimple *) assign);
stride = strip_casts (gimple_assign_rhs1 (assign));
}
&& parloops_valid_reduction_input_p (def_stmt_info))
{
if (dump_enabled_p ())
- dump_printf_loc (MSG_NOTE, vect_location, "swapping oprnds: %G",
- next_stmt);
+ dump_printf_loc (MSG_NOTE, vect_location,
+ "swapping oprnds: %G", (gimple *) next_stmt);
swap_ssa_operands (next_stmt,
gimple_assign_rhs1_ptr (next_stmt),
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location, "New loop exit condition: %G",
- cond_stmt);
+ (gimple *) cond_stmt);
}
/* Helper routine of slpeel_tree_duplicate_loop_to_edge_cfg.
stmt_vec_info phi_info = loop_vinfo->lookup_stmt (phi);
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
- "vect_update_ivs_after_vectorizer: phi: %G", phi);
+ "vect_update_ivs_after_vectorizer: phi: %G",
+ (gimple *) phi);
/* Skip reduction and virtual phis. */
if (!iv_phi_p (phi_info))
stmt_info = loop_vinfo->lookup_stmt (phi);
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location, "==> examining phi: %G",
- phi);
+ (gimple *) phi);
gcc_assert (stmt_info);
stmt_vec_info stmt_vinfo = loop_vinfo->lookup_stmt (phi);
if (dump_enabled_p ())
- dump_printf_loc (MSG_NOTE, vect_location, "Analyze phi: %G", phi);
+ dump_printf_loc (MSG_NOTE, vect_location, "Analyze phi: %G",
+ (gimple *) phi);
/* Skip virtual phi's. The data dependences that are associated with
virtual defs/uses (i.e., memory accesses) are analyzed elsewhere. */
tree def = PHI_RESULT (phi);
if (dump_enabled_p ())
- dump_printf_loc (MSG_NOTE, vect_location, "Analyze phi: %G", phi);
+ dump_printf_loc (MSG_NOTE, vect_location, "Analyze phi: %G",
+ (gimple *) phi);
gcc_assert (!virtual_operand_p (def)
&& STMT_VINFO_DEF_TYPE (stmt_vinfo) == vect_unknown_def_type);
stmt_info = loop_vinfo->lookup_stmt (phi);
if (dump_enabled_p ())
- dump_printf_loc (MSG_NOTE, vect_location, "examining phi: %G", phi);
+ dump_printf_loc (MSG_NOTE, vect_location, "examining phi: %G",
+ (gimple *) phi);
if (virtual_operand_p (gimple_phi_result (phi)))
continue;
if (can_use_lanes && dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
"SLP instance %p can use load/store-lanes\n",
- instance);
+ (void *) instance);
}
else
{
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
"can't unroll as unrolled vectorization factor larger"
- " than maximum vectorization factor: %d\n",
+ " than maximum vectorization factor: "
+ HOST_WIDE_INT_PRINT_UNSIGNED "\n",
LOOP_VINFO_MAX_VECT_FACTOR (loop_vinfo));
*suggested_unroll_factor = 1;
}
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
"transform induction: created def-use cycle: %G%G",
- induction_phi, SSA_NAME_DEF_STMT (vec_def));
+ (gimple *) induction_phi, SSA_NAME_DEF_STMT (vec_def));
return true;
}
gphi *phi = si.phi ();
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
- "------>vectorizing phi: %G", phi);
+ "------>vectorizing phi: %G", (gimple *) phi);
stmt_info = loop_vinfo->lookup_stmt (phi);
if (!stmt_info)
continue;
{
dump_printf_loc (MSG_NOTE, vect_location,
"into pattern statements: %G", stmt1);
- dump_printf_loc (MSG_NOTE, vect_location, "and: %G", new_stmt2);
+ dump_printf_loc (MSG_NOTE, vect_location, "and: %G",
+ (gimple *) new_stmt2);
}
return true;
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
- "created pattern stmt: %G", mulhrs_stmt);
+ "created pattern stmt: %G", (gimple *) mulhrs_stmt);
return vect_convert_output (vinfo, last_stmt_info, lhs_type,
mulhrs_stmt, new_vectype);
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
- "created pattern stmt: %G", average_stmt);
+ "created pattern stmt: %G", (gimple *) average_stmt);
return vect_convert_output (vinfo, last_stmt_info,
type, average_stmt, new_vectype);
dump_printf_loc (MSG_NOTE, vect_location, "can narrow to %s:%d"
" without loss of precision: %G",
sign == SIGNED ? "signed" : "unsigned",
- value_precision, stmt);
+ value_precision, (gimple *) stmt);
vect_set_operation_type (stmt_info, type, value_precision, sign);
vect_set_min_input_precision (stmt_info, type, value_precision);
dump_printf_loc (MSG_NOTE, vect_location, "can narrow to %s:%d"
" without affecting users: %G",
TYPE_UNSIGNED (type) ? "unsigned" : "signed",
- operation_precision, stmt);
+ operation_precision, (gimple *) stmt);
vect_set_operation_type (stmt_info, type, operation_precision,
TYPE_SIGN (type));
}
{
if (!vectype)
dump_printf_loc (MSG_NOTE, vect_location,
- "Target does not support vector type for %T\n",
- SLP_TREE_DEF_TYPE (node));
+ "Target does not support vector type for %G\n",
+ STMT_VINFO_STMT (SLP_TREE_REPRESENTATIVE (node)));
else
dump_printf_loc (MSG_NOTE, vect_location,
"Target does not support %s for vector type "
if (dump_enabled_p ())
dump_printf_loc (MSG_MISSED_OPTIMIZATION, vect_location,
"Build SLP failed: unsupported call type %G",
- call_stmt);
+ (gimple *) call_stmt);
if (is_a <bb_vec_info> (vinfo) && i != 0)
continue;
/* Fatal mismatch. */
{
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location, "re-using %sSLP tree %p\n",
- !(*leader)->failed ? "" : "failed ", *leader);
+ !(*leader)->failed ? "" : "failed ",
+ (void *) *leader);
if (!(*leader)->failed)
{
SLP_TREE_REF_COUNT (*leader)++;
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
- "starting SLP discovery for node %p\n", res);
+ "starting SLP discovery for node %p\n", (void *) res);
poly_uint64 this_max_nunits = 1;
slp_tree res_ = vect_build_slp_tree_2 (vinfo, res, stmts, group_size,
{
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
- "SLP discovery for node %p failed\n", res);
+ "SLP discovery for node %p failed\n", (void *) res);
/* Mark the node invalid so we can detect those when still in use
as backedge destinations. */
SLP_TREE_SCALAR_STMTS (res) = vNULL;
{
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
- "SLP discovery for node %p succeeded\n", res);
+ "SLP discovery for node %p succeeded\n",
+ (void *) res);
gcc_assert (res_ == res);
res->max_nunits = this_max_nunits;
vect_update_max_nunits (max_nunits, this_max_nunits);
dump_metadata_t metadata (dump_kind, loc.get_impl_location ());
dump_user_location_t user_loc = loc.get_user_location ();
- dump_printf_loc (metadata, user_loc, "node%s %p (max_nunits=%u, refcnt=%u)",
+ dump_printf_loc (metadata, user_loc,
+ "node%s %p (max_nunits=" HOST_WIDE_INT_PRINT_UNSIGNED
+ ", refcnt=%u)",
SLP_TREE_DEF_TYPE (node) == vect_external_def
? " (external)"
: (SLP_TREE_DEF_TYPE (node) == vect_constant_def
? " (constant)"
- : ""), node,
+ : ""), (void *) node,
estimated_poly_value (node->max_nunits),
SLP_TREE_REF_COUNT (node));
if (SLP_TREE_VECTYPE (node))
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
- "converting stmts on permute node %p\n", root);
+ "converting stmts on permute node %p\n",
+ (void *) root);
bool *matches = XALLOCAVEC (bool, group_size);
poly_uint64 max_nunits = 1;
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
"Analyzing SLP tree %p for patterns\n",
- SLP_INSTANCE_TREE (instance));
+ (void *) SLP_INSTANCE_TREE (instance));
return vect_match_slp_patterns_2 (ref_node, vinfo, perm_cache, compat_cache,
visited);
if (dump_enabled_p ())
{
dump_printf_loc (MSG_NOTE, vect_location,
- "Final SLP tree for instance %p:\n", new_instance);
+ "Final SLP tree for instance %p:\n",
+ (void *) new_instance);
vect_print_slp_graph (MSG_NOTE, vect_location,
SLP_INSTANCE_TREE (new_instance));
}
dump_printf_loc (MSG_NOTE, vect_location,
"duplicating permutation node %p with"
" layout %d\n",
- node, to_layout_i);
+ (void *) node, to_layout_i);
else
dump_printf_loc (MSG_NOTE, vect_location,
"inserting permutation node in place of %p\n",
- node);
+ (void *) node);
}
unsigned int num_lanes = SLP_TREE_LANES (node);
&& !std::equal (tmp_perm.begin (), tmp_perm.end (),
perm.begin ()))
dump_printf_loc (MSG_NOTE, vect_location,
- "absorbing input layouts into %p\n", node);
+ "absorbing input layouts into %p\n",
+ (void *) node);
std::copy (tmp_perm.begin (), tmp_perm.end (), perm.begin ());
bitmap_set_bit (fully_folded, node_i);
}
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
"failed to absorb input layouts into %p\n",
- node);
+ (void *) node);
change_vec_perm_layout (nullptr, perm, layout_i, layout_i);
}
}
if (other_vertex.partition < vertex.partition)
dump_printf_loc (MSG_NOTE, vect_location,
" - %p [%d] --> %p\n",
- other_vertex.node, other_vertex.partition,
- vertex.node);
+ (void *) other_vertex.node,
+ other_vertex.partition,
+ (void *) vertex.node);
else
dump_printf_loc (MSG_NOTE, vect_location,
" - %p --> [%d] %p\n",
- vertex.node, other_vertex.partition,
- other_vertex.node);
+ (void *) vertex.node,
+ other_vertex.partition,
+ (void *) other_vertex.node);
};
for_each_partition_edge (node_i, print_edge);
}
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
- "Building vector operands of %p from scalars instead\n", node);
+ "Building vector operands of %p from scalars instead\n",
+ (void *) node);
/* Don't remove and free the child nodes here, since they could be
referenced by other structures. The analysis and scheduling phases
{
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
- "Failed cyclic SLP reference in %p\n", node);
+ "Failed cyclic SLP reference in %p\n", (void *) node);
return false;
}
gcc_assert (SLP_TREE_DEF_TYPE (node) == vect_internal_def);
{
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
- "Cannot vectorize all-constant op node %p\n", node);
+ "Cannot vectorize all-constant op node %p\n",
+ (void *) node);
res = false;
}
&& leader != instance)
dump_printf_loc (MSG_NOTE, vect_location,
"instance %p is leader of %p\n",
- leader, instance);
+ (void *) leader, (void *) instance);
}
}
gassign *stmt = as_a <gassign *> (stmt_info->stmt);
if (dump_enabled_p ())
dump_printf_loc (MSG_NOTE, vect_location,
- "hoisting out of the vectorized loop: %G", stmt);
+ "hoisting out of the vectorized loop: %G",
+ (gimple *) stmt);
scalar_dest = copy_ssa_name (scalar_dest);
tree rhs = unshare_expr (gimple_assign_rhs1 (stmt));
edge pe = loop_preheader_edge (loop);