#include "analyzer/access-diagram.h"
#include "text-art/ruler.h"
#include "fold-const.h"
+#include "analyzer/analyzer-selftests.h"
#if ENABLE_ANALYZER
access_range accessed_range,
tree type)
{
- bit_size_expr num_bits;
- if (accessed_range.get_size (op.m_model, &num_bits))
+ bit_size_expr num_bits (accessed_range.get_size (op.m_model.get_manager ()));
+ if (type)
{
- if (type)
+ styled_string s;
+ pretty_printer pp;
+ pp_format_decoder (&pp) = default_tree_printer;
+ if (num_bits.maybe_print_for_user (&pp, op.m_model))
{
- styled_string s;
-
- pretty_printer pp;
- num_bits.print (&pp);
-
if (op.m_dir == DIR_READ)
return fmt_styled_string (sm,
_("read of %qT (%s)"),
type,
pp_formatted_text (&pp));
}
- if (op.m_dir == DIR_READ)
- return num_bits.get_formatted_str (sm,
- _("read of %wi bit"),
- _("read of %wi bits"),
- _("read of %wi byte"),
- _("read of %wi bytes"),
- _("read of %qE bits"),
- _("read of %qE bytes"));
- else
- return num_bits.get_formatted_str (sm,
- _("write of %wi bit"),
- _("write of %wi bits"),
- _("write of %wi byte"),
- _("write of %wi bytes"),
- _("write of %qE bits"),
- _("write of %qE bytes"));
+ }
+ if (op.m_dir == DIR_READ)
+ {
+ if (auto p
+ = num_bits.maybe_get_formatted_str (sm, op.m_model,
+ _("read of %wi bit"),
+ _("read of %wi bits"),
+ _("read of %wi byte"),
+ _("read of %wi bytes"),
+ _("read of %qs bits"),
+ _("read of %qs bytes")))
+ return std::move (*p.get ());
+ }
+ else
+ {
+ if (auto p
+ = num_bits.maybe_get_formatted_str (sm, op.m_model,
+ _("write of %wi bit"),
+ _("write of %wi bits"),
+ _("write of %wi byte"),
+ _("write of %wi bytes"),
+ _("write of %qs bits"),
+ _("write of %qs bytes")))
+ return std::move (*p.get ());
}
if (type)
return expr;
}
-/* Subroutine of clean_up_for_diagram. */
+/* Duplicate EXPR, replacing any SSA names with the underlying variable. */
-static tree
+tree
remove_ssa_names (tree expr)
{
if (TREE_CODE (expr) == SSA_NAME
/* struct bit_size_expr. */
-text_art::styled_string
-bit_size_expr::get_formatted_str (text_art::style_manager &sm,
- const char *concrete_single_bit_fmt,
- const char *concrete_plural_bits_fmt,
- const char *concrete_single_byte_fmt,
- const char *concrete_plural_bytes_fmt,
- const char *symbolic_bits_fmt,
- const char *symbolic_bytes_fmt) const
+/* Attempt to generate a user-facing styled string that mentions this
+ bit_size_expr.
+ Use MODEL for extracting representative tree values where necessary.
+ The CONCRETE_* format strings should contain a single %wi.
+ The SYMBOLIC_* format strings should contain a single %qs.
+ Return nullptr if unable to represent the expression. */
+
+std::unique_ptr<text_art::styled_string>
+bit_size_expr::maybe_get_formatted_str (text_art::style_manager &sm,
+ const region_model &model,
+ const char *concrete_single_bit_fmt,
+ const char *concrete_plural_bits_fmt,
+ const char *concrete_single_byte_fmt,
+ const char *concrete_plural_bytes_fmt,
+ const char *symbolic_bits_fmt,
+ const char *symbolic_bytes_fmt) const
{
- if (TREE_CODE (m_num_bits) == INTEGER_CST)
+ region_model_manager &mgr = *model.get_manager ();
+ if (const svalue *num_bytes = maybe_get_as_bytes (mgr))
{
- bit_size_t concrete_num_bits = wi::to_offset (m_num_bits);
- if (concrete_num_bits % BITS_PER_UNIT == 0)
+ if (tree cst = num_bytes->maybe_get_constant ())
{
- byte_size_t concrete_num_bytes = concrete_num_bits / BITS_PER_UNIT;
+ byte_size_t concrete_num_bytes = wi::to_offset (cst);
if (concrete_num_bytes == 1)
- return fmt_styled_string (sm, concrete_single_byte_fmt,
- concrete_num_bytes.to_uhwi ());
+ return ::make_unique <text_art::styled_string>
+ (fmt_styled_string (sm, concrete_single_byte_fmt,
+ concrete_num_bytes.to_uhwi ()));
else
- return fmt_styled_string (sm, concrete_plural_bytes_fmt,
- concrete_num_bytes.to_uhwi ());
+ return ::make_unique <text_art::styled_string>
+ (fmt_styled_string (sm, concrete_plural_bytes_fmt,
+ concrete_num_bytes.to_uhwi ()));
}
else
{
- if (concrete_num_bits == 1)
- return fmt_styled_string (sm, concrete_single_bit_fmt,
- concrete_num_bits.to_uhwi ());
- else
- return fmt_styled_string (sm, concrete_plural_bits_fmt,
- concrete_num_bits.to_uhwi ());
+ pretty_printer pp;
+ pp_format_decoder (&pp) = default_tree_printer;
+ if (!num_bytes->maybe_print_for_user (&pp, model))
+ return nullptr;
+ return ::make_unique <text_art::styled_string>
+ (fmt_styled_string (sm, symbolic_bytes_fmt,
+ pp_formatted_text (&pp)));
}
}
+ else if (tree cst = m_num_bits.maybe_get_constant ())
+ {
+ bit_size_t concrete_num_bits = wi::to_offset (cst);
+ if (concrete_num_bits == 1)
+ return ::make_unique <text_art::styled_string>
+ (fmt_styled_string (sm, concrete_single_bit_fmt,
+ concrete_num_bits.to_uhwi ()));
+ else
+ return ::make_unique <text_art::styled_string>
+ (fmt_styled_string (sm, concrete_plural_bits_fmt,
+ concrete_num_bits.to_uhwi ()));
+ }
else
{
- if (tree bytes_expr = maybe_get_as_bytes ())
- return fmt_styled_string (sm,
- symbolic_bytes_fmt,
- clean_up_for_diagram (bytes_expr));
- return fmt_styled_string (sm,
- symbolic_bits_fmt,
- clean_up_for_diagram (m_num_bits));
+ pretty_printer pp;
+ pp_format_decoder (&pp) = default_tree_printer;
+ if (!m_num_bits.maybe_print_for_user (&pp, model))
+ return nullptr;
+ return ::make_unique <text_art::styled_string>
+ (fmt_styled_string (sm, symbolic_bits_fmt,
+ pp_formatted_text (&pp)));
}
}
-void
-bit_size_expr::print (pretty_printer *pp) const
+bool
+bit_size_expr::maybe_print_for_user (pretty_printer *pp,
+ const region_model &model) const
{
- if (TREE_CODE (m_num_bits) == INTEGER_CST)
+ if (tree cst = m_num_bits.maybe_get_constant ())
{
- bit_size_t concrete_num_bits = wi::to_offset (m_num_bits);
+ bit_size_t concrete_num_bits = wi::to_offset (cst);
pp_bit_size_t (pp, concrete_num_bits);
+ return true;
}
else
{
- if (tree bytes_expr = maybe_get_as_bytes ())
- pp_printf (pp, _("%qE bytes"), bytes_expr);
+ if (const svalue *num_bytes = maybe_get_as_bytes (*model.get_manager ()))
+ {
+ pretty_printer tmp_pp;
+ pp_format_decoder (&tmp_pp) = default_tree_printer;
+ if (!num_bytes->maybe_print_for_user (&tmp_pp, model))
+ return false;
+ pp_printf (pp, _("%qs bytes"), pp_formatted_text (&tmp_pp));
+ return true;
+ }
else
- pp_printf (pp, _("%qE bits"), m_num_bits);
+ {
+ pretty_printer tmp_pp;
+ pp_format_decoder (&tmp_pp) = default_tree_printer;
+ if (!m_num_bits.maybe_print_for_user (&tmp_pp, model))
+ return false;
+ pp_printf (pp, _("%qs bits"), pp_formatted_text (&tmp_pp));
+ return true;
+ }
}
}
-tree
-bit_size_expr::maybe_get_as_bytes () const
+/* Attempt to get a symbolic value for this symbolic bit size,
+ expressed in bytes.
+ Return null if it's not known to divide exactly. */
+
+const svalue *
+bit_size_expr::maybe_get_as_bytes (region_model_manager &mgr) const
{
- switch (TREE_CODE (m_num_bits))
+ if (tree cst = m_num_bits.maybe_get_constant ())
{
- default:
- break;
- case INTEGER_CST:
- {
- const bit_size_t num_bits = wi::to_offset (m_num_bits);
- if (num_bits % BITS_PER_UNIT != 0)
- return NULL_TREE;
- const bit_size_t num_bytes = num_bits / BITS_PER_UNIT;
- return wide_int_to_tree (size_type_node, num_bytes);
- }
- break;
- case PLUS_EXPR:
- case MINUS_EXPR:
- {
- bit_size_expr op0
- = bit_size_expr (TREE_OPERAND (m_num_bits, 0));
- tree op0_as_bytes = op0.maybe_get_as_bytes ();
- if (!op0_as_bytes)
- return NULL_TREE;
- bit_size_expr op1
- = bit_size_expr (TREE_OPERAND (m_num_bits, 1));
- tree op1_as_bytes = op1.maybe_get_as_bytes ();
- if (!op1_as_bytes)
- return NULL_TREE;
- return fold_build2 (TREE_CODE (m_num_bits), size_type_node,
- op0_as_bytes, op1_as_bytes);
- }
- break;
- case MULT_EXPR:
- {
- bit_size_expr op1
- = bit_size_expr (TREE_OPERAND (m_num_bits, 1));
- if (tree op1_as_bytes = op1.maybe_get_as_bytes ())
- return fold_build2 (MULT_EXPR, size_type_node,
- TREE_OPERAND (m_num_bits, 0),
- op1_as_bytes);
- }
- break;
+ bit_offset_t concrete_bits = wi::to_offset (cst);
+ if (concrete_bits % BITS_PER_UNIT != 0)
+ /* Not an exact multiple, so fail. */
+ return nullptr;
}
- return NULL_TREE;
+ const svalue *bits_per_byte
+ = mgr.get_or_create_int_cst (NULL_TREE, BITS_PER_UNIT);
+ return mgr.maybe_fold_binop (NULL_TREE, EXACT_DIV_EXPR,
+ &m_num_bits, bits_per_byte);
}
/* struct access_range. */
}
access_range::access_range (const region ®, region_model_manager *mgr)
-: m_start (reg.get_offset (mgr)),
- m_next (reg.get_next_offset (mgr))
+: m_start (strip_types (reg.get_offset (mgr), *mgr)),
+ m_next (strip_types (reg.get_next_offset (mgr), *mgr))
{
}
-bool
-access_range::get_size (const region_model &model, bit_size_expr *out) const
+bit_size_expr
+access_range::get_size (region_model_manager *mgr) const
{
- tree start_expr = m_start.calc_symbolic_bit_offset (model);
- if (!start_expr)
- return false;
- tree next_expr = m_next.calc_symbolic_bit_offset (model);
- if (!next_expr)
- return false;
- *out = bit_size_expr (fold_build2 (MINUS_EXPR, size_type_node,
- next_expr, start_expr));
- return true;
+ const svalue &start_bit_offset = m_start.calc_symbolic_bit_offset (mgr);
+ const svalue &next_bit_offset = m_next.calc_symbolic_bit_offset (mgr);
+ return bit_size_expr
+ (*mgr->get_or_create_binop (NULL_TREE, MINUS_EXPR,
+ &next_bit_offset, &start_bit_offset));
}
bool
const svalue *capacity_in_bytes_sval = m_model.get_capacity (m_base_region);
return access_range
(region_offset::make_concrete (m_base_region, 0),
- region_offset::make_byte_offset (m_base_region, capacity_in_bytes_sval));
+ region_offset::make_byte_offset (m_base_region, capacity_in_bytes_sval),
+ *get_manager ());
}
access_range
else if (actual_bits.m_next > valid_bits.m_start)
{
/* Get part of accessed range that's before the valid range. */
- *out = access_range (actual_bits.m_start, valid_bits.m_start);
+ *out = access_range (actual_bits.m_start, valid_bits.m_start,
+ *get_manager ());
return true;
}
else
else if (actual_bits.m_start < valid_bits.m_next)
{
/* Get part of accessed range that's after the valid range. */
- *out = access_range (valid_bits.m_next, actual_bits.m_next);
+ *out = access_range (valid_bits.m_next, actual_bits.m_next,
+ *get_manager ());
return true;
}
else
void add (const region ®, region_model_manager *mgr, enum kind kind)
{
add (access_range (reg.get_offset (mgr),
- reg.get_next_offset (mgr)),
+ reg.get_next_offset (mgr),
+ *mgr),
kind);
}
{
public:
/* Populate m_table_x_for_bit and m_bit_for_table_x. */
- void populate (const boundaries &boundaries, logger *logger)
+ void populate (const boundaries &boundaries,
+ region_model_manager &mgr,
+ logger *logger)
{
LOG_SCOPE (logger);
{
const region_offset &next_offset = vec_boundaries[idx + 1];
m_table_x_for_prev_offset[next_offset] = table_x;
- m_range_for_table_x[table_x] = access_range (offset, next_offset);
+ m_range_for_table_x[table_x]
+ = access_range (offset, next_offset, mgr);
}
table_x += 1;
}
We don't create a column in the table for the final offset, but we
do populate it, so that looking at the table_x of one beyond the
final table column gives us the upper bound offset. */
- m_btm.populate (*m_boundaries, logger);
+ m_btm.populate (*m_boundaries, *m_op.get_manager (), logger);
/* Gracefully reject cases where the boundary sorting has gone wrong
(due to awkward combinations of symbolic values). */
lower.log ("lower", *m_logger);
upper.log ("upper", *m_logger);
}
- tree lower_next = lower.m_next.calc_symbolic_bit_offset (m_op.m_model);
- if (!lower_next)
- {
- if (m_logger)
- m_logger->log ("failed to get lower_next");
- return;
- }
- tree upper_start = upper.m_start.calc_symbolic_bit_offset (m_op.m_model);
- if (!upper_start)
- {
- if (m_logger)
- m_logger->log ("failed to get upper_start");
- return;
- }
- tree num_bits_gap = fold_build2 (MINUS_EXPR,
- size_type_node,
- upper_start, lower_next);
+ region_model_manager *mgr = m_op.get_manager ();
+ const svalue &lower_next = lower.m_next.calc_symbolic_bit_offset (mgr);
+ const svalue &upper_start = upper.m_start.calc_symbolic_bit_offset (mgr);
+ const svalue *num_bits_gap
+ = mgr->get_or_create_binop (NULL_TREE, MINUS_EXPR,
+ &upper_start, &lower_next);
if (m_logger)
- m_logger->log ("num_bits_gap: %qE", num_bits_gap);
- tree zero = build_int_cst (size_type_node, 0);
+ m_logger->log ("num_bits_gap: %qs", num_bits_gap->get_desc ().get ());
+
+ const svalue *zero = mgr->get_or_create_int_cst (NULL_TREE, 0);
tristate ts_gt_zero = m_op.m_model.eval_condition (num_bits_gap,
GT_EXPR,
- zero,
- NULL);
+ zero);
if (ts_gt_zero.is_false ())
{
if (m_logger)
return;
}
- bit_size_expr num_bits (num_bits_gap);
- styled_string label = num_bits.get_formatted_str (m_sm,
- _("%wi bit"),
- _("%wi bits"),
- _("%wi byte"),
- _("%wi bytes"),
- _("%qE bits"),
- _("%qE bytes"));
- w->add_range (m_btm.get_table_x_for_range (access_range (lower.m_next,
- upper.m_start)),
- std::move (label),
- style::id_plain);
+ bit_size_expr num_bits (*num_bits_gap);
+ if (auto p = num_bits.maybe_get_formatted_str (m_sm, m_op.m_model,
+ _("%wi bit"),
+ _("%wi bits"),
+ _("%wi byte"),
+ _("%wi bytes"),
+ _("%qs bits"),
+ _("%qs bytes")))
+ {
+ styled_string label = std::move (*p.get ());
+ w->add_range (m_btm.get_table_x_for_range
+ (access_range (lower.m_next,
+ upper.m_start,
+ *mgr)),
+ std::move (label),
+ style::id_plain);
+ }
}
styled_string
{
if (m_logger)
invalid_before_bits.log ("invalid_before_bits", *m_logger);
- bit_size_expr num_before_bits;
- if (invalid_before_bits.get_size (m_op.m_model, &num_before_bits))
- {
- styled_string label;
- if (m_op.m_dir == DIR_READ)
- label = num_before_bits.get_formatted_str
- (m_sm,
- _("under-read of %wi bit"),
- _("under-read of %wi bits"),
- _("under-read of %wi byte"),
- _("under-read of %wi bytes"),
- _("under-read of %qE bits"),
- _("under-read of %qE bytes"));
- else
- label = num_before_bits.get_formatted_str
- (m_sm,
- _("underwrite of %wi bit"),
- _("underwrite of %wi bits"),
- _("underwrite of %wi byte"),
- _("underwrite of %wi bytes"),
- _("underwrite of %qE bits"),
- _("underwrite of %qE bytes"));
- w->add_range (m_btm.get_table_x_for_range (invalid_before_bits),
- make_warning_string (std::move (label)),
- m_invalid_style_id);
- }
+ bit_size_expr num_before_bits
+ (invalid_before_bits.get_size (m_op.get_manager ()));
+ std::unique_ptr<styled_string> label;
+ if (m_op.m_dir == DIR_READ)
+ label = num_before_bits.maybe_get_formatted_str
+ (m_sm, m_op.m_model,
+ _("under-read of %wi bit"),
+ _("under-read of %wi bits"),
+ _("under-read of %wi byte"),
+ _("under-read of %wi bytes"),
+ _("under-read of %qs bits"),
+ _("under-read of %qs bytes"));
+ else
+ label = num_before_bits.maybe_get_formatted_str
+ (m_sm, m_op.m_model,
+ _("underwrite of %wi bit"),
+ _("underwrite of %wi bits"),
+ _("underwrite of %wi byte"),
+ _("underwrite of %wi bytes"),
+ _("underwrite of %qs bits"),
+ _("underwrite of %qs bytes"));
+ if (label)
+ w->add_range (m_btm.get_table_x_for_range (invalid_before_bits),
+ make_warning_string (std::move (*label)),
+ m_invalid_style_id);
}
else
{
but std::optional is C++17. */
bool got_valid_bits = false;
access_range valid_bits (m_op.get_valid_bits ());
- bit_size_expr num_valid_bits;
- if (valid_bits.get_size (m_op.m_model, &num_valid_bits))
- {
- if (m_logger)
- valid_bits.log ("valid_bits", *m_logger);
-
- got_valid_bits = true;
- maybe_add_gap (w, invalid_before_bits, valid_bits);
-
- styled_string label;
- if (m_op.m_dir == DIR_READ)
- label = num_valid_bits.get_formatted_str (m_sm,
- _("size: %wi bit"),
- _("size: %wi bits"),
- _("size: %wi byte"),
- _("size: %wi bytes"),
- _("size: %qE bits"),
- _("size: %qE bytes"));
- else
- label = num_valid_bits.get_formatted_str (m_sm,
- _("capacity: %wi bit"),
- _("capacity: %wi bits"),
- _("capacity: %wi byte"),
- _("capacity: %wi bytes"),
- _("capacity: %qE bits"),
- _("capacity: %qE bytes"));
- w->add_range (m_btm.get_table_x_for_range (m_op.get_valid_bits ()),
- std::move (label),
- m_valid_style_id);
- }
+ bit_size_expr num_valid_bits (valid_bits.get_size (m_op.get_manager ()));
+ if (m_logger)
+ valid_bits.log ("valid_bits", *m_logger);
+
+ got_valid_bits = true;
+ maybe_add_gap (w, invalid_before_bits, valid_bits);
+
+ std::unique_ptr<styled_string> label;
+ if (m_op.m_dir == DIR_READ)
+ label = num_valid_bits.maybe_get_formatted_str (m_sm,
+ m_op.m_model,
+ _("size: %wi bit"),
+ _("size: %wi bits"),
+ _("size: %wi byte"),
+ _("size: %wi bytes"),
+ _("size: %qs bits"),
+ _("size: %qs bytes"));
+ else
+ label
+ = num_valid_bits.maybe_get_formatted_str (m_sm,
+ m_op.m_model,
+ _("capacity: %wi bit"),
+ _("capacity: %wi bits"),
+ _("capacity: %wi byte"),
+ _("capacity: %wi bytes"),
+ _("capacity: %qs bits"),
+ _("capacity: %qs bytes"));
+ if (label)
+ w->add_range (m_btm.get_table_x_for_range (m_op.get_valid_bits ()),
+ std::move (*label),
+ m_valid_style_id);
access_range invalid_after_bits;
if (m_op.maybe_get_invalid_after_bits (&invalid_after_bits))
if (m_logger)
invalid_before_bits.log ("invalid_after_bits", *m_logger);
- bit_size_expr num_after_bits;
- if (invalid_after_bits.get_size (m_op.m_model, &num_after_bits))
- {
- styled_string label;
- if (m_op.m_dir == DIR_READ)
- label = num_after_bits.get_formatted_str
- (m_sm,
- _("over-read of %wi bit"),
- _("over-read of %wi bits"),
- _("over-read of %wi byte"),
- _("over-read of %wi bytes"),
- _("over-read of %qE bits"),
- _("over-read of %qE bytes"));
- else
- label = num_after_bits.get_formatted_str
- (m_sm,
- _("overflow of %wi bit"),
- _("overflow of %wi bits"),
- _("overflow of %wi byte"),
- _("overflow of %wi bytes"),
- _("over-read of %qE bits"),
- _("overflow of %qE bytes"));
- w->add_range (m_btm.get_table_x_for_range (invalid_after_bits),
- make_warning_string (std::move (label)),
- m_invalid_style_id);
- }
+ bit_size_expr num_after_bits
+ (invalid_after_bits.get_size (m_op.get_manager ()));
+ std::unique_ptr<styled_string> label;
+ if (m_op.m_dir == DIR_READ)
+ label = num_after_bits.maybe_get_formatted_str
+ (m_sm, m_op.m_model,
+ _("over-read of %wi bit"),
+ _("over-read of %wi bits"),
+ _("over-read of %wi byte"),
+ _("over-read of %wi bytes"),
+ _("over-read of %qs bits"),
+ _("over-read of %qs bytes"));
+ else
+ label = num_after_bits.maybe_get_formatted_str
+ (m_sm, m_op.m_model,
+ _("overflow of %wi bit"),
+ _("overflow of %wi bits"),
+ _("overflow of %wi byte"),
+ _("overflow of %wi bytes"),
+ _("overflow of %qs bits"),
+ _("overflow of %qs bytes"));
+ if (label)
+ w->add_range (m_btm.get_table_x_for_range (invalid_after_bits),
+ make_warning_string (std::move (*label)),
+ m_invalid_style_id);
}
else
{
{
}
+#if CHECKING_P
+
+namespace selftest {
+
+/* Implementation detail of ASSERT_EQ_TYPELESS_INTEGER. */
+
+static void
+assert_eq_typeless_integer (const location &loc,
+ const svalue *sval,
+ int expected_int_val)
+{
+ ASSERT_NE_AT (loc, sval, nullptr);
+ ASSERT_EQ_AT (loc, sval->get_kind (), SK_CONSTANT);
+ ASSERT_EQ_AT (loc,
+ wi::to_offset (sval->maybe_get_constant ()),
+ expected_int_val);
+ ASSERT_EQ_AT (loc, sval->get_type (), NULL_TREE);
+}
+
+/* Assert that SVAL is a constant_svalue equal to EXPECTED_INT_VAL,
+ with NULL_TREE as its type. */
+
+#define ASSERT_EQ_TYPELESS_INTEGER(SVAL, EXPECTED_INT_VAL) \
+ SELFTEST_BEGIN_STMT \
+ assert_eq_typeless_integer ((SELFTEST_LOCATION), \
+ (SVAL), \
+ (EXPECTED_INT_VAL)); \
+ SELFTEST_END_STMT
+
+
+/* Various tests of bit_size_expr::maybe_get_as_bytes. */
+
+static void
+test_bit_size_expr_to_bytes ()
+{
+ region_model_manager mgr;
+
+ /* 40 bits: should be 5 bytes. */
+ {
+ bit_size_expr num_bits (*mgr.get_or_create_int_cst (NULL_TREE, 40));
+ const svalue *as_bytes = num_bits.maybe_get_as_bytes (mgr);
+ ASSERT_EQ_TYPELESS_INTEGER (as_bytes, 5);
+ }
+
+ /* 41 bits: should not convert to bytes. */
+ {
+ bit_size_expr num_bits (*mgr.get_or_create_int_cst (NULL_TREE, 41));
+ const svalue *as_bytes = num_bits.maybe_get_as_bytes (mgr);
+ ASSERT_EQ (as_bytes, nullptr);
+ }
+
+ tree n = build_global_decl ("n", size_type_node);
+
+ const svalue *init_n
+ = mgr.get_or_create_initial_value (mgr.get_region_for_global (n));
+
+ const svalue *n_times_8
+ = mgr.get_or_create_binop (NULL_TREE, MULT_EXPR,
+ init_n,
+ mgr.get_or_create_int_cst (NULL_TREE, 8));
+
+ /* (n * 8) bits should be n bytes */
+ {
+ bit_size_expr num_bits (*n_times_8);
+ const svalue *as_bytes = num_bits.maybe_get_as_bytes (mgr);
+ ASSERT_EQ (as_bytes, mgr.get_or_create_cast (NULL_TREE, init_n));
+ }
+
+ /* (n * 8) + 16 bits should be n + 2 bytes */
+ {
+ bit_size_expr num_bits
+ (*mgr.get_or_create_binop (NULL_TREE, PLUS_EXPR,
+ n_times_8,
+ mgr.get_or_create_int_cst (NULL_TREE, 16)));
+ const svalue *as_bytes = num_bits.maybe_get_as_bytes (mgr);
+ ASSERT_EQ (as_bytes->get_kind (), SK_BINOP);
+ const binop_svalue *binop = as_bytes->dyn_cast_binop_svalue ();
+ ASSERT_EQ (binop->get_op (), PLUS_EXPR);
+ ASSERT_EQ (binop->get_arg0 (), mgr.get_or_create_cast (NULL_TREE, init_n));
+ ASSERT_EQ_TYPELESS_INTEGER (binop->get_arg1 (), 2);
+ }
+}
+
+/* Run all of the selftests within this file. */
+
+void
+analyzer_access_diagram_cc_tests ()
+{
+ test_bit_size_expr_to_bytes ();
+}
+
+} // namespace selftest
+
+#endif /* CHECKING_P */
+
} // namespace ana
#endif /* #if ENABLE_ANALYZER */
class bit_size_expr
{
public:
- bit_size_expr () : m_num_bits (NULL) {}
- bit_size_expr (tree num_bits) : m_num_bits (num_bits) {}
-
- text_art::styled_string
- get_formatted_str (text_art::style_manager &sm,
- const char *concrete_single_bit_fmt,
- const char *concrete_plural_bits_fmt,
- const char *concrete_single_byte_fmt,
- const char *concrete_plural_bytes_fmt,
- const char *symbolic_bits_fmt,
- const char *symbolic_bytes_fmt) const;
- void print (pretty_printer *pp) const;
-
- tree maybe_get_as_bytes () const;
+ bit_size_expr (const svalue &num_bits) : m_num_bits (num_bits) {}
+
+ std::unique_ptr<text_art::styled_string>
+ maybe_get_formatted_str (text_art::style_manager &sm,
+ const region_model &model,
+ const char *concrete_single_bit_fmt,
+ const char *concrete_plural_bits_fmt,
+ const char *concrete_single_byte_fmt,
+ const char *concrete_plural_bytes_fmt,
+ const char *symbolic_bits_fmt,
+ const char *symbolic_bytes_fmt) const;
+ bool maybe_print_for_user (pretty_printer *pp,
+ const region_model &model) const;
+
+ const svalue *maybe_get_as_bytes (region_model_manager &mgr) const;
private:
- tree m_num_bits;
+ const svalue &m_num_bits;
};
/* A range of bits within a base region, where each endpoint
: m_start (), m_next ()
{
}
- access_range (region_offset start, region_offset next)
- : m_start (start), m_next (next)
+ access_range (region_offset start, region_offset next,
+ region_model_manager &mgr)
+ : m_start (strip_types (start, mgr)), m_next (strip_types (next, mgr))
{}
access_range (const region *base_region, const bit_range &bits);
access_range (const region *base_region, const byte_range &bytes);
bool empty_p () const;
- bool get_size (const region_model &model, bit_size_expr *out) const;
+ bit_size_expr get_size (region_model_manager *mgr) const;
bool get_size_in_bits (bit_size_t *out) const
{
run_analyzer_selftests ()
{
#if ENABLE_ANALYZER
+ analyzer_access_diagram_cc_tests ();
analyzer_constraint_manager_cc_tests ();
analyzer_function_set_cc_tests ();
analyzer_program_point_cc_tests ();
/* Declarations for specific families of tests (by source file), in
alphabetical order. */
-extern void analyzer_checker_script_cc_tests ();
+extern void analyzer_access_diagram_cc_tests ();
extern void analyzer_constraint_manager_cc_tests ();
extern void analyzer_function_set_cc_tests ();
extern void analyzer_program_point_cc_tests ();
extern void dump_tree (pretty_printer *pp, tree t);
extern void dump_quoted_tree (pretty_printer *pp, tree t);
extern void print_quoted_type (pretty_printer *pp, tree t);
+extern void print_expr_for_user (pretty_printer *pp, tree t);
extern int readability_comparator (const void *p1, const void *p2);
extern int tree_cmp (const void *p1, const void *p2);
extern tree fixup_tree_for_diagnostic (tree);
return m_sym_offset;
}
- tree calc_symbolic_bit_offset (const region_model &model) const;
+ const svalue &calc_symbolic_bit_offset (region_model_manager *mgr) const;
const svalue *calc_symbolic_byte_offset (region_model_manager *mgr) const;
bool operator== (const region_offset &other) const
extern tree
get_ssa_default_def (const function &fun, tree var);
+extern const svalue *
+strip_types (const svalue *sval, region_model_manager &mgr);
+
+extern region_offset
+strip_types (const region_offset &offset, region_model_manager &mgr);
+
+extern tree remove_ssa_names (tree expr);
+
} // namespace ana
extern bool is_special_named_call_p (const gcall *call, const char *funcname,
enum access_direction get_dir () const final override { return DIR_READ; }
};
+const svalue *
+strip_types (const svalue *sval,
+ region_model_manager &mgr)
+{
+ switch (sval->get_kind ())
+ {
+ default:
+ gcc_unreachable ();
+ case SK_REGION:
+ {
+ const region_svalue *region_sval = (const region_svalue *)sval;
+ return mgr.get_ptr_svalue (NULL_TREE, region_sval->get_pointee ());
+ }
+ case SK_CONSTANT:
+ return sval;
+ case SK_UNKNOWN:
+ return mgr.get_or_create_unknown_svalue (NULL_TREE);
+ case SK_POISONED:
+ {
+ const poisoned_svalue *poisoned_sval = (const poisoned_svalue *)sval;
+ return mgr.get_or_create_poisoned_svalue
+ (poisoned_sval->get_poison_kind (),
+ NULL_TREE);
+ }
+ case SK_SETJMP:
+ return sval;
+ case SK_INITIAL:
+ return sval;
+ case SK_UNARYOP:
+ {
+ const unaryop_svalue *unaryop_sval = (const unaryop_svalue *)sval;
+ const enum tree_code op = unaryop_sval->get_op ();
+ if (op == VIEW_CONVERT_EXPR || op == NOP_EXPR)
+ return strip_types (unaryop_sval->get_arg (), mgr);
+ return mgr.get_or_create_unaryop
+ (NULL_TREE,
+ op,
+ strip_types (unaryop_sval->get_arg (), mgr));
+ }
+ case SK_BINOP:
+ {
+ const binop_svalue *binop_sval = (const binop_svalue *)sval;
+ const enum tree_code op = binop_sval->get_op ();
+ return mgr.get_or_create_binop
+ (NULL_TREE,
+ op,
+ strip_types (binop_sval->get_arg0 (), mgr),
+ strip_types (binop_sval->get_arg1 (), mgr));
+ }
+ case SK_SUB:
+ {
+ const sub_svalue *sub_sval = (const sub_svalue *)sval;
+ return mgr.get_or_create_sub_svalue
+ (NULL_TREE,
+ strip_types (sub_sval->get_parent (), mgr),
+ sub_sval->get_subregion ());
+ }
+ case SK_REPEATED:
+ {
+ const repeated_svalue *repeated_sval = (const repeated_svalue *)sval;
+ return mgr.get_or_create_repeated_svalue
+ (NULL_TREE,
+ strip_types (repeated_sval->get_outer_size (), mgr),
+ strip_types (repeated_sval->get_inner_svalue (), mgr));
+ }
+ case SK_BITS_WITHIN:
+ {
+ const bits_within_svalue *bits_within_sval
+ = (const bits_within_svalue *)sval;
+ return mgr.get_or_create_bits_within
+ (NULL_TREE,
+ bits_within_sval->get_bits (),
+ strip_types (bits_within_sval->get_inner_svalue (), mgr));
+ }
+ case SK_UNMERGEABLE:
+ {
+ const unmergeable_svalue *unmergeable_sval
+ = (const unmergeable_svalue *)sval;
+ return mgr.get_or_create_unmergeable
+ (strip_types (unmergeable_sval->get_arg (), mgr));
+ }
+ case SK_PLACEHOLDER:
+ return sval;
+ case SK_WIDENING:
+ {
+ const widening_svalue *widening_sval = (const widening_svalue *)sval;
+ return mgr.get_or_create_widening_svalue
+ (NULL_TREE,
+ widening_sval->get_point (),
+ strip_types (widening_sval->get_base_svalue (), mgr),
+ strip_types (widening_sval->get_iter_svalue (), mgr));
+ }
+ case SK_COMPOUND:
+ {
+ const compound_svalue *compound_sval = (const compound_svalue *)sval;
+ binding_map typeless_map;
+ for (auto iter : compound_sval->get_map ())
+ {
+ const binding_key *key = iter.first;
+ const svalue *bound_sval = iter.second;
+ typeless_map.put (key, strip_types (bound_sval, mgr));
+ }
+ return mgr.get_or_create_compound_svalue (NULL_TREE, typeless_map);
+ }
+ case SK_CONJURED:
+ return sval;
+ case SK_ASM_OUTPUT:
+ {
+ const asm_output_svalue *asm_output_sval
+ = (const asm_output_svalue *)sval;
+ auto_vec<const svalue *> typeless_inputs
+ (asm_output_sval->get_num_inputs ());
+ for (unsigned idx = 0; idx < asm_output_sval->get_num_inputs (); idx++)
+ typeless_inputs.quick_push
+ (strip_types (asm_output_sval->get_input (idx),
+ mgr));
+ return mgr.get_or_create_asm_output_svalue
+ (NULL_TREE,
+ asm_output_sval->get_asm_string (),
+ asm_output_sval->get_output_idx (),
+ asm_output_sval->get_num_outputs (),
+ typeless_inputs);
+ }
+ case SK_CONST_FN_RESULT:
+ {
+ const const_fn_result_svalue *const_fn_result_sval
+ = (const const_fn_result_svalue *)sval;
+ auto_vec<const svalue *> typeless_inputs
+ (const_fn_result_sval->get_num_inputs ());
+ for (unsigned idx = 0;
+ idx < const_fn_result_sval->get_num_inputs ();
+ idx++)
+ typeless_inputs.quick_push
+ (strip_types (const_fn_result_sval->get_input (idx),
+ mgr));
+ return mgr.get_or_create_const_fn_result_svalue
+ (NULL_TREE,
+ const_fn_result_sval->get_fndecl (),
+ typeless_inputs);
+ }
+ }
+}
+
/* Check whether an access is past the end of the BASE_REG.
Return TRUE if the access was valid, FALSE otherwise. */
gcc_assert (ctxt);
const svalue *next_byte
- = m_mgr->get_or_create_binop (num_bytes_sval->get_type (), PLUS_EXPR,
+ = m_mgr->get_or_create_binop (NULL_TREE, PLUS_EXPR,
sym_byte_offset, num_bytes_sval);
+ next_byte = strip_types (next_byte, *m_mgr);
+ capacity = strip_types (capacity, *m_mgr);
+
if (eval_condition (next_byte, GT_EXPR, capacity).is_true ())
{
tree diag_arg = get_representative_tree (base_reg);
of trees */
const svalue *
-region_model_manager::get_or_create_constant_svalue (tree cst_expr)
+region_model_manager::get_or_create_constant_svalue (tree type, tree cst_expr)
{
gcc_assert (cst_expr);
gcc_assert (CONSTANT_CLASS_P (cst_expr));
+ gcc_assert (type == TREE_TYPE (cst_expr) || type == NULL_TREE);
- constant_svalue **slot = m_constants_map.get (cst_expr);
+ constant_svalue::key_t key (type, cst_expr);
+ constant_svalue **slot = m_constants_map.get (key);
if (slot)
return *slot;
constant_svalue *cst_sval
- = new constant_svalue (alloc_symbol_id (), cst_expr);
+ = new constant_svalue (alloc_symbol_id (), type, cst_expr);
RETURN_UNKNOWN_IF_TOO_COMPLEX (cst_sval);
- m_constants_map.put (cst_expr, cst_sval);
+ m_constants_map.put (key, cst_sval);
return cst_sval;
}
+const svalue *
+region_model_manager::get_or_create_constant_svalue (tree cst_expr)
+{
+ return get_or_create_constant_svalue (TREE_TYPE (cst_expr), cst_expr);
+}
+
/* Return the svalue * for a constant_svalue for the INTEGER_CST
for VAL of type TYPE, creating it if necessary. */
region_model_manager::get_or_create_int_cst (tree type,
const poly_wide_int_ref &cst)
{
- gcc_assert (type);
- gcc_assert (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type));
- tree tree_cst = wide_int_to_tree (type, cst);
- return get_or_create_constant_svalue (tree_cst);
+ tree effective_type = type;
+ if (!type)
+ effective_type = ptrdiff_type_node;
+ gcc_assert (INTEGRAL_TYPE_P (effective_type)
+ || POINTER_TYPE_P (effective_type));
+ tree tree_cst = wide_int_to_tree (effective_type, cst);
+ return get_or_create_constant_svalue (type, tree_cst);
}
/* Return the svalue * for the constant_svalue for the NULL pointer
case VIEW_CONVERT_EXPR:
case NOP_EXPR:
{
+ if (!type)
+ return nullptr;
+
/* Handle redundant casts. */
if (arg->get_type ()
&& useless_type_conversion_p (arg->get_type (), type))
unless INNER_TYPE is narrower than TYPE. */
if (const svalue *innermost_arg = arg->maybe_undo_cast ())
{
- tree inner_type = arg->get_type ();
- if (TYPE_SIZE (type)
- && TYPE_SIZE (inner_type)
- && (fold_binary (LE_EXPR, boolean_type_node,
- TYPE_SIZE (type), TYPE_SIZE (inner_type))
- == boolean_true_node))
- return maybe_fold_unaryop (type, op, innermost_arg);
+ if (tree inner_type = arg->get_type ())
+ if (TYPE_SIZE (type)
+ && TYPE_SIZE (inner_type)
+ && (fold_binary (LE_EXPR, boolean_type_node,
+ TYPE_SIZE (type), TYPE_SIZE (inner_type))
+ == boolean_true_node))
+ return maybe_fold_unaryop (type, op, innermost_arg);
}
/* Avoid creating symbolic regions for pointer casts by
simplifying (T*)(®ION) to ((T*)®ION). */
static enum tree_code
get_code_for_cast (tree dst_type, tree src_type)
{
- gcc_assert (dst_type);
+ if (!dst_type)
+ return NOP_EXPR;
if (!src_type)
return NOP_EXPR;
const svalue *
region_model_manager::get_or_create_cast (tree type, const svalue *arg)
{
- gcc_assert (type);
-
/* No-op if the types are the same. */
if (type == arg->get_type ())
return arg;
/* Don't attempt to handle casts involving vector types for now. */
- if (VECTOR_TYPE_P (type)
- || (arg->get_type ()
- && VECTOR_TYPE_P (arg->get_type ())))
- return get_or_create_unknown_svalue (type);
+ if (type)
+ if (VECTOR_TYPE_P (type)
+ || (arg->get_type ()
+ && VECTOR_TYPE_P (arg->get_type ())))
+ return get_or_create_unknown_svalue (type);
enum tree_code op = get_code_for_cast (type, arg->get_type ());
return get_or_create_unaryop (type, op, arg);
/* (CST OP CST). */
if (cst0 && cst1)
{
- if (tree result = fold_binary (op, type, cst0, cst1))
- if (CONSTANT_CLASS_P (result))
- return get_or_create_constant_svalue (result);
+ if (type)
+ {
+ if (tree result = fold_binary (op, type, cst0, cst1))
+ if (CONSTANT_CLASS_P (result))
+ return get_or_create_constant_svalue (result);
+ }
+ else
+ {
+ if (tree result = int_const_binop (op, cst0, cst1, -1))
+ return get_or_create_constant_svalue (NULL_TREE, result);
+ }
}
if ((type && FLOAT_TYPE_P (type))
break;
case MULT_EXPR:
/* (VAL * 0). */
- if (cst1 && zerop (cst1) && INTEGRAL_TYPE_P (type))
- return get_or_create_constant_svalue (build_int_cst (type, 0));
+ if (cst1
+ && zerop (cst1)
+ && (type == NULL_TREE || INTEGRAL_TYPE_P (type)))
+ return get_or_create_int_cst (type, 0);
/* (VAL * 1) -> VAL. */
if (cst1 && integer_onep (cst1))
- /* TODO: we ought to have a cast to TYPE here, but doing so introduces
- regressions; see PR analyzer/110902. */
- return arg0;
+ return get_or_create_cast (type, arg0);
break;
case BIT_AND_EXPR:
if (cst1)
{
- if (zerop (cst1) && INTEGRAL_TYPE_P (type))
+ if (zerop (cst1)
+ && (type == NULL_TREE || INTEGRAL_TYPE_P (type)))
/* "(ARG0 & 0)" -> "0". */
- return get_or_create_constant_svalue (build_int_cst (type, 0));
+ return get_or_create_int_cst (type, 0);
if (const compound_svalue *compound_sval
= arg0->dyn_cast_compound_svalue ())
binop->get_arg1 (), arg1));
}
+
+  /* Typeless operations, assumed to be effectively arbitrarily-sized
+     integers following normal arithmetic rules. */
+ if (!type)
+ switch (op)
+ {
+ default:
+ break;
+ case MINUS_EXPR:
+ {
+ /* (X - X) -> 0. */
+ if (arg0 == arg1)
+ return get_or_create_int_cst (type, 0);
+
+ /* (X + A) - (A + B) -> (A - B). */
+ if (const binop_svalue *binop0 = arg0->dyn_cast_binop_svalue ())
+ if (const binop_svalue *binop1 = arg1->dyn_cast_binop_svalue ())
+ if (binop0->get_op () == PLUS_EXPR
+ && binop1->get_op () == PLUS_EXPR
+ && binop0->get_arg0 () == binop1->get_arg0 ())
+ return get_or_create_binop (NULL_TREE, op,
+ binop0->get_arg1 (),
+ binop1->get_arg1 ());
+ }
+ break;
+
+ case EXACT_DIV_EXPR:
+ {
+ if (const unaryop_svalue *unaryop0 = arg0->dyn_cast_unaryop_svalue ())
+ {
+ if (unaryop0->get_op () == NOP_EXPR)
+ if (const svalue *sval = maybe_fold_binop (NULL_TREE, op,
+ unaryop0->get_arg (),
+ arg1))
+ return sval;
+ }
+ if (const binop_svalue *binop0 = arg0->dyn_cast_binop_svalue ())
+ {
+ switch (binop0->get_op ())
+ {
+ default:
+ break;
+
+ case PLUS_EXPR:
+ case MINUS_EXPR:
+ /* (A op B) / C -> (A / C) op (B / C). */
+ {
+ if (const svalue *op_on_a
+ = maybe_fold_binop (NULL_TREE, op,
+ binop0->get_arg0 (), arg1))
+ if (const svalue *op_on_b
+ = maybe_fold_binop (NULL_TREE, op,
+ binop0->get_arg1 (), arg1))
+ return get_or_create_binop (NULL_TREE,
+ binop0->get_op (),
+ op_on_a, op_on_b);
+ }
+ break;
+
+ case MULT_EXPR:
+ /* (A * B) / C -> A * (B / C) if C is a divisor of B.
+ In particular, this should also handle the case
+ (A * B) / B -> A. */
+ if (const svalue *b_div_c
+ = maybe_fold_binop (NULL_TREE, op,
+ binop0->get_arg1 (), arg1))
+ return get_or_create_binop (NULL_TREE, binop0->get_op (),
+ binop0->get_arg0 (), b_div_c);
+ }
+ }
+ }
+ break;
+ }
+
/* etc. */
return NULL;
tree fndecl,
const vec<const svalue *> &inputs)
{
- gcc_assert (type);
gcc_assert (fndecl);
gcc_assert (DECL_P (fndecl));
gcc_assert (TREE_READONLY (fndecl));
}
/* svalue consolidation. */
+ const svalue *get_or_create_constant_svalue (tree type, tree cst_expr);
const svalue *get_or_create_constant_svalue (tree cst_expr);
const svalue *get_or_create_int_cst (tree type, const poly_wide_int_ref &cst);
const svalue *get_or_create_null_ptr (tree pointer_type);
void dump_untracked_regions () const;
+ const svalue *maybe_fold_binop (tree type, enum tree_code op,
+ const svalue *arg0, const svalue *arg1);
private:
bool too_complex_p (const complexity &c) const;
bool reject_if_too_complex (svalue *sval);
const svalue *maybe_fold_unaryop (tree type, enum tree_code op,
const svalue *arg);
- const svalue *maybe_fold_binop (tree type, enum tree_code op,
- const svalue *arg0, const svalue *arg1);
const svalue *maybe_fold_sub_svalue (tree type,
const svalue *parent_svalue,
const region *subregion);
heap_region m_heap_region;
/* svalue consolidation. */
- typedef hash_map<tree, constant_svalue *> constants_map_t;
+ typedef hash_map<constant_svalue::key_t, constant_svalue *> constants_map_t;
constants_map_t m_constants_map;
typedef hash_map<tree, unknown_svalue *> unknowns_map_t;
void
print_quoted_type (pretty_printer *pp, tree t)
{
+ if (!t)
+ return;
pp_begin_quote (pp, pp_show_color (pp));
dump_generic_node (pp, t, 0, TDF_SLIM, 0);
pp_end_quote (pp, pp_show_color (pp));
}
+/* Print EXPR to PP, without quotes.
+ For use within svalue::maybe_print_for_user
+ and region::maybe_print_for_user. */
+
+void
+print_expr_for_user (pretty_printer *pp, tree expr)
+{
+ /* Workaround for C++'s lang_hooks.decl_printable_name,
+ which unhelpfully (for us) prefixes the decl with its
+ type. */
+ if (DECL_P (expr))
+ dump_generic_node (pp, expr, 0, TDF_SLIM, 0);
+ else
+ pp_printf (pp, "%E", expr);
+}
+
/* class region_to_value_map. */
/* Assignment operator for region_to_value_map. */
const region *arr_i_reg = model.get_lvalue (arr_i, NULL);
region_offset offset = arr_i_reg->get_offset (&mgr);
ASSERT_EQ (offset.get_base_region (), model.get_lvalue (arr, NULL));
- ASSERT_EQ (offset.get_symbolic_byte_offset ()->get_kind (), SK_BINOP);
+ const svalue *offset_sval = offset.get_symbolic_byte_offset ();
+ if (const svalue *cast = offset_sval->maybe_undo_cast ())
+ offset_sval = cast;
+ ASSERT_EQ (offset_sval->get_kind (), SK_BINOP);
}
/* "arr[i] = i;" - this should remove the earlier bindings. */
}
}
-tree
-region_offset::calc_symbolic_bit_offset (const region_model &model) const
+const svalue &
+region_offset::calc_symbolic_bit_offset (region_model_manager *mgr) const
{
if (symbolic_p ())
{
- tree num_bytes_expr = model.get_representative_tree (m_sym_offset);
- if (!num_bytes_expr)
- return NULL_TREE;
- tree bytes_to_bits_scale = build_int_cst (size_type_node, BITS_PER_UNIT);
- return fold_build2 (MULT_EXPR, size_type_node,
- num_bytes_expr, bytes_to_bits_scale);
+ const svalue *bits_per_byte
+ = mgr->get_or_create_int_cst (NULL_TREE, BITS_PER_UNIT);
+ return *mgr->get_or_create_binop (NULL_TREE, MULT_EXPR,
+ m_sym_offset, bits_per_byte);
}
else
- {
- tree cst = wide_int_to_tree (size_type_node, m_offset);
- return cst;
- }
+ return *mgr->get_or_create_int_cst (size_type_node, m_offset);
}
const svalue *
return b <= a;
}
+region_offset
+strip_types (const region_offset &offset, region_model_manager &mgr)
+{
+ if (offset.symbolic_p ())
+ return region_offset::make_symbolic
+ (offset.get_base_region (),
+ strip_types (offset.get_symbolic_byte_offset (),
+ mgr));
+ else
+ return offset;
+}
+
/* class region and its various subclasses. */
/* class region. */
const svalue *sval
= iter_region->get_relative_symbolic_offset (mgr);
accum_byte_sval
- = mgr->get_or_create_binop (sval->get_type (), PLUS_EXPR,
+ = mgr->get_or_create_binop (ptrdiff_type_node, PLUS_EXPR,
accum_byte_sval, sval);
iter_region = iter_region->get_parent_region ();
}
accumulated bits to a svalue in bytes and revisit the
iter_region collecting the symbolic value. */
byte_offset_t byte_offset = accum_bit_offset / BITS_PER_UNIT;
- tree offset_tree = wide_int_to_tree (integer_type_node,
+ tree offset_tree = wide_int_to_tree (ptrdiff_type_node,
byte_offset);
accum_byte_sval
= mgr->get_or_create_constant_svalue (offset_tree);
return reg_js;
}
+bool
+region::maybe_print_for_user (pretty_printer *pp,
+ const region_model &) const
+{
+ switch (get_kind ())
+ {
+ default:
+ break;
+ case RK_DECL:
+ {
+ const decl_region *reg = (const decl_region *)this;
+ tree decl = reg->get_decl ();
+ if (TREE_CODE (decl) == SSA_NAME)
+ decl = SSA_NAME_VAR (decl);
+ print_expr_for_user (pp, decl);
+ return true;
+ }
+ }
+
+ return false;
+}
+
/* Generate a description of this region. */
DEBUG_FUNCTION label_text
hwi_byte_size);
const svalue *byte_size_sval
= mgr->get_or_create_constant_svalue (byte_size_tree);
- return mgr->get_or_create_binop (ptrdiff_type_node, MULT_EXPR,
+ return mgr->get_or_create_binop (NULL_TREE, MULT_EXPR,
m_index, byte_size_sval);
}
return mgr->get_or_create_unknown_svalue (ptrdiff_type_node);
offset_region::get_bit_offset (region_model_manager *mgr) const
{
const svalue *bits_per_byte_sval
- = mgr->get_or_create_int_cst (size_type_node, BITS_PER_UNIT);
- return mgr->get_or_create_binop (size_type_node, MULT_EXPR,
+ = mgr->get_or_create_int_cst (NULL_TREE, BITS_PER_UNIT);
+ return mgr->get_or_create_binop (NULL_TREE, MULT_EXPR,
m_byte_offset, bits_per_byte_sval);
}
sized_region::get_bit_size_sval (region_model_manager *mgr) const
{
const svalue *bits_per_byte_sval
- = mgr->get_or_create_int_cst (size_type_node, BITS_PER_UNIT);
- return mgr->get_or_create_binop (size_type_node, MULT_EXPR,
+ = mgr->get_or_create_int_cst (NULL_TREE, BITS_PER_UNIT);
+ return mgr->get_or_create_binop (NULL_TREE, MULT_EXPR,
m_byte_size_sval, bits_per_byte_sval);
}
json::value *to_json () const;
+ bool maybe_print_for_user (pretty_printer *pp,
+ const region_model &model) const;
+
bool non_null_p () const;
static int cmp_ptr_ptr (const void *, const void *);
return sval_js;
}
+/* Class for optionally adding open/close paren pairs within
+ svalue::maybe_print_for_user. */
+
+class auto_add_parens
+{
+public:
+ auto_add_parens (pretty_printer *pp,
+ const svalue *outer_sval,
+ const svalue &inner_sval)
+ : m_pp (pp),
+ m_needs_parens (needs_parens_p (outer_sval, inner_sval))
+ {
+ if (m_needs_parens)
+ pp_string (m_pp, "(");
+ }
+ ~auto_add_parens ()
+ {
+ if (m_needs_parens)
+ pp_string (m_pp, ")");
+ }
+
+private:
+ static bool needs_parens_p (const svalue *outer_sval,
+ const svalue &inner_sval)
+ {
+ if (!outer_sval)
+ return false;
+ if (inner_sval.get_kind () == SK_BINOP)
+ return true;
+ return false;
+ }
+
+ pretty_printer *m_pp;
+ bool m_needs_parens;
+};
+
+/* Attempt to print a user-facing description of this svalue to PP,
+ using MODEL for extracting representative tree values if necessary.
+ Use OUTER_SVAL (which can be null) to determine if we need to wrap
+ this value in parentheses. */
+
+bool
+svalue::maybe_print_for_user (pretty_printer *pp,
+ const region_model &model,
+ const svalue *outer_sval) const
+{
+ auto_add_parens p (pp, outer_sval, *this);
+
+ switch (get_kind ())
+ {
+ default:
+ break;
+ case SK_CONSTANT:
+ {
+ const constant_svalue *sval = (const constant_svalue *)this;
+ pp_printf (pp, "%E", sval->get_constant ());
+ return true;
+ }
+ case SK_INITIAL:
+ {
+ const initial_svalue *sval = (const initial_svalue *)this;
+ return sval->get_region ()->maybe_print_for_user (pp, model);
+ }
+ case SK_UNARYOP:
+ {
+ const unaryop_svalue *sval = (const unaryop_svalue *)this;
+ if (sval->get_op () == NOP_EXPR)
+ {
+ if (!sval->get_arg ()->maybe_print_for_user (pp, model, outer_sval))
+ return false;
+ return true;
+ }
+ }
+ break;
+ case SK_BINOP:
+ {
+ const binop_svalue *sval = (const binop_svalue *)this;
+ switch (sval->get_op ())
+ {
+ default:
+ break;
+
+ case PLUS_EXPR:
+ case MINUS_EXPR:
+ case MULT_EXPR:
+ {
+ if (!sval->get_arg0 ()->maybe_print_for_user (pp, model, this))
+ return false;
+ pp_printf (pp, " %s ", op_symbol_code (sval->get_op ()));
+ if (!sval->get_arg1 ()->maybe_print_for_user (pp, model, this))
+ return false;
+ return true;
+ }
+ }
+ }
+ break;
+ }
+
+ if (tree expr = model.get_representative_tree (this))
+ {
+ expr = remove_ssa_names (expr);
+ print_expr_for_user (pp, expr);
+ return true;
+ }
+
+ return false;
+}
+
/* If this svalue is a constant_svalue, return the underlying tree constant.
Otherwise return NULL_TREE. */
const constant_svalue *constant_sval2 = (const constant_svalue *)sval2;
const_tree cst1 = constant_sval1->get_constant ();
const_tree cst2 = constant_sval2->get_constant ();
- return cmp_csts_same_type (cst1, cst2);
+ /* The svalues have the same type, but the underlying trees
+ might not (for the case where both svalues are typeless). */
+ return cmp_csts_and_types (cst1, cst2);
}
break;
case SK_UNKNOWN:
return true;
}
+/* Given EXPR, a non-NULL expression of boolean type, convert to
+ a tristate based on whether this is known to be true, false,
+ or is not known. */
+
+static tristate
+tristate_from_boolean_tree_node (tree expr)
+{
+ gcc_assert (TREE_TYPE (expr) == boolean_type_node);
+
+ if (expr == boolean_true_node)
+ return tristate (tristate::TS_TRUE);
+ else if (expr == boolean_false_node)
+ return tristate (tristate::TS_FALSE);
+ else
+ return tristate (tristate::TS_UNKNOWN);
+}
+
/* Evaluate the condition LHS OP RHS.
Subroutine of region_model::eval_condition for when we have a pair of
constants. */
tristate
constant_svalue::eval_condition (const constant_svalue *lhs,
- enum tree_code op,
- const constant_svalue *rhs)
+ enum tree_code op,
+ const constant_svalue *rhs)
{
tree lhs_const = lhs->get_constant ();
tree rhs_const = rhs->get_constant ();
gcc_assert (CONSTANT_CLASS_P (lhs_const));
gcc_assert (CONSTANT_CLASS_P (rhs_const));
+  if ((lhs->get_type () == NULL_TREE
+       || rhs->get_type () == NULL_TREE)
+      && TREE_CODE (lhs_const) == INTEGER_CST
+      && TREE_CODE (rhs_const) == INTEGER_CST)
+    {
+ if (tree tree_cmp = const_binop (op, boolean_type_node,
+ lhs_const, rhs_const))
+ {
+ tristate ts = tristate_from_boolean_tree_node (tree_cmp);
+ if (ts.is_known ())
+ return ts;
+ }
+ }
+
/* Check for comparable types. */
if (types_compatible_p (TREE_TYPE (lhs_const), TREE_TYPE (rhs_const)))
{
- tree comparison
+ tree tree_cmp
= fold_binary (op, boolean_type_node, lhs_const, rhs_const);
- if (comparison == boolean_true_node)
- return tristate (tristate::TS_TRUE);
- if (comparison == boolean_false_node)
- return tristate (tristate::TS_FALSE);
+ tristate ts = tristate_from_boolean_tree_node (tree_cmp);
+ if (ts.is_known ())
+ return ts;
}
return tristate::TS_UNKNOWN;
}
const region *maybe_get_deref_base_region () const;
+ bool maybe_print_for_user (pretty_printer *pp,
+ const region_model &model,
+ const svalue *outer_sval = nullptr) const;
+
protected:
svalue (complexity c, symbol::id_t id, tree type)
: symbol (c, id), m_type (type)
namespace ana {
-/* Concrete subclass of svalue representing a specific constant value. */
+/* Concrete subclass of svalue representing a specific constant value.
+ The type will either be the same as that of the underlying tree constant,
+ or NULL_TREE indicating the constant is intended to be "typeless". */
class constant_svalue : public svalue
{
public:
- constant_svalue (symbol::id_t id, tree cst_expr)
- : svalue (complexity (1, 1), id, TREE_TYPE (cst_expr)), m_cst_expr (cst_expr)
+  /* A support class for uniquifying instances of constant_svalue. */
+ struct key_t
+ {
+ key_t (tree type, tree cst)
+ : m_type (type), m_cst (cst)
+ {}
+
+ hashval_t hash () const
+ {
+ inchash::hash hstate;
+ hstate.add_ptr (m_type);
+ hstate.add_ptr (m_cst);
+ return hstate.end ();
+ }
+
+ bool operator== (const key_t &other) const
+ {
+ return (m_type == other.m_type && m_cst == other.m_cst);
+ }
+
+ void mark_deleted () { m_type = reinterpret_cast<tree> (1); }
+ void mark_empty () { m_type = reinterpret_cast<tree> (2); }
+ bool is_deleted () const { return m_type == reinterpret_cast<tree> (1); }
+ bool is_empty () const { return m_type == reinterpret_cast<tree> (2); }
+
+ tree m_type;
+ tree m_cst;
+ };
+
+ constant_svalue (symbol::id_t id, tree type, tree cst_expr)
+ : svalue (complexity (1, 1), id, type),
+ m_cst_expr (cst_expr)
{
gcc_assert (cst_expr);
gcc_assert (CONSTANT_CLASS_P (cst_expr));
+ gcc_assert (type == TREE_TYPE (cst_expr) || type == NULL_TREE);
}
enum svalue_kind get_kind () const final override { return SK_CONSTANT; }
return sval->get_kind () == SK_CONSTANT;
}
+template <> struct default_hash_traits<constant_svalue::key_t>
+: public member_function_hash_traits<constant_svalue::key_t>
+{
+ static const bool empty_zero_p = false;
+};
+
namespace ana {
/* Concrete subclass of svalue representing an unknowable value, the bottom
::selftest::fail (SELFTEST_LOCATION, desc_); \
SELFTEST_END_STMT
+/* Like ASSERT_NE, but treat LOC as the effective location of the
+ selftest. */
+
+#define ASSERT_NE_AT(LOC, VAL1, VAL2) \
+ SELFTEST_BEGIN_STMT \
+ const char *desc_ = "ASSERT_NE (" #VAL1 ", " #VAL2 ")"; \
+ if ((VAL1) != (VAL2)) \
+ ::selftest::pass ((LOC), desc_); \
+ else \
+ ::selftest::fail ((LOC), desc_); \
+ SELFTEST_END_STMT
+
/* Evaluate VAL1 and VAL2 and compare them with maybe_ne, calling
::selftest::pass if they might be non-equal,
::selftest::fail if they are known to be equal. */
--- /dev/null
+/* Reduced from analyzer ICE seen with git-2.39.0's pack-bitmap.c
+ when bounds-checking the result of __builtin_ctzll. */
+
+#include <stdint.h>
+#include <stddef.h>
+
+typedef uint64_t eword_t;
+struct ewah_bitmap;
+struct ewah_iterator
+{
+ /* [...] */
+};
+struct bitmap;
+
+void ewah_iterator_init(struct ewah_iterator *it, struct ewah_bitmap *parent);
+int ewah_iterator_next(eword_t *next, struct ewah_iterator *it);
+void bitmap_set(struct bitmap *self, size_t pos);
+
+int rebuild_bitmap(const uint32_t *reposition,
+ struct ewah_bitmap *source,
+ struct bitmap *dest)
+{
+ uint32_t pos = 0;
+ struct ewah_iterator it;
+ eword_t word;
+
+ ewah_iterator_init(&it, source);
+
+ while (ewah_iterator_next(&word, &it)) {
+ uint32_t offset, bit_pos;
+
+ for (offset = 0; offset < (sizeof(eword_t) * 8); ++offset) {
+ if ((word >> offset) == 0)
+ break;
+
+ offset += __builtin_ctzll(word >> offset);
+
+ bit_pos = reposition[pos + offset];
+ if (bit_pos > 0)
+ bitmap_set(dest, bit_pos - 1);
+ else
+ return -1;
+ }
+
+ pos += (sizeof(eword_t) * 8);
+ }
+ return 0;
+}
│ │ ╭──────────┴─────────╮
│ │ │over-read of 4 bytes│
│ │ ╰────────────────────╯
- ╭───────────┴──────────╮ ╭────────┴────────╮
- │size: 'size * 4' bytes│ │'size * 12' bytes│
- ╰──────────────────────╯ ╰─────────────────╯
+ │ ╭────────────────┴───────────────╮
+ │ │'(size * 16) - (size * 4)' bytes│
+ │ ╰────────────────────────────────╯
+ ╭───────────┴──────────╮
+ │size: 'size * 4' bytes│
+ ╰──────────────────────╯
{ dg-end-multiline-output "" } */
└──────────────────────────────────────────────────┘└──────────────────┘
├────────────────────────┬─────────────────────────┤├────────┬─────────┤
│ │
- ╭───────────────┴──────────────╮ ╭─────────┴────────╮
- │capacity: 'size * 4 + 3' bytes│ │overflow of 1 byte│
- ╰──────────────────────────────╯ ╰──────────────────╯
+ ╭────────────────┴───────────────╮ ╭─────────┴────────╮
+ │capacity: '(size * 4) + 3' bytes│ │overflow of 1 byte│
+ ╰────────────────────────────────╯ ╰──────────────────╯
{ dg-end-multiline-output "" } */
--- /dev/null
+/* Verify we don't ICE generating out-of-bounds diagram. */
+
+/* { dg-additional-options "-O0 -fdiagnostics-text-art-charset=unicode" } */
+
+#include <stdio.h>
+#include <stdint.h>
+
+uint64_t d(int32_t h) {
+ uint64_t j[2][6];
+ int32_t k;
+ for (k = 1;;) {
+ printf("FLAG\n");
+ if (h < 106 || j[k][h]) /* { dg-warning "stack-based buffer over-read" } */
+ return 0;
+ }
+}
+int16_t e() {
+ int32_t f[5];
+ for (f[2] = 3; f[2]; --f[2])
+ d(0);
+}
+
+int main() { e(); }
+
+/* We don't care about the exact diagram, just that we don't ICE. */
+
+/* { dg-allow-blank-lines-in-output 1 } */
+/* { dg-prune-output ".*" } */
--- /dev/null
+/* Verify we don't ICE generating out-of-bounds diagram. */
+
+/* { dg-additional-options "-O0 -fdiagnostics-text-art-charset=unicode" } */
+
+#include <stdio.h>
+#include <stdint.h>
+
+struct a {
+ uint32_t b;
+};
+union c {
+ int8_t b;
+};
+
+int32_t *d( int32_t *j, int32_t k, struct a l) {
+ int64_t m[1]= {0};
+ for (l.b = 0; l.b <= 0; l.b++) {
+ printf("FLAG\n");
+ l.b == 12 && m[l.b]; /* { dg-bogus "stack-based buffer over-read" } */
+ }
+}
+
+/* We don't care about the exact diagram, just that we don't ICE. */
+
+/* { dg-allow-blank-lines-in-output 1 } */
+/* { dg-prune-output ".*" } */
--- /dev/null
+/* Verify we don't ICE generating out-of-bounds diagram. */
+
+/* { dg-additional-options "-fdiagnostics-text-art-charset=unicode" } */
+
+#include "analyzer-decls.h"
+
+void f() {
+ int a[] = {3, 0, 3, 3, 0, 3, 40883};
+ for (int c = 6; c; c--) {
+ __analyzer_describe(0, a[c]);
+ 90 > c || a[c]; /* { dg-bogus "stack-based buffer over-read" } */
+ }
+}
+int main() { f(); }
+
+/* We don't care about the exact diagram, just that we don't ICE. */
+
+/* { dg-allow-blank-lines-in-output 1 } */
+/* { dg-prune-output ".*" } */