static bool
should_emit_struct_debug (tree type, enum debug_info_usage usage)
{
+ if (debug_info_level <= DINFO_LEVEL_TERSE)
+ return false;
+
enum debug_struct_file criterion;
tree type_decl;
bool generic = lang_hooks.types.generic_p (type);
case dw_val_class_vms_delta:
return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
- && !strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1));
+ && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
case dw_val_class_discr_value:
return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
static bool add_const_value_attribute (dw_die_ref, rtx);
static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
static void insert_wide_int (const wide_int &, unsigned char *, int);
-static void insert_float (const_rtx, unsigned char *);
+static unsigned insert_float (const_rtx, unsigned char *);
static rtx rtl_for_decl_location (tree);
static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
static bool tree_add_const_value_attribute (dw_die_ref, tree);
fi = fnad->files + fnad->used_files++;
+ f = remap_debug_filename (d->filename);
+
/* Skip all leading "./". */
- f = d->filename;
while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
f += 2;
scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
unsigned int length = GET_MODE_SIZE (float_mode);
unsigned char *array = ggc_vec_alloc<unsigned char> (length);
+ unsigned int elt_size = insert_float (rtl, array);
- insert_float (rtl, array);
mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
- mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
- mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
+ mem_loc_result->dw_loc_oprnd2.v.val_vec.length
+ = length / elt_size;
+ mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
}
}
{
unsigned int length = GET_MODE_SIZE (smode);
unsigned char *array = ggc_vec_alloc<unsigned char> (length);
+ unsigned int elt_size = insert_float (rtl, array);
- insert_float (rtl, array);
loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
- loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
- loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
+ loc_result->dw_loc_oprnd2.v.val_vec.length = length / elt_size;
+ loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
loc_result->dw_loc_oprnd2.v.val_vec.array = array;
}
}
case FIX_TRUNC_EXPR:
return 0;
+ case COMPOUND_LITERAL_EXPR:
+ return loc_list_from_tree_1 (COMPOUND_LITERAL_EXPR_DECL (loc),
+ 0, context);
+
default:
/* Leave front-end specific codes as simply unknown. This comes
up, for instance, with the C STMT_EXPR. */
/* Writes floating point values to dw_vec_const array. */
-static void
+static unsigned
insert_float (const_rtx rtl, unsigned char *array)
{
long val[4];
real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
/* real_to_target puts 32-bit pieces in each long. Pack them. */
+ if (GET_MODE_SIZE (mode) < 4)
+ {
+ gcc_assert (GET_MODE_SIZE (mode) == 2);
+ insert_int (val[0], 2, array);
+ return 2;
+ }
+
for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
{
insert_int (val[i], 4, array);
array += 4;
}
+ return 4;
}
/* Attach a DW_AT_const_value attribute for a variable or a parameter which
scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
unsigned int length = GET_MODE_SIZE (mode);
unsigned char *array = ggc_vec_alloc<unsigned char> (length);
+ unsigned int elt_size = insert_float (rtl, array);
- insert_float (rtl, array);
- add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
+ add_AT_vec (die, DW_AT_const_value, length / elt_size, elt_size,
+ array);
}
return true;
/* ??? The C++ FE emits debug information for using decls, so
putting gcc_unreachable here falls over. See PR31899. For now
be conservative. */
- else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
+ else if (!symtab->global_info_ready && VAR_P (*tp))
return *tp;
else if (VAR_P (*tp))
{
optimizing and gimplifying the CU by now.
So if *TP has no call graph node associated
to it, it means *TP will not be emitted. */
- if (!cgraph_node::get (*tp))
+ if (!symtab->global_info_ready || !cgraph_node::get (*tp))
return *tp;
}
else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
return true;
}
}
- if (! early_dwarf)
- {
- rtl = rtl_for_decl_init (init, type);
- if (rtl)
- return add_const_value_attribute (die, rtl);
- }
+ /* Generate the RTL even if early_dwarf to force mangling of all referred to
+ symbols. */
+ rtl = rtl_for_decl_init (init, type);
+ if (rtl && !early_dwarf)
+ return add_const_value_attribute (die, rtl);
/* If the host and target are sane, try harder. */
if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
&& initializer_constant_valid_p (init, type))
else
add_AT_int (die, attr, TREE_INT_CST_LOW (value));
}
- else
+ else if (dwarf_version >= 5
+ && TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (value))) == 128)
/* Otherwise represent the bound as an unsigned value with
the precision of its type. The precision and signedness
of the type will be necessary to re-interpret it
unambiguously. */
add_AT_wide (die, attr, wi::to_wide (value));
+ else
+ {
+ rtx v = immed_wide_int_const (wi::to_wide (value),
+ TYPE_MODE (TREE_TYPE (value)));
+ dw_loc_descr_ref loc
+ = loc_descriptor (v, TYPE_MODE (TREE_TYPE (value)),
+ VAR_INIT_STATUS_INITIALIZED);
+ if (loc)
+ add_AT_loc (die, attr, loc);
+ }
return;
}
enum tree_code code = TREE_CODE (type);
dw_die_ref type_die = NULL;
+ if (debug_info_level <= DINFO_LEVEL_TERSE)
+ return;
+
/* ??? If this type is an unnamed subrange type of an integral, floating-point
or fixed-point type, use the inner type. This is because we have no
support for unnamed types in base_type_die. This can happen if this is
tree origin = decl_ultimate_origin (decl);
dw_die_ref subr_die;
dw_die_ref old_die = lookup_decl_die (decl);
+ bool old_die_had_no_children = false;
/* This function gets called multiple times for different stages of
the debug process. For example, for func() in this code:
available.
*/
int declaration = (current_function_decl != decl
+ || (!DECL_INITIAL (decl) && !origin)
|| class_or_namespace_scope_p (context_die));
/* A declaration that has been previously dumped needs no
if (old_die && declaration)
return;
+ if (in_lto_p && old_die && old_die->die_child == NULL)
+ old_die_had_no_children = true;
+
/* Now that the C++ front end lazily declares artificial member fns, we
might need to retrofit the declaration into its class. */
if (!declaration && !origin && !old_die
!= (unsigned) s.column))
add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
- /* If the prototype had an 'auto' or 'decltype(auto)' return type,
- emit the real type on the definition die. */
+ /* If the prototype had an 'auto' or 'decltype(auto)' in
+ the return type, emit the real type on the definition die. */
if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
{
dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
+ while (die
+ && (die->die_tag == DW_TAG_reference_type
+ || die->die_tag == DW_TAG_rvalue_reference_type
+ || die->die_tag == DW_TAG_pointer_type
+ || die->die_tag == DW_TAG_const_type
+ || die->die_tag == DW_TAG_volatile_type
+ || die->die_tag == DW_TAG_restrict_type
+ || die->die_tag == DW_TAG_array_type
+ || die->die_tag == DW_TAG_ptr_to_member_type
+ || die->die_tag == DW_TAG_subroutine_type))
+ die = get_AT_ref (die, DW_AT_type);
if (die == auto_die || die == decltype_auto_die)
add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
TYPE_UNQUALIFIED, false, context_die);
else if (DECL_INITIAL (decl) == NULL_TREE)
gen_unspecified_parameters_die (decl, subr_die);
}
+ else if ((subr_die != old_die || old_die_had_no_children)
+ && prototype_p (TREE_TYPE (decl))
+ && stdarg_p (TREE_TYPE (decl)))
+ gen_unspecified_parameters_die (decl, subr_die);
}
if (subr_die != old_die)
&& DECL_RTL_SET_P (decl_or_origin))))
{
if (early_dwarf)
- add_pubname (decl_or_origin, var_die);
+ {
+ add_pubname (decl_or_origin, var_die);
+ /* For global register variables, emit DW_AT_location if possible
+ already during early_dwarf, as late_global_decl won't usually be
+ called. */
+ if (DECL_HARD_REGISTER (decl_or_origin)
+ && TREE_STATIC (decl_or_origin)
+ && !decl_by_reference_p (decl_or_origin)
+ && !get_AT (var_die, DW_AT_location)
+ && !get_AT (var_die, DW_AT_const_value)
+ && DECL_RTL_SET_P (decl_or_origin)
+ && REG_P (DECL_RTL (decl_or_origin)))
+ {
+ dw_loc_descr_ref descr
+ = reg_loc_descriptor (DECL_RTL (decl_or_origin),
+ VAR_INIT_STATUS_INITIALIZED);
+ if (descr)
+ add_AT_loc (var_die, DW_AT_location, descr);
+ }
+ }
else
add_location_or_const_value_attribute (var_die, decl_or_origin,
decl == NULL);
case VAR_DECL:
case RESULT_DECL:
/* If we are in terse mode, don't generate any DIEs to represent any
- variable declarations or definitions. */
- if (debug_info_level <= DINFO_LEVEL_TERSE)
+ variable declarations or definitions unless it is external. */
+ if (debug_info_level < DINFO_LEVEL_TERSE
+ || (debug_info_level == DINFO_LEVEL_TERSE
+ && !TREE_PUBLIC (decl_or_origin)))
break;
- /* Avoid generating stray type DIEs during late dwarf dumping.
- All types have been dumped early. */
- if (early_dwarf
- /* ??? But in LTRANS we cannot annotate early created variably
- modified type DIEs without copying them and adjusting all
- references to them. Dump them again as happens for inlining
- which copies both the decl and the types. */
- /* ??? And even non-LTO needs to re-visit type DIEs to fill
- in VLA bound information for example. */
- || (decl && variably_modified_type_p (TREE_TYPE (decl),
- current_function_decl)))
+ if (debug_info_level > DINFO_LEVEL_TERSE)
{
- /* Output any DIEs that are needed to specify the type of this data
- object. */
- if (decl_by_reference_p (decl_or_origin))
- gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
- else
- gen_type_die (TREE_TYPE (decl_or_origin), context_die);
- }
+ /* Avoid generating stray type DIEs during late dwarf dumping.
+ All types have been dumped early. */
+ if (early_dwarf
+ /* ??? But in LTRANS we cannot annotate early created variably
+ modified type DIEs without copying them and adjusting all
+ references to them. Dump them again as happens for inlining
+ which copies both the decl and the types. */
+ /* ??? And even non-LTO needs to re-visit type DIEs to fill
+ in VLA bound information for example. */
+ || (decl && variably_modified_type_p (TREE_TYPE (decl),
+ current_function_decl)))
+ {
+ /* Output any DIEs that are needed to specify the type of this data
+ object. */
+ if (decl_by_reference_p (decl_or_origin))
+ gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
+ else
+ gen_type_die (TREE_TYPE (decl_or_origin), context_die);
+ }
- if (early_dwarf)
- {
- /* And its containing type. */
- class_origin = decl_class_context (decl_or_origin);
- if (class_origin != NULL_TREE)
- gen_type_die_for_member (class_origin, decl_or_origin, context_die);
+ if (early_dwarf)
+ {
+ /* And its containing type. */
+ class_origin = decl_class_context (decl_or_origin);
+ if (class_origin != NULL_TREE)
+ gen_type_die_for_member (class_origin, decl_or_origin, context_die);
- /* And its containing namespace. */
- context_die = declare_in_namespace (decl_or_origin, context_die);
+ /* And its containing namespace. */
+ context_die = declare_in_namespace (decl_or_origin, context_die);
+ }
}
/* Now output the DIE to represent the data object itself. This gets
context_die = lookup_decl_die (DECL_CONTEXT (decl));
/* If we are in terse mode, don't generate any DIEs to represent any
- variable declarations or definitions. */
- if (debug_info_level <= DINFO_LEVEL_TERSE)
+ variable declarations or definitions unless it is external. */
+ if (debug_info_level < DINFO_LEVEL_TERSE
+ || (debug_info_level == DINFO_LEVEL_TERSE
+ && !TREE_PUBLIC (decl)))
return;
break;
/* One above highest N where .LVLN label might be equal to .Ltext0 label. */
static unsigned int first_loclabel_num_not_at_text_label;
-/* Look ahead for a real insn, or for a begin stmt marker. */
+/* Look ahead for a real insn. */
static rtx_insn *
dwarf2out_next_real_insn (rtx_insn *loc_note)
{
char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
struct var_loc_node *newloc;
- rtx_insn *next_real, *next_note;
+ rtx_insn *next_real;
rtx_insn *call_insn = NULL;
static const char *last_label;
static const char *last_postcall_label;
var_loc_p = false;
next_real = dwarf2out_next_real_insn (call_insn);
- next_note = NULL;
cached_next_real_insn = NULL;
goto create_label;
}
var_loc_p = false;
next_real = dwarf2out_next_real_insn (call_insn);
- next_note = NULL;
cached_next_real_insn = NULL;
goto create_label;
}
next_real = NULL;
}
- next_note = NEXT_INSN (loc_note);
- if (! next_note
- || next_note->deleted ()
- || ! NOTE_P (next_note)
- || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
- && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
- && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
- next_note = NULL;
-
if (! next_real)
next_real = dwarf2out_next_real_insn (loc_note);
- if (next_note)
+ if (next_real)
{
- expected_next_loc_note = next_note;
- cached_next_real_insn = next_real;
+ rtx_insn *next_note = NEXT_INSN (loc_note);
+ while (next_note != next_real)
+ {
+ if (! next_note->deleted ()
+ && NOTE_P (next_note)
+ && NOTE_KIND (next_note) == NOTE_INSN_VAR_LOCATION)
+ break;
+ next_note = NEXT_INSN (next_note);
+ }
+
+ if (next_note == next_real)
+ cached_next_real_insn = NULL;
+ else
+ {
+ expected_next_loc_note = next_note;
+ cached_next_real_insn = next_real;
+ }
}
else
cached_next_real_insn = NULL;
static void
dwarf2out_size_function (tree decl)
{
+ set_early_dwarf s;
function_to_dwarf_procedure (decl);
}
for (i = 0; base_types.iterate (i, &base_type); i++)
prune_unused_types_mark (base_type, 1);
- /* For -fvar-tracking-assignments, also set the mark on nodes that could be
- referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
- callees). */
+ /* Also set the mark on nodes that could be referenced by
+ DW_TAG_call_site DW_AT_call_origin (i.e. direct call callees) or
+ by DW_TAG_inlined_subroutine origins. */
cgraph_node *cnode;
FOR_EACH_FUNCTION (cnode)
if (cnode->referred_to_p (false))
if (die == NULL || die->die_mark)
continue;
for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
- if (e->caller != cnode
- && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
+ if (e->caller != cnode)
{
prune_unused_types_mark (die, 1);
break;
sure to adjust the phase after annotating the LTRANS CU DIE. */
if (in_lto_p)
{
- /* Force DW_TAG_imported_unit to be created now, otherwise
- we might end up without it or ordered after DW_TAG_inlined_subroutine
- referencing DIEs from it. */
- if (! flag_wpa && flag_incremental_link != INCREMENTAL_LINK_LTO)
- {
- unsigned i;
- tree tu;
- if (external_die_map)
- FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, tu)
- if (sym_off_pair *desc = external_die_map->get (tu))
- {
- dw_die_ref import = new_die (DW_TAG_imported_unit,
- comp_unit_die (), NULL_TREE);
- add_AT_external_die_ref (import, DW_AT_import,
- desc->sym, desc->off);
- }
- }
-
early_dwarf_finished = true;
if (dump_file)
{
emit full debugging info for them. */
retry_incomplete_types ();
+ gen_scheduled_generic_parms_dies ();
+ gen_remaining_tmpl_value_param_die_attribute ();
+
/* The point here is to flush out the limbo list so that it is empty
and we don't need to stream it for LTO. */
flush_limbo_die_list ();
- gen_scheduled_generic_parms_dies ();
- gen_remaining_tmpl_value_param_die_attribute ();
-
/* Add DW_AT_linkage_name for all deferred DIEs. */
for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
{
location related output removed and some LTO specific changes.
Some refactoring might make both smaller and easier to match up. */
- /* Traverse the DIE's and add add sibling attributes to those DIE's
+ /* Traverse the DIE's and add sibling attributes to those DIE's
that have children. */
add_sibling_attributes (comp_unit_die ());
for (limbo_die_node *node = limbo_die_list; node; node = node->next)