1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2018 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
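/* Illustrative example (editorial addition, not part of the original
   source): how the CFA rule typically evolves across an x86-64 prologue.
   On entry the CFA equals RSP + 8, because the call insn has already
   pushed the 8-byte return address.  Each CFI keeps the rule denoting
   the same fixed stack address even as registers change:

	push  %rbp		.cfi_def_cfa_offset 16	   (CFA = RSP + 16)
	mov   %rsp, %rbp	.cfi_def_cfa_register %rbp (CFA = RBP + 16)

   The register/offset pair changes, but the address it names does not.  */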
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105 static bool is_redundant_typedef (const_tree);
106
107 #ifndef XCOFF_DEBUGGING_INFO
108 #define XCOFF_DEBUGGING_INFO 0
109 #endif
110
111 #ifndef HAVE_XCOFF_DWARF_EXTRAS
112 #define HAVE_XCOFF_DWARF_EXTRAS 0
113 #endif
114
115 #ifdef VMS_DEBUGGING_INFO
116 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
117
118 /* Define this macro to be a nonzero value if the directory specifications
119 which are output in the debug info should end with a separator. */
120 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
121 /* Define this macro to evaluate to a nonzero value if GCC should refrain
122 from generating indirect strings in DWARF2 debug information, for instance
123 if your target is stuck with an old version of GDB that is unable to
124 process them properly or uses VMS Debug. */
125 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
126 #else
127 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
128 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
129 #endif
130
131 /* ??? Poison these here until it can be done generically. They've been
132 totally replaced in this file; make sure it stays that way. */
133 #undef DWARF2_UNWIND_INFO
134 #undef DWARF2_FRAME_INFO
135 #if (GCC_VERSION >= 3000)
136 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
137 #endif
138
139 /* The size of the target's pointer type. */
140 #ifndef PTR_SIZE
141 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
142 #endif
143
144 /* Array of RTXes referenced by the debugging information, which therefore
145 must be kept around forever. */
146 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
147
148 /* A pointer to the base of a list of incomplete types which might be
149 completed at some later time. incomplete_types_list needs to be a
150 vec<tree, va_gc> * because we want to tell the garbage collector about
151 it. */
152 static GTY(()) vec<tree, va_gc> *incomplete_types;
153
154 /* Pointers to various DWARF2 sections. */
155 static GTY(()) section *debug_info_section;
156 static GTY(()) section *debug_skeleton_info_section;
157 static GTY(()) section *debug_abbrev_section;
158 static GTY(()) section *debug_skeleton_abbrev_section;
159 static GTY(()) section *debug_aranges_section;
160 static GTY(()) section *debug_addr_section;
161 static GTY(()) section *debug_macinfo_section;
162 static const char *debug_macinfo_section_name;
163 static unsigned macinfo_label_base = 1;
164 static GTY(()) section *debug_line_section;
165 static GTY(()) section *debug_skeleton_line_section;
166 static GTY(()) section *debug_loc_section;
167 static GTY(()) section *debug_pubnames_section;
168 static GTY(()) section *debug_pubtypes_section;
169 static GTY(()) section *debug_str_section;
170 static GTY(()) section *debug_line_str_section;
171 static GTY(()) section *debug_str_dwo_section;
172 static GTY(()) section *debug_str_offsets_section;
173 static GTY(()) section *debug_ranges_section;
174 static GTY(()) section *debug_frame_section;
175
176 /* Maximum size (in bytes) of an artificially generated label. */
177 #define MAX_ARTIFICIAL_LABEL_BYTES 40
178
179 /* According to the (draft) DWARF 3 specification, the initial length
180 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
181 bytes are 0xffffffff, followed by the length stored in the next 8
182 bytes.
183
184 However, the SGI/MIPS ABI uses an initial length which is equal to
185 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
186
187 #ifndef DWARF_INITIAL_LENGTH_SIZE
188 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
189 #endif
190
191 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
192 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
193 #endif
194
195 /* Round SIZE up to the nearest BOUNDARY. */
196 #define DWARF_ROUND(SIZE,BOUNDARY) \
197 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
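/* Worked example (editorial addition): DWARF_ROUND (9, 4)
   = ((9 + 4 - 1) / 4) * 4 = (12 / 4) * 4 = 12.  */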
198
199 /* CIE identifier. */
200 #if HOST_BITS_PER_WIDE_INT >= 64
201 #define DWARF_CIE_ID \
202 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
203 #else
204 #define DWARF_CIE_ID DW_CIE_ID
205 #endif
206
207
208 /* A vector for a table that contains frame description
209 information for each routine. */
210 #define NOT_INDEXED (-1U)
211 #define NO_INDEX_ASSIGNED (-2U)
212
213 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
214
215 struct GTY((for_user)) indirect_string_node {
216 const char *str;
217 unsigned int refcount;
218 enum dwarf_form form;
219 char *label;
220 unsigned int index;
221 };
222
223 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
224 {
225 typedef const char *compare_type;
226
227 static hashval_t hash (indirect_string_node *);
228 static bool equal (indirect_string_node *, const char *);
229 };
230
231 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
232
233 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
234
235 /* With split_debug_info, both the comp_dir and dwo_name go in the
236 main object file, rather than the dwo, similar to the force_direct
237 parameter elsewhere but with additional complications:
238
239 1) The string is needed in both the main object file and the dwo.
240 That is, the comp_dir and dwo_name will appear in both places.
241
242 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
243 DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.
244
245 3) GCC chooses the form to use late, depending on the size and
246 reference count.
247
248 Rather than forcing all the debug string handling functions and
249 callers to deal with these complications, simply use a separate,
250 special-cased string table for any attribute that should go in the
251 main object file. This limits the complexity to just the places
252 that need it. */
253
254 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
255
256 static GTY(()) int dw2_string_counter;
257
258 /* True if the compilation unit places functions in more than one section. */
259 static GTY(()) bool have_multiple_function_sections = false;
260
261 /* Whether the default text and cold text sections have been used at all. */
262 static GTY(()) bool text_section_used = false;
263 static GTY(()) bool cold_text_section_used = false;
264
265 /* The default cold text section. */
266 static GTY(()) section *cold_text_section;
267
268 /* The DIE for C++14 'auto' in a function return type. */
269 static GTY(()) dw_die_ref auto_die;
270
271 /* The DIE for C++14 'decltype(auto)' in a function return type. */
272 static GTY(()) dw_die_ref decltype_auto_die;
273
274 /* Forward declarations for functions defined in this file. */
275
276 static void output_call_frame_info (int);
277 static void dwarf2out_note_section_used (void);
278
279 /* Personality decl of current unit. Used only when assembler does not support
280 personality CFI. */
281 static GTY(()) rtx current_unit_personality;
282
283 /* Whether an eh_frame section is required. */
284 static GTY(()) bool do_eh_frame = false;
285
286 /* .debug_rnglists next index. */
287 static unsigned int rnglist_idx;
288
289 /* Data and reference forms for relocatable data. */
290 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
291 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
292
293 #ifndef DEBUG_FRAME_SECTION
294 #define DEBUG_FRAME_SECTION ".debug_frame"
295 #endif
296
297 #ifndef FUNC_BEGIN_LABEL
298 #define FUNC_BEGIN_LABEL "LFB"
299 #endif
300
301 #ifndef FUNC_SECOND_SECT_LABEL
302 #define FUNC_SECOND_SECT_LABEL "LFSB"
303 #endif
304
305 #ifndef FUNC_END_LABEL
306 #define FUNC_END_LABEL "LFE"
307 #endif
308
309 #ifndef PROLOGUE_END_LABEL
310 #define PROLOGUE_END_LABEL "LPE"
311 #endif
312
313 #ifndef EPILOGUE_BEGIN_LABEL
314 #define EPILOGUE_BEGIN_LABEL "LEB"
315 #endif
316
317 #ifndef FRAME_BEGIN_LABEL
318 #define FRAME_BEGIN_LABEL "Lframe"
319 #endif
320 #define CIE_AFTER_SIZE_LABEL "LSCIE"
321 #define CIE_END_LABEL "LECIE"
322 #define FDE_LABEL "LSFDE"
323 #define FDE_AFTER_SIZE_LABEL "LASFDE"
324 #define FDE_END_LABEL "LEFDE"
325 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
326 #define LINE_NUMBER_END_LABEL "LELT"
327 #define LN_PROLOG_AS_LABEL "LASLTP"
328 #define LN_PROLOG_END_LABEL "LELTP"
329 #define DIE_LABEL_PREFIX "DW"
330 \f
331 /* Match the base name of a file to the base name of a compilation unit. */
332
333 static int
334 matches_main_base (const char *path)
335 {
336 /* Cache the last query. */
337 static const char *last_path = NULL;
338 static int last_match = 0;
339 if (path != last_path)
340 {
341 const char *base;
342 int length = base_of_path (path, &base);
343 last_path = path;
344 last_match = (length == main_input_baselength
345 && memcmp (base, main_input_basename, length) == 0);
346 }
347 return last_match;
348 }
349
350 #ifdef DEBUG_DEBUG_STRUCT
351
352 static int
353 dump_struct_debug (tree type, enum debug_info_usage usage,
354 enum debug_struct_file criterion, int generic,
355 int matches, int result)
356 {
357 /* Find the type name. */
358 tree type_decl = TYPE_STUB_DECL (type);
359 tree t = type_decl;
360 const char *name = 0;
361 if (TREE_CODE (t) == TYPE_DECL)
362 t = DECL_NAME (t);
363 if (t)
364 name = IDENTIFIER_POINTER (t);
365
366 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
367 criterion,
368 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
369 matches ? "bas" : "hdr",
370 generic ? "gen" : "ord",
371 usage == DINFO_USAGE_DFN ? ";" :
372 usage == DINFO_USAGE_DIR_USE ? "." : "*",
373 result,
374 (void*) type_decl, name);
375 return result;
376 }
377 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
378 dump_struct_debug (type, usage, criterion, generic, matches, result)
379
380 #else
381
382 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
383 (result)
384
385 #endif
386
387 /* Get the number of HOST_WIDE_INTs needed to represent the precision
388 of the number. Some constants have a large uniform precision, so
389 we get the precision needed for the actual value of the number. */
390
391 static unsigned int
392 get_full_len (const wide_int &op)
393 {
394 int prec = wi::min_precision (op, UNSIGNED);
395 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
396 / HOST_BITS_PER_WIDE_INT);
397 }
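/* Worked example (editorial addition): with HOST_BITS_PER_WIDE_INT == 64,
   a value whose minimum precision is 70 bits needs
   (70 + 64 - 1) / 64 = 2 HOST_WIDE_INTs.  */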
398
399 static bool
400 should_emit_struct_debug (tree type, enum debug_info_usage usage)
401 {
402 enum debug_struct_file criterion;
403 tree type_decl;
404 bool generic = lang_hooks.types.generic_p (type);
405
406 if (generic)
407 criterion = debug_struct_generic[usage];
408 else
409 criterion = debug_struct_ordinary[usage];
410
411 if (criterion == DINFO_STRUCT_FILE_NONE)
412 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
413 if (criterion == DINFO_STRUCT_FILE_ANY)
414 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
415
416 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
417
418 if (type_decl != NULL)
419 {
420 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
421 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
422
423 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
424 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
425 }
426
427 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
428 }
429 \f
430 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
431 switch to the data section instead, and write out a synthetic start label
432 for collect2 the first time around. */
433
434 static void
435 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
436 {
437 if (eh_frame_section == 0)
438 {
439 int flags;
440
441 if (EH_TABLES_CAN_BE_READ_ONLY)
442 {
443 int fde_encoding;
444 int per_encoding;
445 int lsda_encoding;
446
447 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
448 /*global=*/0);
449 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
450 /*global=*/1);
451 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
452 /*global=*/0);
453 flags = ((! flag_pic
454 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
455 && (fde_encoding & 0x70) != DW_EH_PE_aligned
456 && (per_encoding & 0x70) != DW_EH_PE_absptr
457 && (per_encoding & 0x70) != DW_EH_PE_aligned
458 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
459 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
460 ? 0 : SECTION_WRITE);
461 }
462 else
463 flags = SECTION_WRITE;
464
465 #ifdef EH_FRAME_SECTION_NAME
466 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
467 #else
468 eh_frame_section = ((flags == SECTION_WRITE)
469 ? data_section : readonly_data_section);
470 #endif /* EH_FRAME_SECTION_NAME */
471 }
472
473 switch_to_section (eh_frame_section);
474
475 #ifdef EH_FRAME_THROUGH_COLLECT2
476 /* We have no special eh_frame section. Emit special labels to guide
477 collect2. */
478 if (!back)
479 {
480 tree label = get_file_function_name ("F");
481 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
482 targetm.asm_out.globalize_label (asm_out_file,
483 IDENTIFIER_POINTER (label));
484 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
485 }
486 #endif
487 }
488
489 /* Switch [BACK] to the eh or debug frame table section, depending on
490 FOR_EH. */
491
492 static void
493 switch_to_frame_table_section (int for_eh, bool back)
494 {
495 if (for_eh)
496 switch_to_eh_frame_section (back);
497 else
498 {
499 if (!debug_frame_section)
500 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
501 SECTION_DEBUG, NULL);
502 switch_to_section (debug_frame_section);
503 }
504 }
505
506 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
507
508 enum dw_cfi_oprnd_type
509 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
510 {
511 switch (cfi)
512 {
513 case DW_CFA_nop:
514 case DW_CFA_GNU_window_save:
515 case DW_CFA_remember_state:
516 case DW_CFA_restore_state:
517 return dw_cfi_oprnd_unused;
518
519 case DW_CFA_set_loc:
520 case DW_CFA_advance_loc1:
521 case DW_CFA_advance_loc2:
522 case DW_CFA_advance_loc4:
523 case DW_CFA_MIPS_advance_loc8:
524 return dw_cfi_oprnd_addr;
525
526 case DW_CFA_offset:
527 case DW_CFA_offset_extended:
528 case DW_CFA_def_cfa:
529 case DW_CFA_offset_extended_sf:
530 case DW_CFA_def_cfa_sf:
531 case DW_CFA_restore:
532 case DW_CFA_restore_extended:
533 case DW_CFA_undefined:
534 case DW_CFA_same_value:
535 case DW_CFA_def_cfa_register:
536 case DW_CFA_register:
537 case DW_CFA_expression:
538 case DW_CFA_val_expression:
539 return dw_cfi_oprnd_reg_num;
540
541 case DW_CFA_def_cfa_offset:
542 case DW_CFA_GNU_args_size:
543 case DW_CFA_def_cfa_offset_sf:
544 return dw_cfi_oprnd_offset;
545
546 case DW_CFA_def_cfa_expression:
547 return dw_cfi_oprnd_loc;
548
549 default:
550 gcc_unreachable ();
551 }
552 }
553
554 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
555
556 enum dw_cfi_oprnd_type
557 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
558 {
559 switch (cfi)
560 {
561 case DW_CFA_def_cfa:
562 case DW_CFA_def_cfa_sf:
563 case DW_CFA_offset:
564 case DW_CFA_offset_extended_sf:
565 case DW_CFA_offset_extended:
566 return dw_cfi_oprnd_offset;
567
568 case DW_CFA_register:
569 return dw_cfi_oprnd_reg_num;
570
571 case DW_CFA_expression:
572 case DW_CFA_val_expression:
573 return dw_cfi_oprnd_loc;
574
575 case DW_CFA_def_cfa_expression:
576 return dw_cfi_oprnd_cfa_loc;
577
578 default:
579 return dw_cfi_oprnd_unused;
580 }
581 }
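/* Worked example (editorial addition, not part of the original source):
   for the CFI "DW_CFA_offset <reg> <offset>", dw_cfi_oprnd1_desc returns
   dw_cfi_oprnd_reg_num and dw_cfi_oprnd2_desc returns dw_cfi_oprnd_offset,
   telling the GTY machinery that the first operand holds a register number
   and the second a factored offset.  */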
582
583 /* Output one FDE. */
584
585 static void
586 output_fde (dw_fde_ref fde, bool for_eh, bool second,
587 char *section_start_label, int fde_encoding, char *augmentation,
588 bool any_lsda_needed, int lsda_encoding)
589 {
590 const char *begin, *end;
591 static unsigned int j;
592 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
593
594 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
595 /* empty */ 0);
596 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
597 for_eh + j);
598 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
599 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
600 if (!XCOFF_DEBUGGING_INFO || for_eh)
601 {
602 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
603 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
604 " indicating 64-bit DWARF extension");
605 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
606 "FDE Length");
607 }
608 ASM_OUTPUT_LABEL (asm_out_file, l1);
609
610 if (for_eh)
611 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
612 else
613 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
614 debug_frame_section, "FDE CIE offset");
615
616 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
617 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
618
619 if (for_eh)
620 {
621 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
622 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
623 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
624 "FDE initial location");
625 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
626 end, begin, "FDE address range");
627 }
628 else
629 {
630 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
631 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
632 }
633
634 if (augmentation[0])
635 {
636 if (any_lsda_needed)
637 {
638 int size = size_of_encoded_value (lsda_encoding);
639
640 if (lsda_encoding == DW_EH_PE_aligned)
641 {
642 int offset = ( 4 /* Length */
643 + 4 /* CIE offset */
644 + 2 * size_of_encoded_value (fde_encoding)
645 + 1 /* Augmentation size */ );
646 int pad = -offset & (PTR_SIZE - 1);
647
648 size += pad;
649 gcc_assert (size_of_uleb128 (size) == 1);
650 }
651
652 dw2_asm_output_data_uleb128 (size, "Augmentation size");
653
654 if (fde->uses_eh_lsda)
655 {
656 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
657 fde->funcdef_number);
658 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
659 gen_rtx_SYMBOL_REF (Pmode, l1),
660 false,
661 "Language Specific Data Area");
662 }
663 else
664 {
665 if (lsda_encoding == DW_EH_PE_aligned)
666 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
667 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
668 "Language Specific Data Area (none)");
669 }
670 }
671 else
672 dw2_asm_output_data_uleb128 (0, "Augmentation size");
673 }
674
675 /* Loop through the Call Frame Instructions associated with this FDE. */
676 fde->dw_fde_current_label = begin;
677 {
678 size_t from, until, i;
679
680 from = 0;
681 until = vec_safe_length (fde->dw_fde_cfi);
682
683 if (fde->dw_fde_second_begin == NULL)
684 ;
685 else if (!second)
686 until = fde->dw_fde_switch_cfi_index;
687 else
688 from = fde->dw_fde_switch_cfi_index;
689
690 for (i = from; i < until; i++)
691 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
692 }
693
694 /* If we are to emit a ref/link from function bodies to their frame tables,
695 do it now. This is typically performed to make sure that tables
696 associated with functions are dragged along with them and not discarded by
697 garbage-collecting links. We need to do this on a per-function basis to
698 cope with -ffunction-sections. */
699
700 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
701 /* Switch to the function section, emit the ref to the tables, and
702 switch *back* into the table section. */
703 switch_to_section (function_section (fde->decl));
704 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
705 switch_to_frame_table_section (for_eh, true);
706 #endif
707
708 /* Pad the FDE out to an address sized boundary. */
709 ASM_OUTPUT_ALIGN (asm_out_file,
710 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
711 ASM_OUTPUT_LABEL (asm_out_file, l2);
712
713 j += 2;
714 }
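/* Illustrative sketch (editorial addition, not part of the original
   source) of the .eh_frame assembly output_fde produces for one function;
   the label names and the pcrel/sdata4 encoding are hypothetical:

	.long	.LEFDE1-.LASFDE1	# FDE Length
   .LASFDE1:
	.long	.LASFDE1-.Lframe1	# FDE CIE offset
	.long	.LFB0-.			# FDE initial location
	.long	.LFE0-.LFB0		# FDE address range
	.uleb128 0			# Augmentation size
	...				# call frame instructions
	.align	8			# pad to address-size boundary
   .LEFDE1:  */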
715
716 /* Return true if frame description entry FDE is needed for EH. */
717
718 static bool
719 fde_needed_for_eh_p (dw_fde_ref fde)
720 {
721 if (flag_asynchronous_unwind_tables)
722 return true;
723
724 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
725 return true;
726
727 if (fde->uses_eh_lsda)
728 return true;
729
730 /* If exceptions are enabled, we have collected nothrow info. */
731 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
732 return false;
733
734 return true;
735 }
736
737 /* Output the call frame information used to record information
738 that relates to calculating the frame pointer, and records the
739 location of saved registers. */
740
741 static void
742 output_call_frame_info (int for_eh)
743 {
744 unsigned int i;
745 dw_fde_ref fde;
746 dw_cfi_ref cfi;
747 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
748 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
749 bool any_lsda_needed = false;
750 char augmentation[6];
751 int augmentation_size;
752 int fde_encoding = DW_EH_PE_absptr;
753 int per_encoding = DW_EH_PE_absptr;
754 int lsda_encoding = DW_EH_PE_absptr;
755 int return_reg;
756 rtx personality = NULL;
757 int dw_cie_version;
758
759 /* Don't emit a CIE if there won't be any FDEs. */
760 if (!fde_vec)
761 return;
762
763 /* Nothing to do if the assembler's doing it all. */
764 if (dwarf2out_do_cfi_asm ())
765 return;
766
767 /* If we don't have any functions we'll want to unwind out of, don't emit
768 any EH unwind information. If we make FDEs linkonce, we may have to
769 emit an empty label for an FDE that wouldn't otherwise be emitted. We
770 want to avoid having an FDE kept around when the function it refers to
771 is discarded. Example where this matters: a primary function template
772 in C++ requires EH information, an explicit specialization doesn't. */
773 if (for_eh)
774 {
775 bool any_eh_needed = false;
776
777 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
778 {
779 if (fde->uses_eh_lsda)
780 any_eh_needed = any_lsda_needed = true;
781 else if (fde_needed_for_eh_p (fde))
782 any_eh_needed = true;
783 else if (TARGET_USES_WEAK_UNWIND_INFO)
784 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
785 }
786
787 if (!any_eh_needed)
788 return;
789 }
790
791 /* We're going to be generating comments, so turn on app. */
792 if (flag_debug_asm)
793 app_enable ();
794
795 /* Switch to the proper frame section, first time. */
796 switch_to_frame_table_section (for_eh, false);
797
798 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
799 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
800
801 /* Output the CIE. */
802 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
803 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
804 if (!XCOFF_DEBUGGING_INFO || for_eh)
805 {
806 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
807 dw2_asm_output_data (4, 0xffffffff,
808 "Initial length escape value indicating 64-bit DWARF extension");
809 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
810 "Length of Common Information Entry");
811 }
812 ASM_OUTPUT_LABEL (asm_out_file, l1);
813
814 /* Now that the CIE pointer is PC-relative for EH,
815 use 0 to identify the CIE. */
816 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
817 (for_eh ? 0 : DWARF_CIE_ID),
818 "CIE Identifier Tag");
819
820 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
821 use CIE version 1, unless that would produce incorrect results
822 due to overflowing the return register column. */
823 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
824 dw_cie_version = 1;
825 if (return_reg >= 256 || dwarf_version > 2)
826 dw_cie_version = 3;
827 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
828
829 augmentation[0] = 0;
830 augmentation_size = 0;
831
832 personality = current_unit_personality;
833 if (for_eh)
834 {
835 char *p;
836
837 /* Augmentation:
838 z Indicates that a uleb128 is present to size the
839 augmentation section.
840 L Indicates the encoding (and thus presence) of
841 an LSDA pointer in the FDE augmentation.
842 R Indicates a non-default pointer encoding for
843 FDE code pointers.
844 P Indicates the presence of an encoding + language
845 personality routine in the CIE augmentation. */
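/* Illustrative example (editorial addition, not part of the original
   source): a CIE that has a personality routine, LSDA pointers and a
   non-default FDE encoding gets the augmentation string "zPLR"; the
   augmentation data that follows then holds the personality encoding
   and pointer (P), the LSDA encoding (L) and the FDE pointer encoding
   (R), in the same order as the letters.  */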
846
847 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
848 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
849 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
850
851 p = augmentation + 1;
852 if (personality)
853 {
854 *p++ = 'P';
855 augmentation_size += 1 + size_of_encoded_value (per_encoding);
856 assemble_external_libcall (personality);
857 }
858 if (any_lsda_needed)
859 {
860 *p++ = 'L';
861 augmentation_size += 1;
862 }
863 if (fde_encoding != DW_EH_PE_absptr)
864 {
865 *p++ = 'R';
866 augmentation_size += 1;
867 }
868 if (p > augmentation + 1)
869 {
870 augmentation[0] = 'z';
871 *p = '\0';
872 }
873
874 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
875 if (personality && per_encoding == DW_EH_PE_aligned)
876 {
877 int offset = ( 4 /* Length */
878 + 4 /* CIE Id */
879 + 1 /* CIE version */
880 + strlen (augmentation) + 1 /* Augmentation */
881 + size_of_uleb128 (1) /* Code alignment */
882 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
883 + 1 /* RA column */
884 + 1 /* Augmentation size */
885 + 1 /* Personality encoding */ );
886 int pad = -offset & (PTR_SIZE - 1);
887
888 augmentation_size += pad;
889
890 /* Augmentations should be small, so there's scarce need to
891 iterate for a solution. Die if we exceed one uleb128 byte. */
892 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
893 }
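/* Worked example (editorial addition, with hypothetical numbers): if
   PTR_SIZE were 8 and the header offset computed above were 13, then
   pad = -13 & 7 = 3, augmentation_size grows by 3, and 13 + 3 = 16 is
   again a multiple of PTR_SIZE, so the personality pointer lands on an
   aligned address.  */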
894 }
895
896 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
897 if (dw_cie_version >= 4)
898 {
899 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
900 dw2_asm_output_data (1, 0, "CIE Segment Size");
901 }
902 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
903 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
904 "CIE Data Alignment Factor");
905
906 if (dw_cie_version == 1)
907 dw2_asm_output_data (1, return_reg, "CIE RA Column");
908 else
909 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
910
911 if (augmentation[0])
912 {
913 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
914 if (personality)
915 {
916 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
917 eh_data_format_name (per_encoding));
918 dw2_asm_output_encoded_addr_rtx (per_encoding,
919 personality,
920 true, NULL);
921 }
922
923 if (any_lsda_needed)
924 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
925 eh_data_format_name (lsda_encoding));
926
927 if (fde_encoding != DW_EH_PE_absptr)
928 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
929 eh_data_format_name (fde_encoding));
930 }
931
932 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
933 output_cfi (cfi, NULL, for_eh);
934
935 /* Pad the CIE out to an address sized boundary. */
936 ASM_OUTPUT_ALIGN (asm_out_file,
937 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
938 ASM_OUTPUT_LABEL (asm_out_file, l2);
939
940 /* Loop through all of the FDE's. */
941 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
942 {
943 unsigned int k;
944
945 /* Don't emit EH unwind info for functions that don't need it. */
946 if (for_eh && !fde_needed_for_eh_p (fde))
947 continue;
948
949 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
950 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
951 augmentation, any_lsda_needed, lsda_encoding);
952 }
953
954 if (for_eh && targetm.terminate_dw2_eh_frame_info)
955 dw2_asm_output_data (4, 0, "End of Table");
956
957 /* Turn off app to make assembly quicker. */
958 if (flag_debug_asm)
959 app_disable ();
960 }
961
962 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
963
964 static void
965 dwarf2out_do_cfi_startproc (bool second)
966 {
967 int enc;
968 rtx ref;
969
970 fprintf (asm_out_file, "\t.cfi_startproc\n");
971
972 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
973 eh unwinders. */
974 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
975 return;
976
977 rtx personality = get_personality_function (current_function_decl);
978
979 if (personality)
980 {
981 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
982 ref = personality;
983
984 /* ??? The GAS support isn't entirely consistent. We have to
985 handle indirect support ourselves, but PC-relative is done
986 in the assembler. Further, the assembler can't handle any
987 of the weirder relocation types. */
988 if (enc & DW_EH_PE_indirect)
989 ref = dw2_force_const_mem (ref, true);
990
991 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
992 output_addr_const (asm_out_file, ref);
993 fputc ('\n', asm_out_file);
994 }
995
996 if (crtl->uses_eh_lsda)
997 {
998 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
999
1000 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1001 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1002 current_function_funcdef_no);
1003 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1004 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1005
1006 if (enc & DW_EH_PE_indirect)
1007 ref = dw2_force_const_mem (ref, true);
1008
1009 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1010 output_addr_const (asm_out_file, ref);
1011 fputc ('\n', asm_out_file);
1012 }
1013 }
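/* Illustrative sketch (editorial addition, not part of the original
   source) of the directives emitted above for a function with landing
   pads, assuming indirect + pcrel + sdata4 preferred encodings; the
   label and symbol names are hypothetical:

	.cfi_startproc
	.cfi_personality 0x9b,DW.ref.__gxx_personality_v0
	.cfi_lsda 0x1b,.LLSDA0
*/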
1014
1015 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1016 this allocation may be done before pass_final. */
1017
1018 dw_fde_ref
1019 dwarf2out_alloc_current_fde (void)
1020 {
1021 dw_fde_ref fde;
1022
1023 fde = ggc_cleared_alloc<dw_fde_node> ();
1024 fde->decl = current_function_decl;
1025 fde->funcdef_number = current_function_funcdef_no;
1026 fde->fde_index = vec_safe_length (fde_vec);
1027 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1028 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1029 fde->nothrow = crtl->nothrow;
1030 fde->drap_reg = INVALID_REGNUM;
1031 fde->vdrap_reg = INVALID_REGNUM;
1032
1033 /* Record the FDE associated with this function. */
1034 cfun->fde = fde;
1035 vec_safe_push (fde_vec, fde);
1036
1037 return fde;
1038 }
1039
1040 /* Output a marker (i.e. a label) for the beginning of a function, before
1041 the prologue. */
1042
1043 void
1044 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1045 unsigned int column ATTRIBUTE_UNUSED,
1046 const char *file ATTRIBUTE_UNUSED)
1047 {
1048 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1049 char * dup_label;
1050 dw_fde_ref fde;
1051 section *fnsec;
1052 bool do_frame;
1053
1054 current_function_func_begin_label = NULL;
1055
1056 do_frame = dwarf2out_do_frame ();
1057
1058 /* ??? current_function_func_begin_label is also used by except.c for
1059 call-site information. We must emit this label if it might be used. */
1060 if (!do_frame
1061 && (!flag_exceptions
1062 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1063 return;
1064
1065 fnsec = function_section (current_function_decl);
1066 switch_to_section (fnsec);
1067 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1068 current_function_funcdef_no);
1069 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1070 current_function_funcdef_no);
1071 dup_label = xstrdup (label);
1072 current_function_func_begin_label = dup_label;
1073
1074 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1075 if (!do_frame)
1076 return;
1077
1078 /* Unlike the debug version, the EH version of frame unwind info is a per-
1079 function setting so we need to record whether we need it for the unit. */
1080 do_eh_frame |= dwarf2out_do_eh_frame ();
1081
1082 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1083 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1084 would include pass_dwarf2_frame. If we've not created the FDE yet,
1085 do so now. */
1086 fde = cfun->fde;
1087 if (fde == NULL)
1088 fde = dwarf2out_alloc_current_fde ();
1089
1090 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1091 fde->dw_fde_begin = dup_label;
1092 fde->dw_fde_current_label = dup_label;
1093 fde->in_std_section = (fnsec == text_section
1094 || (cold_text_section && fnsec == cold_text_section));
1095
1096 /* We only want to output line number information for the genuine dwarf2
1097 prologue case, not the eh frame case. */
1098 #ifdef DWARF2_DEBUGGING_INFO
1099 if (file)
1100 dwarf2out_source_line (line, column, file, 0, true);
1101 #endif
1102
1103 if (dwarf2out_do_cfi_asm ())
1104 dwarf2out_do_cfi_startproc (false);
1105 else
1106 {
1107 rtx personality = get_personality_function (current_function_decl);
1108 if (!current_unit_personality)
1109 current_unit_personality = personality;
1110
1111 /* We cannot keep a current personality per function because, without
1112 CFI asm, there is no current function anymore at the point where we
1113 emit the CFI data. */
1114 if (personality && current_unit_personality != personality)
1115 sorry ("multiple EH personalities are supported only with assemblers "
1116 "supporting .cfi_personality directive");
1117 }
1118 }
1119
1120 /* Output a marker (i.e. a label) for the end of the generated code
1121 for a function prologue. This gets called *after* the prologue code has
1122 been generated. */
1123
1124 void
1125 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1126 const char *file ATTRIBUTE_UNUSED)
1127 {
1128 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1129
1130 /* Output a label to mark the end of the prologue code generated for
1131 this function. */
1132 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1133 current_function_funcdef_no);
1134 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1135 current_function_funcdef_no);
1136 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1137 }
1138
1139 /* Output a marker (i.e. a label) for the beginning of the generated code
1140 for a function epilogue. This gets called *before* the epilogue code has
1141 been generated. */
1142
1143 void
1144 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1145 const char *file ATTRIBUTE_UNUSED)
1146 {
1147 dw_fde_ref fde = cfun->fde;
1148 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1149
1150 if (fde->dw_fde_vms_begin_epilogue)
1151 return;
1152
1153 /* Output a label to mark the beginning of the epilogue code generated
1154 for this function. */
1155 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1156 current_function_funcdef_no);
1157 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1158 current_function_funcdef_no);
1159 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1160 }
1161
1162 /* Output a marker (i.e. a label) for the absolute end of the generated code
1163 for a function definition. This gets called *after* the epilogue code has
1164 been generated. */
1165
1166 void
1167 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1168 const char *file ATTRIBUTE_UNUSED)
1169 {
1170 dw_fde_ref fde;
1171 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1172
1173 last_var_location_insn = NULL;
1174 cached_next_real_insn = NULL;
1175
1176 if (dwarf2out_do_cfi_asm ())
1177 fprintf (asm_out_file, "\t.cfi_endproc\n");
1178
1179 /* Output a label to mark the endpoint of the code generated for this
1180 function. */
1181 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1182 current_function_funcdef_no);
1183 ASM_OUTPUT_LABEL (asm_out_file, label);
1184 fde = cfun->fde;
1185 gcc_assert (fde != NULL);
1186 if (fde->dw_fde_second_begin == NULL)
1187 fde->dw_fde_end = xstrdup (label);
1188 }
1189
1190 void
1191 dwarf2out_frame_finish (void)
1192 {
1193 /* Output call frame information. */
1194 if (targetm.debug_unwind_info () == UI_DWARF2)
1195 output_call_frame_info (0);
1196
1197 /* Output another copy for the unwinder. */
1198 if (do_eh_frame)
1199 output_call_frame_info (1);
1200 }
1201
1202 /* Note that the current function section is being used for code. */
1203
1204 static void
1205 dwarf2out_note_section_used (void)
1206 {
1207 section *sec = current_function_section ();
1208 if (sec == text_section)
1209 text_section_used = true;
1210 else if (sec == cold_text_section)
1211 cold_text_section_used = true;
1212 }
1213
1214 static void var_location_switch_text_section (void);
1215 static void set_cur_line_info_table (section *);
1216
1217 void
1218 dwarf2out_switch_text_section (void)
1219 {
1220 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1221 section *sect;
1222 dw_fde_ref fde = cfun->fde;
1223
1224 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1225
1226 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_SECOND_SECT_LABEL,
1227 current_function_funcdef_no);
1228
1229 fde->dw_fde_second_begin = ggc_strdup (label);
1230 if (!in_cold_section_p)
1231 {
1232 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1233 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1234 }
1235 else
1236 {
1237 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1238 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1239 }
1240 have_multiple_function_sections = true;
1241
1242 /* There is no need to mark used sections when not debugging. */
1243 if (cold_text_section != NULL)
1244 dwarf2out_note_section_used ();
1245
1246 if (dwarf2out_do_cfi_asm ())
1247 fprintf (asm_out_file, "\t.cfi_endproc\n");
1248
1249 /* Now do the real section switch. */
1250 sect = current_function_section ();
1251 switch_to_section (sect);
1252
1253 fde->second_in_std_section
1254 = (sect == text_section
1255 || (cold_text_section && sect == cold_text_section));
1256
1257 if (dwarf2out_do_cfi_asm ())
1258 dwarf2out_do_cfi_startproc (true);
1259
1260 var_location_switch_text_section ();
1261
1262 if (cold_text_section != NULL)
1263 set_cur_line_info_table (sect);
1264 }
1265 \f
1266 /* And now, the subset of the debugging information support code necessary
1267 for emitting location expressions. */
1268
1269 /* Data about a single source file. */
1270 struct GTY((for_user)) dwarf_file_data {
1271 const char * filename;
1272 int emitted_number;
1273 };
1274
1275 /* Describe an entry into the .debug_addr section. */
1276
1277 enum ate_kind {
1278 ate_kind_rtx,
1279 ate_kind_rtx_dtprel,
1280 ate_kind_label
1281 };
1282
1283 struct GTY((for_user)) addr_table_entry {
1284 enum ate_kind kind;
1285 unsigned int refcount;
1286 unsigned int index;
1287 union addr_table_entry_struct_union
1288 {
1289 rtx GTY ((tag ("0"))) rtl;
1290 char * GTY ((tag ("1"))) label;
1291 }
1292 GTY ((desc ("%1.kind"))) addr;
1293 };
1294
1295 typedef unsigned int var_loc_view;
1296
1297 /* Location lists are ranges + location descriptions for that range,
1298 so you can track variables that are in different places over
1299 their entire life. */
1300 typedef struct GTY(()) dw_loc_list_struct {
1301 dw_loc_list_ref dw_loc_next;
1302 const char *begin; /* Label and addr_entry for start of range */
1303 addr_table_entry *begin_entry;
1304 const char *end; /* Label for end of range */
1305 char *ll_symbol; /* Label for beginning of location list.
1306 Only on head of list. */
1307 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1308 const char *section; /* Section this loclist is relative to */
1309 dw_loc_descr_ref expr;
1310 var_loc_view vbegin, vend;
1311 hashval_t hash;
1312 /* True if all addresses in this and subsequent lists are known to be
1313 resolved. */
1314 bool resolved_addr;
1315 /* True if this list has been replaced by dw_loc_next. */
1316 bool replaced;
1317 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1318 section. */
1319 unsigned char emitted : 1;
1320 /* True if hash field is index rather than hash value. */
1321 unsigned char num_assigned : 1;
1322 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1323 unsigned char offset_emitted : 1;
1324 /* True if note_variable_value_in_expr has been called on it. */
1325 unsigned char noted_variable_value : 1;
1326 /* True if the range should be emitted even if begin and end
1327 are the same. */
1328 bool force;
1329 } dw_loc_list_node;
1330
1331 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1332 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1333
1334 /* Convert a DWARF stack opcode into its string name. */
1335
1336 static const char *
1337 dwarf_stack_op_name (unsigned int op)
1338 {
1339 const char *name = get_DW_OP_name (op);
1340
1341 if (name != NULL)
1342 return name;
1343
1344 return "OP_<unknown>";
1345 }
1346
1347 /* Return TRUE iff we're to output location view lists as a separate
1348 attribute next to the location lists, as an extension compatible
1349 with DWARF 2 and above. */
1350
1351 static inline bool
1352 dwarf2out_locviews_in_attribute ()
1353 {
1354 return debug_variable_location_views == 1;
1355 }
1356
1357 /* Return TRUE iff we're to output location view lists as part of the
1358 location lists, as proposed for standardization after DWARF 5. */
1359
1360 static inline bool
1361 dwarf2out_locviews_in_loclist ()
1362 {
1363 #ifndef DW_LLE_view_pair
1364 return false;
1365 #else
1366 return debug_variable_location_views == -1;
1367 #endif
1368 }
1369
1370 /* Return a pointer to a newly allocated location description. Location
1371 descriptions are simple expression terms that can be strung
1372 together to form more complicated location (address) descriptions. */
1373
1374 static inline dw_loc_descr_ref
1375 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1376 unsigned HOST_WIDE_INT oprnd2)
1377 {
1378 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1379
1380 descr->dw_loc_opc = op;
1381 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1382 descr->dw_loc_oprnd1.val_entry = NULL;
1383 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1384 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1385 descr->dw_loc_oprnd2.val_entry = NULL;
1386 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1387
1388 return descr;
1389 }
1390
1391 /* Add a location description term to a location description expression. */
1392
1393 static inline void
1394 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1395 {
1396 dw_loc_descr_ref *d;
1397
1398 /* Find the end of the chain. */
1399 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1400 ;
1401
1402 *d = descr;
1403 }
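/* Illustrative sketch (editorial addition, not part of the original
   source): the intended call pattern for the two helpers above. The
   hypothetical function below is excluded from compilation and only
   shows how a small expression is chained together. */
#if 0
static dw_loc_descr_ref
example_bregx_deref (unsigned int reg, HOST_WIDE_INT offset)
{
  /* "DW_OP_bregx reg, offset" pushes the value reg + offset...  */
  dw_loc_descr_ref expr = new_loc_descr (DW_OP_bregx, reg, offset);
  /* ...and DW_OP_deref replaces it with the word stored at that address.  */
  add_loc_descr (&expr, new_loc_descr (DW_OP_deref, 0, 0));
  return expr;
}
#endif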
1404
1405 /* Compare two location operands for exact equality. */
1406
1407 static bool
1408 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1409 {
1410 if (a->val_class != b->val_class)
1411 return false;
1412 switch (a->val_class)
1413 {
1414 case dw_val_class_none:
1415 return true;
1416 case dw_val_class_addr:
1417 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1418
1419 case dw_val_class_offset:
1420 case dw_val_class_unsigned_const:
1421 case dw_val_class_const:
1422 case dw_val_class_unsigned_const_implicit:
1423 case dw_val_class_const_implicit:
1424 case dw_val_class_range_list:
1425 /* These are all HOST_WIDE_INT, signed or unsigned. */
1426 return a->v.val_unsigned == b->v.val_unsigned;
1427
1428 case dw_val_class_loc:
1429 return a->v.val_loc == b->v.val_loc;
1430 case dw_val_class_loc_list:
1431 return a->v.val_loc_list == b->v.val_loc_list;
1432 case dw_val_class_view_list:
1433 return a->v.val_view_list == b->v.val_view_list;
1434 case dw_val_class_die_ref:
1435 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1436 case dw_val_class_fde_ref:
1437 return a->v.val_fde_index == b->v.val_fde_index;
1438 case dw_val_class_symview:
1439 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1440 case dw_val_class_lbl_id:
1441 case dw_val_class_lineptr:
1442 case dw_val_class_macptr:
1443 case dw_val_class_loclistsptr:
1444 case dw_val_class_high_pc:
1445 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1446 case dw_val_class_str:
1447 return a->v.val_str == b->v.val_str;
1448 case dw_val_class_flag:
1449 return a->v.val_flag == b->v.val_flag;
1450 case dw_val_class_file:
1451 case dw_val_class_file_implicit:
1452 return a->v.val_file == b->v.val_file;
1453 case dw_val_class_decl_ref:
1454 return a->v.val_decl_ref == b->v.val_decl_ref;
1455
1456 case dw_val_class_const_double:
1457 return (a->v.val_double.high == b->v.val_double.high
1458 && a->v.val_double.low == b->v.val_double.low);
1459
1460 case dw_val_class_wide_int:
1461 return *a->v.val_wide == *b->v.val_wide;
1462
1463 case dw_val_class_vec:
1464 {
1465 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1466 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1467
1468 return (a_len == b_len
1469 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1470 }
1471
1472 case dw_val_class_data8:
1473 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1474
1475 case dw_val_class_vms_delta:
1476 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1477 && !strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1));
1478
1479 case dw_val_class_discr_value:
1480 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1481 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1482 case dw_val_class_discr_list:
1483 /* It makes no sense comparing two discriminant value lists. */
1484 return false;
1485 }
1486 gcc_unreachable ();
1487 }
1488
1489 /* Compare two location atoms for exact equality. */
1490
1491 static bool
1492 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1493 {
1494 if (a->dw_loc_opc != b->dw_loc_opc)
1495 return false;
1496
1497 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1498 address size, but since we always allocate cleared storage it
1499 should be zero for other types of locations. */
1500 if (a->dtprel != b->dtprel)
1501 return false;
1502
1503 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1504 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1505 }
1506
1507 /* Compare two complete location expressions for exact equality. */
1508
1509 bool
1510 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1511 {
1512 while (1)
1513 {
1514 if (a == b)
1515 return true;
1516 if (a == NULL || b == NULL)
1517 return false;
1518 if (!loc_descr_equal_p_1 (a, b))
1519 return false;
1520
1521 a = a->dw_loc_next;
1522 b = b->dw_loc_next;
1523 }
1524 }
1525
1526
1527 /* Add a constant POLY_OFFSET to a location expression. */
1528
1529 static void
1530 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1531 {
1532 dw_loc_descr_ref loc;
1533 HOST_WIDE_INT *p;
1534
1535 gcc_assert (*list_head != NULL);
1536
1537 if (known_eq (poly_offset, 0))
1538 return;
1539
1540 /* Find the end of the chain. */
1541 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1542 ;
1543
1544 HOST_WIDE_INT offset;
1545 if (!poly_offset.is_constant (&offset))
1546 {
1547 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1548 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1549 return;
1550 }
1551
1552 p = NULL;
1553 if (loc->dw_loc_opc == DW_OP_fbreg
1554 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1555 p = &loc->dw_loc_oprnd1.v.val_int;
1556 else if (loc->dw_loc_opc == DW_OP_bregx)
1557 p = &loc->dw_loc_oprnd2.v.val_int;
1558
1559 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1560 offset. Don't optimize if a signed integer overflow would happen. */
1561 if (p != NULL
1562 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1563 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1564 *p += offset;
1565
1566 else if (offset > 0)
1567 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1568
1569 else
1570 {
1571 loc->dw_loc_next
1572 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1573 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1574 }
1575 }
1576
1577 /* Return a pointer to a newly allocated location description for
1578 REG and OFFSET. */
1579
1580 static inline dw_loc_descr_ref
1581 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1582 {
1583 HOST_WIDE_INT const_offset;
1584 if (offset.is_constant (&const_offset))
1585 {
1586 if (reg <= 31)
1587 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1588 const_offset, 0);
1589 else
1590 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1591 }
1592 else
1593 {
1594 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1595 loc_descr_plus_const (&ret, offset);
1596 return ret;
1597 }
1598 }
1599
1600 /* Add a constant OFFSET to a location list. */
1601
1602 static void
1603 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1604 {
1605 dw_loc_list_ref d;
1606 for (d = list_head; d != NULL; d = d->dw_loc_next)
1607 loc_descr_plus_const (&d->expr, offset);
1608 }
1609
1610 #define DWARF_REF_SIZE \
1611 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1612
1613 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1614 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1615 DW_FORM_data16 with 128 bits. */
1616 #define DWARF_LARGEST_DATA_FORM_BITS \
1617 (dwarf_version >= 5 ? 128 : 64)
1618
1619 /* Utility inline function for construction of ops that were GNU extension
1620 before DWARF 5. */
1621 static inline enum dwarf_location_atom
1622 dwarf_OP (enum dwarf_location_atom op)
1623 {
1624 switch (op)
1625 {
1626 case DW_OP_implicit_pointer:
1627 if (dwarf_version < 5)
1628 return DW_OP_GNU_implicit_pointer;
1629 break;
1630
1631 case DW_OP_entry_value:
1632 if (dwarf_version < 5)
1633 return DW_OP_GNU_entry_value;
1634 break;
1635
1636 case DW_OP_const_type:
1637 if (dwarf_version < 5)
1638 return DW_OP_GNU_const_type;
1639 break;
1640
1641 case DW_OP_regval_type:
1642 if (dwarf_version < 5)
1643 return DW_OP_GNU_regval_type;
1644 break;
1645
1646 case DW_OP_deref_type:
1647 if (dwarf_version < 5)
1648 return DW_OP_GNU_deref_type;
1649 break;
1650
1651 case DW_OP_convert:
1652 if (dwarf_version < 5)
1653 return DW_OP_GNU_convert;
1654 break;
1655
1656 case DW_OP_reinterpret:
1657 if (dwarf_version < 5)
1658 return DW_OP_GNU_reinterpret;
1659 break;
1660
1661 case DW_OP_addrx:
1662 if (dwarf_version < 5)
1663 return DW_OP_GNU_addr_index;
1664 break;
1665
1666 case DW_OP_constx:
1667 if (dwarf_version < 5)
1668 return DW_OP_GNU_const_index;
1669 break;
1670
1671 default:
1672 break;
1673 }
1674 return op;
1675 }
1676
1677 /* Similarly for attributes. */
1678 static inline enum dwarf_attribute
1679 dwarf_AT (enum dwarf_attribute at)
1680 {
1681 switch (at)
1682 {
1683 case DW_AT_call_return_pc:
1684 if (dwarf_version < 5)
1685 return DW_AT_low_pc;
1686 break;
1687
1688 case DW_AT_call_tail_call:
1689 if (dwarf_version < 5)
1690 return DW_AT_GNU_tail_call;
1691 break;
1692
1693 case DW_AT_call_origin:
1694 if (dwarf_version < 5)
1695 return DW_AT_abstract_origin;
1696 break;
1697
1698 case DW_AT_call_target:
1699 if (dwarf_version < 5)
1700 return DW_AT_GNU_call_site_target;
1701 break;
1702
1703 case DW_AT_call_target_clobbered:
1704 if (dwarf_version < 5)
1705 return DW_AT_GNU_call_site_target_clobbered;
1706 break;
1707
1708 case DW_AT_call_parameter:
1709 if (dwarf_version < 5)
1710 return DW_AT_abstract_origin;
1711 break;
1712
1713 case DW_AT_call_value:
1714 if (dwarf_version < 5)
1715 return DW_AT_GNU_call_site_value;
1716 break;
1717
1718 case DW_AT_call_data_value:
1719 if (dwarf_version < 5)
1720 return DW_AT_GNU_call_site_data_value;
1721 break;
1722
1723 case DW_AT_call_all_calls:
1724 if (dwarf_version < 5)
1725 return DW_AT_GNU_all_call_sites;
1726 break;
1727
1728 case DW_AT_call_all_tail_calls:
1729 if (dwarf_version < 5)
1730 return DW_AT_GNU_all_tail_call_sites;
1731 break;
1732
1733 case DW_AT_dwo_name:
1734 if (dwarf_version < 5)
1735 return DW_AT_GNU_dwo_name;
1736 break;
1737
1738 case DW_AT_addr_base:
1739 if (dwarf_version < 5)
1740 return DW_AT_GNU_addr_base;
1741 break;
1742
1743 default:
1744 break;
1745 }
1746 return at;
1747 }
1748
1749 /* And similarly for tags. */
1750 static inline enum dwarf_tag
1751 dwarf_TAG (enum dwarf_tag tag)
1752 {
1753 switch (tag)
1754 {
1755 case DW_TAG_call_site:
1756 if (dwarf_version < 5)
1757 return DW_TAG_GNU_call_site;
1758 break;
1759
1760 case DW_TAG_call_site_parameter:
1761 if (dwarf_version < 5)
1762 return DW_TAG_GNU_call_site_parameter;
1763 break;
1764
1765 default:
1766 break;
1767 }
1768 return tag;
1769 }
1770
1771 /* And similarly for forms. */
1772 static inline enum dwarf_form
1773 dwarf_FORM (enum dwarf_form form)
1774 {
1775 switch (form)
1776 {
1777 case DW_FORM_addrx:
1778 if (dwarf_version < 5)
1779 return DW_FORM_GNU_addr_index;
1780 break;
1781
1782 case DW_FORM_strx:
1783 if (dwarf_version < 5)
1784 return DW_FORM_GNU_str_index;
1785 break;
1786
1787 default:
1788 break;
1789 }
1790 return form;
1791 }
1792
1793 static unsigned long int get_base_type_offset (dw_die_ref);
1794
1795 /* Return the size of a location descriptor. */
1796
1797 static unsigned long
1798 size_of_loc_descr (dw_loc_descr_ref loc)
1799 {
1800 unsigned long size = 1;
1801
1802 switch (loc->dw_loc_opc)
1803 {
1804 case DW_OP_addr:
1805 size += DWARF2_ADDR_SIZE;
1806 break;
1807 case DW_OP_GNU_addr_index:
1808 case DW_OP_addrx:
1809 case DW_OP_GNU_const_index:
1810 case DW_OP_constx:
1811 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1812 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1813 break;
1814 case DW_OP_const1u:
1815 case DW_OP_const1s:
1816 size += 1;
1817 break;
1818 case DW_OP_const2u:
1819 case DW_OP_const2s:
1820 size += 2;
1821 break;
1822 case DW_OP_const4u:
1823 case DW_OP_const4s:
1824 size += 4;
1825 break;
1826 case DW_OP_const8u:
1827 case DW_OP_const8s:
1828 size += 8;
1829 break;
1830 case DW_OP_constu:
1831 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1832 break;
1833 case DW_OP_consts:
1834 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1835 break;
1836 case DW_OP_pick:
1837 size += 1;
1838 break;
1839 case DW_OP_plus_uconst:
1840 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1841 break;
1842 case DW_OP_skip:
1843 case DW_OP_bra:
1844 size += 2;
1845 break;
1846 case DW_OP_breg0:
1847 case DW_OP_breg1:
1848 case DW_OP_breg2:
1849 case DW_OP_breg3:
1850 case DW_OP_breg4:
1851 case DW_OP_breg5:
1852 case DW_OP_breg6:
1853 case DW_OP_breg7:
1854 case DW_OP_breg8:
1855 case DW_OP_breg9:
1856 case DW_OP_breg10:
1857 case DW_OP_breg11:
1858 case DW_OP_breg12:
1859 case DW_OP_breg13:
1860 case DW_OP_breg14:
1861 case DW_OP_breg15:
1862 case DW_OP_breg16:
1863 case DW_OP_breg17:
1864 case DW_OP_breg18:
1865 case DW_OP_breg19:
1866 case DW_OP_breg20:
1867 case DW_OP_breg21:
1868 case DW_OP_breg22:
1869 case DW_OP_breg23:
1870 case DW_OP_breg24:
1871 case DW_OP_breg25:
1872 case DW_OP_breg26:
1873 case DW_OP_breg27:
1874 case DW_OP_breg28:
1875 case DW_OP_breg29:
1876 case DW_OP_breg30:
1877 case DW_OP_breg31:
1878 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1879 break;
1880 case DW_OP_regx:
1881 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1882 break;
1883 case DW_OP_fbreg:
1884 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1885 break;
1886 case DW_OP_bregx:
1887 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1888 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1889 break;
1890 case DW_OP_piece:
1891 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1892 break;
1893 case DW_OP_bit_piece:
1894 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1895 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1896 break;
1897 case DW_OP_deref_size:
1898 case DW_OP_xderef_size:
1899 size += 1;
1900 break;
1901 case DW_OP_call2:
1902 size += 2;
1903 break;
1904 case DW_OP_call4:
1905 size += 4;
1906 break;
1907 case DW_OP_call_ref:
1908 case DW_OP_GNU_variable_value:
1909 size += DWARF_REF_SIZE;
1910 break;
1911 case DW_OP_implicit_value:
1912 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1913 + loc->dw_loc_oprnd1.v.val_unsigned;
1914 break;
1915 case DW_OP_implicit_pointer:
1916 case DW_OP_GNU_implicit_pointer:
1917 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1918 break;
1919 case DW_OP_entry_value:
1920 case DW_OP_GNU_entry_value:
1921 {
1922 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1923 size += size_of_uleb128 (op_size) + op_size;
1924 break;
1925 }
1926 case DW_OP_const_type:
1927 case DW_OP_GNU_const_type:
1928 {
1929 unsigned long o
1930 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1931 size += size_of_uleb128 (o) + 1;
1932 switch (loc->dw_loc_oprnd2.val_class)
1933 {
1934 case dw_val_class_vec:
1935 size += loc->dw_loc_oprnd2.v.val_vec.length
1936 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1937 break;
1938 case dw_val_class_const:
1939 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1940 break;
1941 case dw_val_class_const_double:
1942 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1943 break;
1944 case dw_val_class_wide_int:
1945 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1946 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1947 break;
1948 default:
1949 gcc_unreachable ();
1950 }
1951 break;
1952 }
1953 case DW_OP_regval_type:
1954 case DW_OP_GNU_regval_type:
1955 {
1956 unsigned long o
1957 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1958 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1959 + size_of_uleb128 (o);
1960 }
1961 break;
1962 case DW_OP_deref_type:
1963 case DW_OP_GNU_deref_type:
1964 {
1965 unsigned long o
1966 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1967 size += 1 + size_of_uleb128 (o);
1968 }
1969 break;
1970 case DW_OP_convert:
1971 case DW_OP_reinterpret:
1972 case DW_OP_GNU_convert:
1973 case DW_OP_GNU_reinterpret:
1974 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1975 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1976 else
1977 {
1978 unsigned long o
1979 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1980 size += size_of_uleb128 (o);
1981 }
1982 break;
1983 case DW_OP_GNU_parameter_ref:
1984 size += 4;
1985 break;
1986 default:
1987 break;
1988 }
1989
1990 return size;
1991 }
1992
1993 /* Return the size of a series of location descriptors. */
1994
1995 unsigned long
1996 size_of_locs (dw_loc_descr_ref loc)
1997 {
1998 dw_loc_descr_ref l;
1999 unsigned long size;
2000
2001 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
2002 field, to avoid writing to a PCH file. */
2003 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2004 {
2005 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
2006 break;
2007 size += size_of_loc_descr (l);
2008 }
2009 if (! l)
2010 return size;
2011
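/* Otherwise make a second pass, recording each operation's offset in
   dw_loc_addr so that DW_OP_skip/DW_OP_bra branch offsets can be
   computed when the expression is output. */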
2012 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2013 {
2014 l->dw_loc_addr = size;
2015 size += size_of_loc_descr (l);
2016 }
2017
2018 return size;
2019 }
2020
2021 /* Return the size of the value in a DW_AT_discr_value attribute. */
2022
2023 static int
2024 size_of_discr_value (dw_discr_value *discr_value)
2025 {
2026 if (discr_value->pos)
2027 return size_of_uleb128 (discr_value->v.uval);
2028 else
2029 return size_of_sleb128 (discr_value->v.sval);
2030 }
2031
2032 /* Return the size of the value in a DW_AT_discr_list attribute. */
2033
2034 static int
2035 size_of_discr_list (dw_discr_list_ref discr_list)
2036 {
2037 int size = 0;
2038
2039 for (dw_discr_list_ref list = discr_list;
2040 list != NULL;
2041 list = list->dw_discr_next)
2042 {
2043 /* One byte for the discriminant value descriptor, and then one or two
2044 LEB128 numbers, depending on whether it's a single case label or a
2045 range label. */
2046 size += 1;
2047 size += size_of_discr_value (&list->dw_discr_lower_bound);
2048 if (list->dw_discr_range != 0)
2049 size += size_of_discr_value (&list->dw_discr_upper_bound);
2050 }
2051 return size;
2052 }
2053
2054 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2055 static void get_ref_die_offset_label (char *, dw_die_ref);
2056 static unsigned long int get_ref_die_offset (dw_die_ref);
2057
2058 /* Output location description stack opcode's operands (if any).
2059 The for_eh_or_skip parameter controls whether register numbers are
2060 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2061 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2062 info). This should be suppressed for the cases that have not been converted
2063 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2064
2065 static void
2066 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2067 {
2068 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2069 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2070
2071 switch (loc->dw_loc_opc)
2072 {
2073 #ifdef DWARF2_DEBUGGING_INFO
2074 case DW_OP_const2u:
2075 case DW_OP_const2s:
2076 dw2_asm_output_data (2, val1->v.val_int, NULL);
2077 break;
2078 case DW_OP_const4u:
2079 if (loc->dtprel)
2080 {
2081 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2082 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2083 val1->v.val_addr);
2084 fputc ('\n', asm_out_file);
2085 break;
2086 }
2087 /* FALLTHRU */
2088 case DW_OP_const4s:
2089 dw2_asm_output_data (4, val1->v.val_int, NULL);
2090 break;
2091 case DW_OP_const8u:
2092 if (loc->dtprel)
2093 {
2094 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2095 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2096 val1->v.val_addr);
2097 fputc ('\n', asm_out_file);
2098 break;
2099 }
2100 /* FALLTHRU */
2101 case DW_OP_const8s:
2102 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2103 dw2_asm_output_data (8, val1->v.val_int, NULL);
2104 break;
2105 case DW_OP_skip:
2106 case DW_OP_bra:
2107 {
2108 int offset;
2109
2110 gcc_assert (val1->val_class == dw_val_class_loc);
2111 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2112
2113 dw2_asm_output_data (2, offset, NULL);
2114 }
2115 break;
2116 case DW_OP_implicit_value:
2117 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2118 switch (val2->val_class)
2119 {
2120 case dw_val_class_const:
2121 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2122 break;
2123 case dw_val_class_vec:
2124 {
2125 unsigned int elt_size = val2->v.val_vec.elt_size;
2126 unsigned int len = val2->v.val_vec.length;
2127 unsigned int i;
2128 unsigned char *p;
2129
2130 if (elt_size > sizeof (HOST_WIDE_INT))
2131 {
2132 elt_size /= 2;
2133 len *= 2;
2134 }
2135 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2136 i < len;
2137 i++, p += elt_size)
2138 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2139 "fp or vector constant word %u", i);
2140 }
2141 break;
2142 case dw_val_class_const_double:
2143 {
2144 unsigned HOST_WIDE_INT first, second;
2145
2146 if (WORDS_BIG_ENDIAN)
2147 {
2148 first = val2->v.val_double.high;
2149 second = val2->v.val_double.low;
2150 }
2151 else
2152 {
2153 first = val2->v.val_double.low;
2154 second = val2->v.val_double.high;
2155 }
2156 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2157 first, NULL);
2158 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2159 second, NULL);
2160 }
2161 break;
2162 case dw_val_class_wide_int:
2163 {
2164 int i;
2165 int len = get_full_len (*val2->v.val_wide);
2166 if (WORDS_BIG_ENDIAN)
2167 for (i = len - 1; i >= 0; --i)
2168 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2169 val2->v.val_wide->elt (i), NULL);
2170 else
2171 for (i = 0; i < len; ++i)
2172 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2173 val2->v.val_wide->elt (i), NULL);
2174 }
2175 break;
2176 case dw_val_class_addr:
2177 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2178 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2179 break;
2180 default:
2181 gcc_unreachable ();
2182 }
2183 break;
2184 #else
2185 case DW_OP_const2u:
2186 case DW_OP_const2s:
2187 case DW_OP_const4u:
2188 case DW_OP_const4s:
2189 case DW_OP_const8u:
2190 case DW_OP_const8s:
2191 case DW_OP_skip:
2192 case DW_OP_bra:
2193 case DW_OP_implicit_value:
2194 /* We currently don't make any attempt to make sure these are
2195 aligned properly like we do for the main unwind info, so
2196 don't support emitting things larger than a byte if we're
2197 only doing unwinding. */
2198 gcc_unreachable ();
2199 #endif
2200 case DW_OP_const1u:
2201 case DW_OP_const1s:
2202 dw2_asm_output_data (1, val1->v.val_int, NULL);
2203 break;
2204 case DW_OP_constu:
2205 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2206 break;
2207 case DW_OP_consts:
2208 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2209 break;
2210 case DW_OP_pick:
2211 dw2_asm_output_data (1, val1->v.val_int, NULL);
2212 break;
2213 case DW_OP_plus_uconst:
2214 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2215 break;
2216 case DW_OP_breg0:
2217 case DW_OP_breg1:
2218 case DW_OP_breg2:
2219 case DW_OP_breg3:
2220 case DW_OP_breg4:
2221 case DW_OP_breg5:
2222 case DW_OP_breg6:
2223 case DW_OP_breg7:
2224 case DW_OP_breg8:
2225 case DW_OP_breg9:
2226 case DW_OP_breg10:
2227 case DW_OP_breg11:
2228 case DW_OP_breg12:
2229 case DW_OP_breg13:
2230 case DW_OP_breg14:
2231 case DW_OP_breg15:
2232 case DW_OP_breg16:
2233 case DW_OP_breg17:
2234 case DW_OP_breg18:
2235 case DW_OP_breg19:
2236 case DW_OP_breg20:
2237 case DW_OP_breg21:
2238 case DW_OP_breg22:
2239 case DW_OP_breg23:
2240 case DW_OP_breg24:
2241 case DW_OP_breg25:
2242 case DW_OP_breg26:
2243 case DW_OP_breg27:
2244 case DW_OP_breg28:
2245 case DW_OP_breg29:
2246 case DW_OP_breg30:
2247 case DW_OP_breg31:
2248 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2249 break;
2250 case DW_OP_regx:
2251 {
2252 unsigned r = val1->v.val_unsigned;
2253 if (for_eh_or_skip >= 0)
2254 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2255 gcc_assert (size_of_uleb128 (r)
2256 == size_of_uleb128 (val1->v.val_unsigned));
2257 dw2_asm_output_data_uleb128 (r, NULL);
2258 }
2259 break;
2260 case DW_OP_fbreg:
2261 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2262 break;
2263 case DW_OP_bregx:
2264 {
2265 unsigned r = val1->v.val_unsigned;
2266 if (for_eh_or_skip >= 0)
2267 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2268 gcc_assert (size_of_uleb128 (r)
2269 == size_of_uleb128 (val1->v.val_unsigned));
2270 dw2_asm_output_data_uleb128 (r, NULL);
2271 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2272 }
2273 break;
2274 case DW_OP_piece:
2275 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2276 break;
2277 case DW_OP_bit_piece:
2278 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2279 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2280 break;
2281 case DW_OP_deref_size:
2282 case DW_OP_xderef_size:
2283 dw2_asm_output_data (1, val1->v.val_int, NULL);
2284 break;
2285
2286 case DW_OP_addr:
2287 if (loc->dtprel)
2288 {
2289 if (targetm.asm_out.output_dwarf_dtprel)
2290 {
2291 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2292 DWARF2_ADDR_SIZE,
2293 val1->v.val_addr);
2294 fputc ('\n', asm_out_file);
2295 }
2296 else
2297 gcc_unreachable ();
2298 }
2299 else
2300 {
2301 #ifdef DWARF2_DEBUGGING_INFO
2302 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2303 #else
2304 gcc_unreachable ();
2305 #endif
2306 }
2307 break;
2308
2309 case DW_OP_GNU_addr_index:
2310 case DW_OP_addrx:
2311 case DW_OP_GNU_const_index:
2312 case DW_OP_constx:
2313 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2314 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2315 "(index into .debug_addr)");
2316 break;
2317
2318 case DW_OP_call2:
2319 case DW_OP_call4:
2320 {
2321 unsigned long die_offset
2322 = get_ref_die_offset (val1->v.val_die_ref.die);
2323 /* Make sure the offset has been computed and that we can encode it as
2324 an operand. */
2325 gcc_assert (die_offset > 0
2326 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2327 ? 0xffff
2328 : 0xffffffff));
2329 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2330 die_offset, NULL);
2331 }
2332 break;
2333
2334 case DW_OP_call_ref:
2335 case DW_OP_GNU_variable_value:
2336 {
2337 char label[MAX_ARTIFICIAL_LABEL_BYTES
2338 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2339 gcc_assert (val1->val_class == dw_val_class_die_ref);
2340 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2341 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2342 }
2343 break;
2344
2345 case DW_OP_implicit_pointer:
2346 case DW_OP_GNU_implicit_pointer:
2347 {
2348 char label[MAX_ARTIFICIAL_LABEL_BYTES
2349 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2350 gcc_assert (val1->val_class == dw_val_class_die_ref);
2351 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2352 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2353 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2354 }
2355 break;
2356
2357 case DW_OP_entry_value:
2358 case DW_OP_GNU_entry_value:
2359 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2360 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2361 break;
2362
2363 case DW_OP_const_type:
2364 case DW_OP_GNU_const_type:
2365 {
2366 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2367 gcc_assert (o);
2368 dw2_asm_output_data_uleb128 (o, NULL);
2369 switch (val2->val_class)
2370 {
2371 case dw_val_class_const:
2372 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2373 dw2_asm_output_data (1, l, NULL);
2374 dw2_asm_output_data (l, val2->v.val_int, NULL);
2375 break;
2376 case dw_val_class_vec:
2377 {
2378 unsigned int elt_size = val2->v.val_vec.elt_size;
2379 unsigned int len = val2->v.val_vec.length;
2380 unsigned int i;
2381 unsigned char *p;
2382
2383 l = len * elt_size;
2384 dw2_asm_output_data (1, l, NULL);
2385 if (elt_size > sizeof (HOST_WIDE_INT))
2386 {
2387 elt_size /= 2;
2388 len *= 2;
2389 }
2390 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2391 i < len;
2392 i++, p += elt_size)
2393 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2394 "fp or vector constant word %u", i);
2395 }
2396 break;
2397 case dw_val_class_const_double:
2398 {
2399 unsigned HOST_WIDE_INT first, second;
2400 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2401
2402 dw2_asm_output_data (1, 2 * l, NULL);
2403 if (WORDS_BIG_ENDIAN)
2404 {
2405 first = val2->v.val_double.high;
2406 second = val2->v.val_double.low;
2407 }
2408 else
2409 {
2410 first = val2->v.val_double.low;
2411 second = val2->v.val_double.high;
2412 }
2413 dw2_asm_output_data (l, first, NULL);
2414 dw2_asm_output_data (l, second, NULL);
2415 }
2416 break;
2417 case dw_val_class_wide_int:
2418 {
2419 int i;
2420 int len = get_full_len (*val2->v.val_wide);
2421 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2422
2423 dw2_asm_output_data (1, len * l, NULL);
2424 if (WORDS_BIG_ENDIAN)
2425 for (i = len - 1; i >= 0; --i)
2426 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2427 else
2428 for (i = 0; i < len; ++i)
2429 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2430 }
2431 break;
2432 default:
2433 gcc_unreachable ();
2434 }
2435 }
2436 break;
2437 case DW_OP_regval_type:
2438 case DW_OP_GNU_regval_type:
2439 {
2440 unsigned r = val1->v.val_unsigned;
2441 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2442 gcc_assert (o);
2443 if (for_eh_or_skip >= 0)
2444 {
2445 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2446 gcc_assert (size_of_uleb128 (r)
2447 == size_of_uleb128 (val1->v.val_unsigned));
2448 }
2449 dw2_asm_output_data_uleb128 (r, NULL);
2450 dw2_asm_output_data_uleb128 (o, NULL);
2451 }
2452 break;
2453 case DW_OP_deref_type:
2454 case DW_OP_GNU_deref_type:
2455 {
2456 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2457 gcc_assert (o);
2458 dw2_asm_output_data (1, val1->v.val_int, NULL);
2459 dw2_asm_output_data_uleb128 (o, NULL);
2460 }
2461 break;
2462 case DW_OP_convert:
2463 case DW_OP_reinterpret:
2464 case DW_OP_GNU_convert:
2465 case DW_OP_GNU_reinterpret:
2466 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2467 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2468 else
2469 {
2470 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2471 gcc_assert (o);
2472 dw2_asm_output_data_uleb128 (o, NULL);
2473 }
2474 break;
2475
2476 case DW_OP_GNU_parameter_ref:
2477 {
2478 unsigned long o;
2479 gcc_assert (val1->val_class == dw_val_class_die_ref);
2480 o = get_ref_die_offset (val1->v.val_die_ref.die);
2481 dw2_asm_output_data (4, o, NULL);
2482 }
2483 break;
2484
2485 default:
2486 /* Other codes have no operands. */
2487 break;
2488 }
2489 }
2490
2491 /* Output a sequence of location operations.
2492 The for_eh_or_skip parameter controls whether register numbers are
2493 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2494 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2495 info). This should be suppressed for the cases that have not been converted
2496 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2497
2498 void
2499 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2500 {
2501 for (; loc != NULL; loc = loc->dw_loc_next)
2502 {
2503 enum dwarf_location_atom opc = loc->dw_loc_opc;
2504 /* Output the opcode. */
2505 if (for_eh_or_skip >= 0
2506 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2507 {
2508 unsigned r = (opc - DW_OP_breg0);
2509 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2510 gcc_assert (r <= 31);
2511 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2512 }
2513 else if (for_eh_or_skip >= 0
2514 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2515 {
2516 unsigned r = (opc - DW_OP_reg0);
2517 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2518 gcc_assert (r <= 31);
2519 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2520 }
2521
2522 dw2_asm_output_data (1, opc,
2523 "%s", dwarf_stack_op_name (opc));
2524
2525 /* Output the operand(s) (if any). */
2526 output_loc_operands (loc, for_eh_or_skip);
2527 }
2528 }
2529
2530 /* Output location description stack opcode's operands (if any).
2531 The output is comma-separated bytes on a single line, suitable for .cfi_escape. */
2532
2533 static void
2534 output_loc_operands_raw (dw_loc_descr_ref loc)
2535 {
2536 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2537 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2538
2539 switch (loc->dw_loc_opc)
2540 {
2541 case DW_OP_addr:
2542 case DW_OP_GNU_addr_index:
2543 case DW_OP_addrx:
2544 case DW_OP_GNU_const_index:
2545 case DW_OP_constx:
2546 case DW_OP_implicit_value:
2547 /* We cannot output addresses in .cfi_escape, only bytes. */
2548 gcc_unreachable ();
2549
2550 case DW_OP_const1u:
2551 case DW_OP_const1s:
2552 case DW_OP_pick:
2553 case DW_OP_deref_size:
2554 case DW_OP_xderef_size:
2555 fputc (',', asm_out_file);
2556 dw2_asm_output_data_raw (1, val1->v.val_int);
2557 break;
2558
2559 case DW_OP_const2u:
2560 case DW_OP_const2s:
2561 fputc (',', asm_out_file);
2562 dw2_asm_output_data_raw (2, val1->v.val_int);
2563 break;
2564
2565 case DW_OP_const4u:
2566 case DW_OP_const4s:
2567 fputc (',', asm_out_file);
2568 dw2_asm_output_data_raw (4, val1->v.val_int);
2569 break;
2570
2571 case DW_OP_const8u:
2572 case DW_OP_const8s:
2573 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2574 fputc (',', asm_out_file);
2575 dw2_asm_output_data_raw (8, val1->v.val_int);
2576 break;
2577
2578 case DW_OP_skip:
2579 case DW_OP_bra:
2580 {
2581 int offset;
2582
2583 gcc_assert (val1->val_class == dw_val_class_loc);
2584 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2585
2586 fputc (',', asm_out_file);
2587 dw2_asm_output_data_raw (2, offset);
2588 }
2589 break;
2590
2591 case DW_OP_regx:
2592 {
2593 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2594 gcc_assert (size_of_uleb128 (r)
2595 == size_of_uleb128 (val1->v.val_unsigned));
2596 fputc (',', asm_out_file);
2597 dw2_asm_output_data_uleb128_raw (r);
2598 }
2599 break;
2600
2601 case DW_OP_constu:
2602 case DW_OP_plus_uconst:
2603 case DW_OP_piece:
2604 fputc (',', asm_out_file);
2605 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2606 break;
2607
2608 case DW_OP_bit_piece:
2609 fputc (',', asm_out_file);
2610 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2611 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2612 break;
2613
2614 case DW_OP_consts:
2615 case DW_OP_breg0:
2616 case DW_OP_breg1:
2617 case DW_OP_breg2:
2618 case DW_OP_breg3:
2619 case DW_OP_breg4:
2620 case DW_OP_breg5:
2621 case DW_OP_breg6:
2622 case DW_OP_breg7:
2623 case DW_OP_breg8:
2624 case DW_OP_breg9:
2625 case DW_OP_breg10:
2626 case DW_OP_breg11:
2627 case DW_OP_breg12:
2628 case DW_OP_breg13:
2629 case DW_OP_breg14:
2630 case DW_OP_breg15:
2631 case DW_OP_breg16:
2632 case DW_OP_breg17:
2633 case DW_OP_breg18:
2634 case DW_OP_breg19:
2635 case DW_OP_breg20:
2636 case DW_OP_breg21:
2637 case DW_OP_breg22:
2638 case DW_OP_breg23:
2639 case DW_OP_breg24:
2640 case DW_OP_breg25:
2641 case DW_OP_breg26:
2642 case DW_OP_breg27:
2643 case DW_OP_breg28:
2644 case DW_OP_breg29:
2645 case DW_OP_breg30:
2646 case DW_OP_breg31:
2647 case DW_OP_fbreg:
2648 fputc (',', asm_out_file);
2649 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2650 break;
2651
2652 case DW_OP_bregx:
2653 {
2654 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2655 gcc_assert (size_of_uleb128 (r)
2656 == size_of_uleb128 (val1->v.val_unsigned));
2657 fputc (',', asm_out_file);
2658 dw2_asm_output_data_uleb128_raw (r);
2659 fputc (',', asm_out_file);
2660 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2661 }
2662 break;
2663
2664 case DW_OP_implicit_pointer:
2665 case DW_OP_entry_value:
2666 case DW_OP_const_type:
2667 case DW_OP_regval_type:
2668 case DW_OP_deref_type:
2669 case DW_OP_convert:
2670 case DW_OP_reinterpret:
2671 case DW_OP_GNU_implicit_pointer:
2672 case DW_OP_GNU_entry_value:
2673 case DW_OP_GNU_const_type:
2674 case DW_OP_GNU_regval_type:
2675 case DW_OP_GNU_deref_type:
2676 case DW_OP_GNU_convert:
2677 case DW_OP_GNU_reinterpret:
2678 case DW_OP_GNU_parameter_ref:
2679 gcc_unreachable ();
2680 break;
2681
2682 default:
2683 /* Other codes have no operands. */
2684 break;
2685 }
2686 }
2687
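/* Output a sequence of location operations in the raw comma-separated
   byte form used by .cfi_escape. Register numbers in DW_OP_reg* and
   DW_OP_breg* opcodes are remapped with DWARF2_FRAME_REG_OUT, since this
   output is used for frame unwind info. */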
2688 void
2689 output_loc_sequence_raw (dw_loc_descr_ref loc)
2690 {
2691 while (1)
2692 {
2693 enum dwarf_location_atom opc = loc->dw_loc_opc;
2694 /* Output the opcode. */
2695 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2696 {
2697 unsigned r = (opc - DW_OP_breg0);
2698 r = DWARF2_FRAME_REG_OUT (r, 1);
2699 gcc_assert (r <= 31);
2700 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2701 }
2702 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2703 {
2704 unsigned r = (opc - DW_OP_reg0);
2705 r = DWARF2_FRAME_REG_OUT (r, 1);
2706 gcc_assert (r <= 31);
2707 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2708 }
2709 /* Output the opcode. */
2710 fprintf (asm_out_file, "%#x", opc);
2711 output_loc_operands_raw (loc);
2712
2713 if (!loc->dw_loc_next)
2714 break;
2715 loc = loc->dw_loc_next;
2716
2717 fputc (',', asm_out_file);
2718 }
2719 }
2720
2721 /* This function builds a dwarf location descriptor sequence from a
2722 dw_cfa_location, adding the given OFFSET to the result of the
2723 expression. */
2724
2725 struct dw_loc_descr_node *
2726 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2727 {
2728 struct dw_loc_descr_node *head, *tmp;
2729
2730 offset += cfa->offset;
2731
2732 if (cfa->indirect)
2733 {
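/* The CFA value is itself saved in memory: compute reg + base_offset,
   dereference that address, then add OFFSET to the result. */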
2734 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2735 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2736 head->dw_loc_oprnd1.val_entry = NULL;
2737 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2738 add_loc_descr (&head, tmp);
2739 loc_descr_plus_const (&head, offset);
2740 }
2741 else
2742 head = new_reg_loc_descr (cfa->reg, offset);
2743
2744 return head;
2745 }
2746
2747 /* This function builds a dwarf location descriptor sequence for
2748 the address at OFFSET from the CFA when the stack is aligned to
2749 ALIGNMENT bytes. */
2750
2751 struct dw_loc_descr_node *
2752 build_cfa_aligned_loc (dw_cfa_location *cfa,
2753 poly_int64 offset, HOST_WIDE_INT alignment)
2754 {
2755 struct dw_loc_descr_node *head;
2756 unsigned int dwarf_fp
2757 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2758
2759 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2760 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2761 {
2762 head = new_reg_loc_descr (dwarf_fp, 0);
2763 add_loc_descr (&head, int_loc_descriptor (alignment));
2764 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2765 loc_descr_plus_const (&head, offset);
2766 }
2767 else
2768 head = new_reg_loc_descr (dwarf_fp, offset);
2769 return head;
2770 }
2771 \f
2772 /* And now, the support for symbolic debugging information. */
2773
2774 /* .debug_str support. */
2775
2776 static void dwarf2out_init (const char *);
2777 static void dwarf2out_finish (const char *);
2778 static void dwarf2out_early_finish (const char *);
2779 static void dwarf2out_assembly_start (void);
2780 static void dwarf2out_define (unsigned int, const char *);
2781 static void dwarf2out_undef (unsigned int, const char *);
2782 static void dwarf2out_start_source_file (unsigned, const char *);
2783 static void dwarf2out_end_source_file (unsigned);
2784 static void dwarf2out_function_decl (tree);
2785 static void dwarf2out_begin_block (unsigned, unsigned);
2786 static void dwarf2out_end_block (unsigned, unsigned);
2787 static bool dwarf2out_ignore_block (const_tree);
2788 static void dwarf2out_early_global_decl (tree);
2789 static void dwarf2out_late_global_decl (tree);
2790 static void dwarf2out_type_decl (tree, int);
2791 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2792 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2793 dw_die_ref);
2794 static void dwarf2out_abstract_function (tree);
2795 static void dwarf2out_var_location (rtx_insn *);
2796 static void dwarf2out_inline_entry (tree);
2797 static void dwarf2out_size_function (tree);
2798 static void dwarf2out_begin_function (tree);
2799 static void dwarf2out_end_function (unsigned int);
2800 static void dwarf2out_register_main_translation_unit (tree unit);
2801 static void dwarf2out_set_name (tree, tree);
2802 static void dwarf2out_register_external_die (tree decl, const char *sym,
2803 unsigned HOST_WIDE_INT off);
2804 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2805 unsigned HOST_WIDE_INT *off);
2806
2807 /* The debug hooks structure. */
2808
2809 const struct gcc_debug_hooks dwarf2_debug_hooks =
2810 {
2811 dwarf2out_init,
2812 dwarf2out_finish,
2813 dwarf2out_early_finish,
2814 dwarf2out_assembly_start,
2815 dwarf2out_define,
2816 dwarf2out_undef,
2817 dwarf2out_start_source_file,
2818 dwarf2out_end_source_file,
2819 dwarf2out_begin_block,
2820 dwarf2out_end_block,
2821 dwarf2out_ignore_block,
2822 dwarf2out_source_line,
2823 dwarf2out_begin_prologue,
2824 #if VMS_DEBUGGING_INFO
2825 dwarf2out_vms_end_prologue,
2826 dwarf2out_vms_begin_epilogue,
2827 #else
2828 debug_nothing_int_charstar,
2829 debug_nothing_int_charstar,
2830 #endif
2831 dwarf2out_end_epilogue,
2832 dwarf2out_begin_function,
2833 dwarf2out_end_function, /* end_function */
2834 dwarf2out_register_main_translation_unit,
2835 dwarf2out_function_decl, /* function_decl */
2836 dwarf2out_early_global_decl,
2837 dwarf2out_late_global_decl,
2838 dwarf2out_type_decl, /* type_decl */
2839 dwarf2out_imported_module_or_decl,
2840 dwarf2out_die_ref_for_decl,
2841 dwarf2out_register_external_die,
2842 debug_nothing_tree, /* deferred_inline_function */
2843 /* The DWARF 2 backend tries to reduce debugging bloat by not
2844 emitting the abstract description of inline functions until
2845 something tries to reference them. */
2846 dwarf2out_abstract_function, /* outlining_inline_function */
2847 debug_nothing_rtx_code_label, /* label */
2848 debug_nothing_int, /* handle_pch */
2849 dwarf2out_var_location,
2850 dwarf2out_inline_entry, /* inline_entry */
2851 dwarf2out_size_function, /* size_function */
2852 dwarf2out_switch_text_section,
2853 dwarf2out_set_name,
2854 1, /* start_end_main_source_file */
2855 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2856 };
2857
2858 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2859 {
2860 dwarf2out_init,
2861 debug_nothing_charstar,
2862 debug_nothing_charstar,
2863 dwarf2out_assembly_start,
2864 debug_nothing_int_charstar,
2865 debug_nothing_int_charstar,
2866 debug_nothing_int_charstar,
2867 debug_nothing_int,
2868 debug_nothing_int_int, /* begin_block */
2869 debug_nothing_int_int, /* end_block */
2870 debug_true_const_tree, /* ignore_block */
2871 dwarf2out_source_line, /* source_line */
2872 debug_nothing_int_int_charstar, /* begin_prologue */
2873 debug_nothing_int_charstar, /* end_prologue */
2874 debug_nothing_int_charstar, /* begin_epilogue */
2875 debug_nothing_int_charstar, /* end_epilogue */
2876 debug_nothing_tree, /* begin_function */
2877 debug_nothing_int, /* end_function */
2878 debug_nothing_tree, /* register_main_translation_unit */
2879 debug_nothing_tree, /* function_decl */
2880 debug_nothing_tree, /* early_global_decl */
2881 debug_nothing_tree, /* late_global_decl */
2882 debug_nothing_tree_int, /* type_decl */
2883 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2884 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2885 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2886 debug_nothing_tree, /* deferred_inline_function */
2887 debug_nothing_tree, /* outlining_inline_function */
2888 debug_nothing_rtx_code_label, /* label */
2889 debug_nothing_int, /* handle_pch */
2890 debug_nothing_rtx_insn, /* var_location */
2891 debug_nothing_tree, /* inline_entry */
2892 debug_nothing_tree, /* size_function */
2893 debug_nothing_void, /* switch_text_section */
2894 debug_nothing_tree_tree, /* set_name */
2895 0, /* start_end_main_source_file */
2896 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2897 };
2898 \f
2899 /* NOTE: In the comments in this file, many references are made to
2900 "Debugging Information Entries". This term is abbreviated as `DIE'
2901 throughout the remainder of this file. */
2902
2903 /* An internal representation of the DWARF output is built, and then
2904 walked to generate the DWARF debugging info. The walk of the internal
2905 representation is done after the entire program has been compiled.
2906 The types below are used to describe the internal representation. */
2907
2908 /* Whether to put type DIEs into their own section .debug_types instead
2909 of making them part of the .debug_info section. This is only done for
2910 DWARF version 4 or higher, and only if the user hasn't disabled it with
2911 -fno-debug-types-section. Putting them in separate comdat sections is
2912 more efficient because the linker can then remove duplicates. But not
2913 all tools support .debug_types sections yet. For DWARF version 5 or
2914 higher, .debug_types no longer exists; type units use the DW_UT_type
2915 unit type within the .debug_info section instead. */
2916
2917 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2918
2919 /* Various DIE's use offsets relative to the beginning of the
2920 .debug_info section to refer to each other. */
2921
2922 typedef long int dw_offset;
2923
2924 struct comdat_type_node;
2925
2926 /* The entries in the line_info table more-or-less mirror the opcodes
2927 that are used in the real dwarf line table. Arrays of these entries
2928 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2929 supported. */
2930
2931 enum dw_line_info_opcode {
2932 /* Emit DW_LNE_set_address; the operand is the label index. */
2933 LI_set_address,
2934
2935 /* Emit a row to the matrix with the given line. This may be done
2936 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2937 special opcodes. */
2938 LI_set_line,
2939
2940 /* Emit a DW_LNS_set_file. */
2941 LI_set_file,
2942
2943 /* Emit a DW_LNS_set_column. */
2944 LI_set_column,
2945
2946 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2947 LI_negate_stmt,
2948
2949 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2950 LI_set_prologue_end,
2951 LI_set_epilogue_begin,
2952
2953 /* Emit a DW_LNE_set_discriminator. */
2954 LI_set_discriminator,
2955
2956 /* Output a Fixed Advance PC; the target PC is the label index; the
2957 base PC is the previous LI_adv_address or LI_set_address entry.
2958 We only use this when emitting debug views without assembler
2959 support, at explicit user request. Ideally, we should only use
2960 it when the offset might be zero but we can't tell: it's the only
2961 way to maybe change the PC without resetting the view number. */
2962 LI_adv_address
2963 };
2964
2965 typedef struct GTY(()) dw_line_info_struct {
2966 enum dw_line_info_opcode opcode;
2967 unsigned int val;
2968 } dw_line_info_entry;
2969
2970
2971 struct GTY(()) dw_line_info_table {
2972 /* The label that marks the end of this section. */
2973 const char *end_label;
2974
2975 /* The values for the last row of the matrix, as collected in the table.
2976 These are used to minimize the changes to the next row. */
2977 unsigned int file_num;
2978 unsigned int line_num;
2979 unsigned int column_num;
2980 int discrim_num;
2981 bool is_stmt;
2982 bool in_use;
2983
2984 /* This denotes the NEXT view number.
2985
2986 If it is 0, it is known that the NEXT view will be the first view
2987 at the given PC.
2988
2989 If it is -1, we're forcing the view number to be reset, e.g. at a
2990 function entry.
2991
2992 The meaning of other nonzero values depends on whether we're
2993 computing views internally or leaving it for the assembler to do
2994 so. If we're emitting them internally, view denotes the view
2995 number since the last known advance of PC. If we're leaving it
2996 for the assembler, it denotes the LVU label number that we're
2997 going to ask the assembler to assign. */
2998 var_loc_view view;
2999
3000 /* This counts the number of symbolic views emitted in this table
3001 since the latest view reset. Its max value, over all tables,
3002 sets symview_upper_bound. */
3003 var_loc_view symviews_since_reset;
3004
3005 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
3006 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
3007 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
3008 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
3009
3010 vec<dw_line_info_entry, va_gc> *entries;
3011 };
3012
3013 /* This is an upper bound for view numbers that the assembler may
3014 assign to symbolic views output in this translation unit. It is used to
3015 decide how big a field to use to represent view numbers in
3016 symview-classed attributes. */
3017
3018 static var_loc_view symview_upper_bound;
3019
3020 /* If we're keeping track of location views and their reset points, and
3021 INSN is a reset point (i.e., it necessarily advances the PC), mark
3022 the next view in TABLE as reset. */
3023
3024 static void
3025 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3026 {
3027 if (!debug_internal_reset_location_views)
3028 return;
3029
3030 /* Maybe turn (part of?) this test into a default target hook. */
3031 int reset = 0;
3032
3033 if (targetm.reset_location_view)
3034 reset = targetm.reset_location_view (insn);
3035
3036 if (reset)
3037 ;
3038 else if (JUMP_TABLE_DATA_P (insn))
3039 reset = 1;
3040 else if (GET_CODE (insn) == USE
3041 || GET_CODE (insn) == CLOBBER
3042 || GET_CODE (insn) == ASM_INPUT
3043 || asm_noperands (insn) >= 0)
3044 ;
3045 else if (get_attr_min_length (insn) > 0)
3046 reset = 1;
3047
3048 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3049 RESET_NEXT_VIEW (table->view);
3050 }
3051
3052 /* Each DIE attribute has a field specifying the attribute kind
3053 and an attribute value. Attributes are collected in a vector
3054 attached to the DIE they modify. */
3055
3056 typedef struct GTY(()) dw_attr_struct {
3057 enum dwarf_attribute dw_attr;
3058 dw_val_node dw_attr_val;
3059 }
3060 dw_attr_node;
3061
3062
3063 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3064 The children of each node form a circular list linked by
3065 die_sib. die_child points to the node *before* the "first" child node. */
3066
3067 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3068 union die_symbol_or_type_node
3069 {
3070 const char * GTY ((tag ("0"))) die_symbol;
3071 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3072 }
3073 GTY ((desc ("%0.comdat_type_p"))) die_id;
3074 vec<dw_attr_node, va_gc> *die_attr;
3075 dw_die_ref die_parent;
3076 dw_die_ref die_child;
3077 dw_die_ref die_sib;
3078 dw_die_ref die_definition; /* ref from a specification to its definition */
3079 dw_offset die_offset;
3080 unsigned long die_abbrev;
3081 int die_mark;
3082 unsigned int decl_id;
3083 enum dwarf_tag die_tag;
3084 /* Die is used and must not be pruned as unused. */
3085 BOOL_BITFIELD die_perennial_p : 1;
3086 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3087 /* Set for an external ref to die_symbol when die_offset contains an
3088 extra offset to add to that symbol. */
3089 BOOL_BITFIELD with_offset : 1;
3090 /* Whether this DIE was removed from the DIE tree, for example via
3091 prune_unused_types. The DIE lookup routines don't consider
3092 such DIEs present. */
3093 BOOL_BITFIELD removed : 1;
3094 /* Lots of spare bits. */
3095 }
3096 die_node;
3097
3098 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3099 static bool early_dwarf;
3100 static bool early_dwarf_finished;
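/* RAII helper: constructing a set_early_dwarf object turns early_dwarf on
   (it must not be used once early dwarf generation has finished), and the
   destructor restores the previous value. */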
3101 struct set_early_dwarf {
3102 bool saved;
3103 set_early_dwarf () : saved(early_dwarf)
3104 {
3105 gcc_assert (! early_dwarf_finished);
3106 early_dwarf = true;
3107 }
3108 ~set_early_dwarf () { early_dwarf = saved; }
3109 };
3110
3111 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3112 #define FOR_EACH_CHILD(die, c, expr) do { \
3113 c = die->die_child; \
3114 if (c) do { \
3115 c = c->die_sib; \
3116 expr; \
3117 } while (c != die->die_child); \
3118 } while (0)
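/* For example, counting the children of a DIE might look like:

     dw_die_ref c;
     unsigned n = 0;
     FOR_EACH_CHILD (die, c, n++);

   Since the children form a circular list and die_child points at the
   last child, the loop starts at the first child and visits each child
   exactly once. */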
3119
3120 /* The pubname structure */
3121
3122 typedef struct GTY(()) pubname_struct {
3123 dw_die_ref die;
3124 const char *name;
3125 }
3126 pubname_entry;
3127
3128
3129 struct GTY(()) dw_ranges {
3130 const char *label;
3131 /* If this is positive, it's a block number, otherwise it's a
3132 bitwise-negated index into dw_ranges_by_label. */
3133 int num;
3134 /* Index for the range list for DW_FORM_rnglistx. */
3135 unsigned int idx : 31;
3136 /* True if this range might possibly be in a different section
3137 from the previous entry. */
3138 unsigned int maybe_new_sec : 1;
3139 };
3140
3141 /* A structure to hold a macinfo entry. */
3142
3143 typedef struct GTY(()) macinfo_struct {
3144 unsigned char code;
3145 unsigned HOST_WIDE_INT lineno;
3146 const char *info;
3147 }
3148 macinfo_entry;
3149
3150
3151 struct GTY(()) dw_ranges_by_label {
3152 const char *begin;
3153 const char *end;
3154 };
3155
3156 /* The comdat type node structure. */
3157 struct GTY(()) comdat_type_node
3158 {
3159 dw_die_ref root_die;
3160 dw_die_ref type_die;
3161 dw_die_ref skeleton_die;
3162 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3163 comdat_type_node *next;
3164 };
3165
3166 /* A list of DIEs for which we can't determine ancestry (parent_die
3167 field) just yet. Later in dwarf2out_finish we will fill in the
3168 missing bits. */
3169 typedef struct GTY(()) limbo_die_struct {
3170 dw_die_ref die;
3171 /* The tree for which this DIE was created. We use this to
3172 determine ancestry later. */
3173 tree created_for;
3174 struct limbo_die_struct *next;
3175 }
3176 limbo_die_node;
3177
3178 typedef struct skeleton_chain_struct
3179 {
3180 dw_die_ref old_die;
3181 dw_die_ref new_die;
3182 struct skeleton_chain_struct *parent;
3183 }
3184 skeleton_chain_node;
3185
3186 /* Define a macro which returns nonzero for a TYPE_DECL which was
3187 implicitly generated for a type.
3188
3189 Note that, unlike the C front-end (which generates a NULL named
3190 TYPE_DECL node for each complete tagged type, each array type,
3191 and each function type node created) the C++ front-end generates
3192 a _named_ TYPE_DECL node for each tagged type node created.
3193 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3194 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3195 front-end, but for each type, tagged or not. */
3196
3197 #define TYPE_DECL_IS_STUB(decl) \
3198 (DECL_NAME (decl) == NULL_TREE \
3199 || (DECL_ARTIFICIAL (decl) \
3200 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3201 /* This is necessary for stub decls that \
3202 appear in nested inline functions. */ \
3203 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3204 && (decl_ultimate_origin (decl) \
3205 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3206
3207 /* Information concerning the compilation unit's programming
3208 language, and compiler version. */
3209
3210 /* Fixed size portion of the DWARF compilation unit header. */
3211 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3212 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3213 + (dwarf_version >= 5 ? 4 : 3))
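/* Beyond the initial length and the .debug_abbrev offset, the fixed part
   of the unit header is the 2-byte version number and the 1-byte address
   size; DWARF 5 adds the 1-byte unit type, hence the 3 or 4 above. */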
3214
3215 /* Fixed size portion of the DWARF comdat type unit header. */
3216 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3217 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3218 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3219
3220 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3221 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3222 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3223
3224 /* Fixed size portion of public names info. */
3225 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3226
3227 /* Fixed size portion of the address range info. */
3228 #define DWARF_ARANGES_HEADER_SIZE \
3229 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3230 DWARF2_ADDR_SIZE * 2) \
3231 - DWARF_INITIAL_LENGTH_SIZE)
3232
3233 /* Size of padding portion in the address range info. It must be
3234 aligned to twice the pointer size. */
3235 #define DWARF_ARANGES_PAD_SIZE \
3236 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3237 DWARF2_ADDR_SIZE * 2) \
3238 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
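/* The trailing 4 in the two macros above covers the 2-byte version number
   and the 1-byte address size and segment selector size fields of the
   .debug_aranges header; the address range tuples that follow must start
   on a boundary of twice the address size, hence the rounding. */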
3239
3240 /* Use assembler line directives if available. */
3241 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3242 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3243 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3244 #else
3245 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3246 #endif
3247 #endif
3248
3249 /* Use assembler views in line directives if available. */
3250 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3251 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3252 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3253 #else
3254 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3255 #endif
3256 #endif
3257
3258 /* Return true if GCC configure detected assembler support for .loc. */
3259
3260 bool
3261 dwarf2out_default_as_loc_support (void)
3262 {
3263 return DWARF2_ASM_LINE_DEBUG_INFO;
3264 #if (GCC_VERSION >= 3000)
3265 # undef DWARF2_ASM_LINE_DEBUG_INFO
3266 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3267 #endif
3268 }
3269
3270 /* Return true if GCC configure detected assembler support for views
3271 in .loc directives. */
3272
3273 bool
3274 dwarf2out_default_as_locview_support (void)
3275 {
3276 return DWARF2_ASM_VIEW_DEBUG_INFO;
3277 #if (GCC_VERSION >= 3000)
3278 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3279 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3280 #endif
3281 }
3282
3283 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3284 view computation, and it refers to a view identifier for which we
3285 will not emit a label because it is known to map to a view number
3286 zero. We won't allocate the bitmap if we're not using assembler
3287 support for location views, but we have to make the variable
3288 visible for GGC and for code that will be optimized out for lack of
3289 support but that's still parsed and compiled. We could abstract it
3290 out with macros, but it's not worth it. */
3291 static GTY(()) bitmap zero_view_p;
3292
3293 /* Evaluate to TRUE iff N is known to identify the first location view
3294 at its PC. When not using assembler location view computation,
3295 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3296 and the view label numbers recorded in it are the ones known to be
3297 zero. */
3298 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3299 || (N) == (var_loc_view)-1 \
3300 || (zero_view_p \
3301 && bitmap_bit_p (zero_view_p, (N))))
3302
3303 /* Return true iff we're to emit .loc directives for the assembler to
3304 generate line number sections.
3305
3306 When we're not emitting views, all we need from the assembler is
3307 support for .loc directives.
3308
3309 If we are emitting views, we can only use the assembler's .loc
3310 support if it also supports views.
3311
3312 When the compiler is emitting the line number programs and
3313 computing view numbers itself, it resets view numbers at known PC
3314 changes and counts from that, and then it emits view numbers as
3315 literal constants in locviewlists. There are cases in which the
3316 compiler is not sure about PC changes, e.g. when extra alignment is
3317 requested for a label. In these cases, the compiler may not reset
3318 the view counter, and the potential PC advance in the line number
3319 program will use an opcode that does not reset the view counter
3320 even if the PC actually changes, so that compiler and debug info
3321 consumer can keep view numbers in sync.
3322
3323 When the compiler defers view computation to the assembler, it
3324 emits symbolic view numbers in locviewlists, with the exception of
3325 views known to be zero (forced resets, or reset after
3326 compiler-visible PC changes): instead of emitting symbols for
3327 these, we emit literal zero and assert the assembler agrees with
3328 the compiler's assessment. We could use symbolic views everywhere,
3329 instead of special-casing zero views, but then we'd be unable to
3330 optimize out locviewlists that contain only zeros. */
3331
3332 static bool
3333 output_asm_line_debug_info (void)
3334 {
3335 return (dwarf2out_as_loc_support
3336 && (dwarf2out_as_locview_support
3337 || !debug_variable_location_views));
3338 }
3339
3340 /* Minimum line offset in a special line info. opcode.
3341 This value was chosen to give a reasonable range of values. */
3342 #define DWARF_LINE_BASE -10
3343
3344 /* First special line opcode - leave room for the standard opcodes. */
3345 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3346
3347 /* Range of line offsets in a special line info. opcode. */
3348 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
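/* With these parameters, a standard DWARF special opcode that advances
   the line by LINE_DELTA and the address by ADDR_DELTA is encoded as

     (LINE_DELTA - DWARF_LINE_BASE)
     + (DWARF_LINE_RANGE * ADDR_DELTA) + DWARF_LINE_OPCODE_BASE

   which must fit in a single byte (at most 255). */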
3349
3350 /* Flag that indicates the initial value of the is_stmt_start flag.
3351 In the present implementation, we do not mark any lines as
3352 the beginning of a source statement, because that information
3353 is not made available by the GCC front-end. */
3354 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3355
3356 /* Maximum number of operations per instruction bundle. */
3357 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3358 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3359 #endif
3360
3361 /* This location is used by calc_die_sizes() to keep track of
3362 the offset of each DIE within the .debug_info section. */
3363 static unsigned long next_die_offset;
3364
3365 /* Record the root of the DIE's built for the current compilation unit. */
3366 static GTY(()) dw_die_ref single_comp_unit_die;
3367
3368 /* A list of type DIEs that have been separated into comdat sections. */
3369 static GTY(()) comdat_type_node *comdat_type_list;
3370
3371 /* A list of CU DIEs that have been separated. */
3372 static GTY(()) limbo_die_node *cu_die_list;
3373
3374 /* A list of DIEs with a NULL parent waiting to be relocated. */
3375 static GTY(()) limbo_die_node *limbo_die_list;
3376
3377 /* A list of DIEs for which we may have to generate
3378 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3379 static GTY(()) limbo_die_node *deferred_asm_name;
3380
3381 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3382 {
3383 typedef const char *compare_type;
3384
3385 static hashval_t hash (dwarf_file_data *);
3386 static bool equal (dwarf_file_data *, const char *);
3387 };
3388
3389 /* Filenames referenced by this compilation unit. */
3390 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3391
3392 struct decl_die_hasher : ggc_ptr_hash<die_node>
3393 {
3394 typedef tree compare_type;
3395
3396 static hashval_t hash (die_node *);
3397 static bool equal (die_node *, tree);
3398 };
3399 /* A hash table of references to DIE's that describe declarations.
3400 The key is a DECL_UID() which is a unique number identifying each decl. */
3401 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3402
3403 struct GTY ((for_user)) variable_value_struct {
3404 unsigned int decl_id;
3405 vec<dw_die_ref, va_gc> *dies;
3406 };
3407
3408 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3409 {
3410 typedef tree compare_type;
3411
3412 static hashval_t hash (variable_value_struct *);
3413 static bool equal (variable_value_struct *, tree);
3414 };
3415 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3416 dw_val_class_decl_ref class, indexed by the FUNCTION_DECL that is the
3417 DECL_CONTEXT of the referenced VAR_DECLs. */
3418 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3419
3420 struct block_die_hasher : ggc_ptr_hash<die_struct>
3421 {
3422 static hashval_t hash (die_struct *);
3423 static bool equal (die_struct *, die_struct *);
3424 };
3425
3426 /* A hash table of references to DIE's that describe COMMON blocks.
3427 The key is DECL_UID() ^ die_parent. */
3428 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3429
3430 typedef struct GTY(()) die_arg_entry_struct {
3431 dw_die_ref die;
3432 tree arg;
3433 } die_arg_entry;
3434
3435
3436 /* Node of the variable location list. */
3437 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3438 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3439 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3440 in mode of the EXPR_LIST node and first EXPR_LIST operand
3441 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3442 location or NULL for padding. For larger bitsizes,
3443 mode is 0 and first operand is a CONCAT with bitsize
3444 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3445 NULL as second operand. */
3446 rtx GTY (()) loc;
3447 const char * GTY (()) label;
3448 struct var_loc_node * GTY (()) next;
3449 var_loc_view view;
3450 };
3451
3452 /* Variable location list. */
3453 struct GTY ((for_user)) var_loc_list_def {
3454 struct var_loc_node * GTY (()) first;
3455
3456 /* Pointer to the last or last-but-one element of the
3457 chained list. If the list is empty, both first and
3458 last are NULL. If the list contains just one node,
3459 or if the last node is certainly not redundant, this points
3460 to the last node; otherwise it points to the last but one.
3461 Do not mark it for GC because it is marked through the chain. */
3462 struct var_loc_node * GTY ((skip ("%h"))) last;
3463
3464 /* Pointer to the last element before section switch,
3465 if NULL, either sections weren't switched or first
3466 is after section switch. */
3467 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3468
3469 /* DECL_UID of the variable decl. */
3470 unsigned int decl_id;
3471 };
3472 typedef struct var_loc_list_def var_loc_list;
3473
3474 /* Call argument location list. */
3475 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3476 rtx GTY (()) call_arg_loc_note;
3477 const char * GTY (()) label;
3478 tree GTY (()) block;
3479 bool tail_call_p;
3480 rtx GTY (()) symbol_ref;
3481 struct call_arg_loc_node * GTY (()) next;
3482 };
3483
3484
3485 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3486 {
3487 typedef const_tree compare_type;
3488
3489 static hashval_t hash (var_loc_list *);
3490 static bool equal (var_loc_list *, const_tree);
3491 };
3492
3493 /* Table of decl location linked lists. */
3494 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3495
3496 /* Head and tail of call_arg_loc chain. */
3497 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3498 static struct call_arg_loc_node *call_arg_loc_last;
3499
3500 /* Number of call sites in the current function. */
3501 static int call_site_count = -1;
3502 /* Number of tail call sites in the current function. */
3503 static int tail_call_site_count = -1;
3504
3505 /* A cached location list. */
3506 struct GTY ((for_user)) cached_dw_loc_list_def {
3507 /* The DECL_UID of the decl that this entry describes. */
3508 unsigned int decl_id;
3509
3510 /* The cached location list. */
3511 dw_loc_list_ref loc_list;
3512 };
3513 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3514
3515 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3516 {
3517
3518 typedef const_tree compare_type;
3519
3520 static hashval_t hash (cached_dw_loc_list *);
3521 static bool equal (cached_dw_loc_list *, const_tree);
3522 };
3523
3524 /* Table of cached location lists. */
3525 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3526
3527 /* A vector of references to DIE's that are uniquely identified by their tag,
3528 presence/absence of children DIE's, and list of attribute/value pairs. */
3529 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3530
3531 /* A hash map to remember the stack usage for DWARF procedures. The value
3532 stored is the difference in stack depth between just before the DWARF
3533 procedure is invoked and just after it returns. In other words, for a
3534 DWARF procedure that consumes N stack slots and pushes M, this stores M - N. */
3535 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
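
/* Worked example of the convention above (editorial note, not part of the
   original source): a DWARF procedure that consumes two stack slots (N = 2)
   and pushes one result (M = 1) is recorded here with the value
   M - N = 1 - 2 = -1.  */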
3536
3537 /* A global counter for generating labels for line number data. */
3538 static unsigned int line_info_label_num;
3539
3540 /* The current table to which we should emit line number information
3541 for the current function. This will be set up at the beginning of
3542 assembly for the function. */
3543 static GTY(()) dw_line_info_table *cur_line_info_table;
3544
3545 /* The two default tables of line number info. */
3546 static GTY(()) dw_line_info_table *text_section_line_info;
3547 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3548
3549 /* The set of all non-default tables of line number info. */
3550 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3551
3552 /* A flag to tell the pubnames/pubtypes export code whether there is an
3553 info section to refer to. */
3554 static bool info_section_emitted;
3555
3556 /* A pointer to the base of a table that contains a list of publicly
3557 accessible names. */
3558 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3559
3560 /* A pointer to the base of a table that contains a list of publicly
3561 accessible types. */
3562 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3563
3564 /* A pointer to the base of a table that contains a list of macro
3565 defines/undefines (and file start/end markers). */
3566 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3567
3568 /* True if .debug_macinfo or .debug_macros section is going to be
3569 emitted. */
3570 #define have_macinfo \
3571 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3572 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3573 && !macinfo_table->is_empty ())
3574
3575 /* Vector of dies for which we should generate .debug_ranges info. */
3576 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3577
3578 /* Vector of pairs of labels referenced in ranges_table. */
3579 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3580
3581 /* Whether we have location lists that need outputting. */
3582 static GTY(()) bool have_location_lists;
3583
3584 /* Unique label counter. */
3585 static GTY(()) unsigned int loclabel_num;
3586
3587 /* Unique label counter for point-of-call tables. */
3588 static GTY(()) unsigned int poc_label_num;
3589
3590 /* The last file entry emitted by maybe_emit_file(). */
3591 static GTY(()) struct dwarf_file_data * last_emitted_file;
3592
3593 /* Number of internal labels generated by gen_internal_sym(). */
3594 static GTY(()) int label_num;
3595
3596 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3597
3598 /* Instances of generic types for which we need to generate debug
3599 info that describe their generic parameters and arguments. That
3600 generation needs to happen once all types are properly laid out so
3601 we do it at the end of compilation. */
3602 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3603
3604 /* Offset from the "steady-state frame pointer" to the frame base,
3605 within the current function. */
3606 static poly_int64 frame_pointer_fb_offset;
3607 static bool frame_pointer_fb_offset_valid;
3608
3609 static vec<dw_die_ref> base_types;
3610
3611 /* Flags to represent a set of attribute classes for attributes that represent
3612 a scalar value (bounds, pointers, ...). */
3613 enum dw_scalar_form
3614 {
3615 dw_scalar_form_constant = 0x01,
3616 dw_scalar_form_exprloc = 0x02,
3617 dw_scalar_form_reference = 0x04
3618 };
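
/* Example of how these flags combine (editorial note, based on the
   add_scalar_info/add_bound_info prototypes below): a caller that can
   accept either a constant or a DWARF expression for a bound would pass
   the mask dw_scalar_form_constant | dw_scalar_form_exprloc as the int
   argument.  */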
3619
3620 /* Forward declarations for functions defined in this file. */
3621
3622 static int is_pseudo_reg (const_rtx);
3623 static tree type_main_variant (tree);
3624 static int is_tagged_type (const_tree);
3625 static const char *dwarf_tag_name (unsigned);
3626 static const char *dwarf_attr_name (unsigned);
3627 static const char *dwarf_form_name (unsigned);
3628 static tree decl_ultimate_origin (const_tree);
3629 static tree decl_class_context (tree);
3630 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3631 static inline enum dw_val_class AT_class (dw_attr_node *);
3632 static inline unsigned int AT_index (dw_attr_node *);
3633 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3634 static inline unsigned AT_flag (dw_attr_node *);
3635 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3636 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3637 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3638 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3639 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3640 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3641 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3642 unsigned int, unsigned char *);
3643 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3644 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3645 static inline const char *AT_string (dw_attr_node *);
3646 static enum dwarf_form AT_string_form (dw_attr_node *);
3647 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3648 static void add_AT_specification (dw_die_ref, dw_die_ref);
3649 static inline dw_die_ref AT_ref (dw_attr_node *);
3650 static inline int AT_ref_external (dw_attr_node *);
3651 static inline void set_AT_ref_external (dw_attr_node *, int);
3652 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3653 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3654 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3655 dw_loc_list_ref);
3656 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3657 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3658 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3659 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3660 static void remove_addr_table_entry (addr_table_entry *);
3661 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3662 static inline rtx AT_addr (dw_attr_node *);
3663 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3664 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3665 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3666 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3667 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3668 unsigned long, bool);
3669 static inline const char *AT_lbl (dw_attr_node *);
3670 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3671 static const char *get_AT_low_pc (dw_die_ref);
3672 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3673 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3674 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3675 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3676 static bool is_c (void);
3677 static bool is_cxx (void);
3678 static bool is_cxx (const_tree);
3679 static bool is_fortran (void);
3680 static bool is_ada (void);
3681 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3682 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3683 static void add_child_die (dw_die_ref, dw_die_ref);
3684 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3685 static dw_die_ref lookup_type_die (tree);
3686 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3687 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3688 static void equate_type_number_to_die (tree, dw_die_ref);
3689 static dw_die_ref lookup_decl_die (tree);
3690 static var_loc_list *lookup_decl_loc (const_tree);
3691 static void equate_decl_number_to_die (tree, dw_die_ref);
3692 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3693 static void print_spaces (FILE *);
3694 static void print_die (dw_die_ref, FILE *);
3695 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3696 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3697 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3698 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3699 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3700 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3701 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3702 struct md5_ctx *, int *);
3703 struct checksum_attributes;
3704 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3705 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3706 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3707 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3708 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3709 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3710 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3711 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3712 static int is_type_die (dw_die_ref);
3713 static inline bool is_template_instantiation (dw_die_ref);
3714 static int is_declaration_die (dw_die_ref);
3715 static int should_move_die_to_comdat (dw_die_ref);
3716 static dw_die_ref clone_as_declaration (dw_die_ref);
3717 static dw_die_ref clone_die (dw_die_ref);
3718 static dw_die_ref clone_tree (dw_die_ref);
3719 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3720 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3721 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3722 static dw_die_ref generate_skeleton (dw_die_ref);
3723 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3724 dw_die_ref,
3725 dw_die_ref);
3726 static void break_out_comdat_types (dw_die_ref);
3727 static void copy_decls_for_unworthy_types (dw_die_ref);
3728
3729 static void add_sibling_attributes (dw_die_ref);
3730 static void output_location_lists (dw_die_ref);
3731 static int constant_size (unsigned HOST_WIDE_INT);
3732 static unsigned long size_of_die (dw_die_ref);
3733 static void calc_die_sizes (dw_die_ref);
3734 static void calc_base_type_die_sizes (void);
3735 static void mark_dies (dw_die_ref);
3736 static void unmark_dies (dw_die_ref);
3737 static void unmark_all_dies (dw_die_ref);
3738 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3739 static unsigned long size_of_aranges (void);
3740 static enum dwarf_form value_format (dw_attr_node *);
3741 static void output_value_format (dw_attr_node *);
3742 static void output_abbrev_section (void);
3743 static void output_die_abbrevs (unsigned long, dw_die_ref);
3744 static void output_die (dw_die_ref);
3745 static void output_compilation_unit_header (enum dwarf_unit_type);
3746 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3747 static void output_comdat_type_unit (comdat_type_node *);
3748 static const char *dwarf2_name (tree, int);
3749 static void add_pubname (tree, dw_die_ref);
3750 static void add_enumerator_pubname (const char *, dw_die_ref);
3751 static void add_pubname_string (const char *, dw_die_ref);
3752 static void add_pubtype (tree, dw_die_ref);
3753 static void output_pubnames (vec<pubname_entry, va_gc> *);
3754 static void output_aranges (void);
3755 static unsigned int add_ranges (const_tree, bool = false);
3756 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3757 bool *, bool);
3758 static void output_ranges (void);
3759 static dw_line_info_table *new_line_info_table (void);
3760 static void output_line_info (bool);
3761 static void output_file_names (void);
3762 static dw_die_ref base_type_die (tree, bool);
3763 static int is_base_type (tree);
3764 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3765 static int decl_quals (const_tree);
3766 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3767 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3768 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3769 static unsigned int dbx_reg_number (const_rtx);
3770 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3771 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3772 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3773 enum var_init_status);
3774 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3775 enum var_init_status);
3776 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3777 enum var_init_status);
3778 static int is_based_loc (const_rtx);
3779 static bool resolve_one_addr (rtx *);
3780 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3781 enum var_init_status);
3782 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3783 enum var_init_status);
3784 struct loc_descr_context;
3785 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3786 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3787 static dw_loc_list_ref loc_list_from_tree (tree, int,
3788 struct loc_descr_context *);
3789 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3790 struct loc_descr_context *);
3791 static tree field_type (const_tree);
3792 static unsigned int simple_type_align_in_bits (const_tree);
3793 static unsigned int simple_decl_align_in_bits (const_tree);
3794 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3795 struct vlr_context;
3796 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3797 HOST_WIDE_INT *);
3798 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3799 dw_loc_list_ref);
3800 static void add_data_member_location_attribute (dw_die_ref, tree,
3801 struct vlr_context *);
3802 static bool add_const_value_attribute (dw_die_ref, rtx);
3803 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3804 static void insert_wide_int (const wide_int &, unsigned char *, int);
3805 static void insert_float (const_rtx, unsigned char *);
3806 static rtx rtl_for_decl_location (tree);
3807 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3808 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3809 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3810 static void add_name_attribute (dw_die_ref, const char *);
3811 static void add_desc_attribute (dw_die_ref, tree);
3812 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3813 static void add_comp_dir_attribute (dw_die_ref);
3814 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3815 struct loc_descr_context *);
3816 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3817 struct loc_descr_context *);
3818 static void add_subscript_info (dw_die_ref, tree, bool);
3819 static void add_byte_size_attribute (dw_die_ref, tree);
3820 static void add_alignment_attribute (dw_die_ref, tree);
3821 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3822 struct vlr_context *);
3823 static void add_bit_size_attribute (dw_die_ref, tree);
3824 static void add_prototyped_attribute (dw_die_ref, tree);
3825 static void add_abstract_origin_attribute (dw_die_ref, tree);
3826 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3827 static void add_src_coords_attributes (dw_die_ref, tree);
3828 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3829 static void add_discr_value (dw_die_ref, dw_discr_value *);
3830 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3831 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3832 static dw_die_ref scope_die_for (tree, dw_die_ref);
3833 static inline int local_scope_p (dw_die_ref);
3834 static inline int class_scope_p (dw_die_ref);
3835 static inline int class_or_namespace_scope_p (dw_die_ref);
3836 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3837 static void add_calling_convention_attribute (dw_die_ref, tree);
3838 static const char *type_tag (const_tree);
3839 static tree member_declared_type (const_tree);
3840 #if 0
3841 static const char *decl_start_label (tree);
3842 #endif
3843 static void gen_array_type_die (tree, dw_die_ref);
3844 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3845 #if 0
3846 static void gen_entry_point_die (tree, dw_die_ref);
3847 #endif
3848 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3849 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3850 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3851 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3852 static void gen_formal_types_die (tree, dw_die_ref);
3853 static void gen_subprogram_die (tree, dw_die_ref);
3854 static void gen_variable_die (tree, tree, dw_die_ref);
3855 static void gen_const_die (tree, dw_die_ref);
3856 static void gen_label_die (tree, dw_die_ref);
3857 static void gen_lexical_block_die (tree, dw_die_ref);
3858 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3859 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3860 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3861 static dw_die_ref gen_compile_unit_die (const char *);
3862 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3863 static void gen_member_die (tree, dw_die_ref);
3864 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3865 enum debug_info_usage);
3866 static void gen_subroutine_type_die (tree, dw_die_ref);
3867 static void gen_typedef_die (tree, dw_die_ref);
3868 static void gen_type_die (tree, dw_die_ref);
3869 static void gen_block_die (tree, dw_die_ref);
3870 static void decls_for_scope (tree, dw_die_ref, bool = true);
3871 static bool is_naming_typedef_decl (const_tree);
3872 static inline dw_die_ref get_context_die (tree);
3873 static void gen_namespace_die (tree, dw_die_ref);
3874 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3875 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3876 static dw_die_ref force_decl_die (tree);
3877 static dw_die_ref force_type_die (tree);
3878 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3879 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3880 static struct dwarf_file_data * lookup_filename (const char *);
3881 static void retry_incomplete_types (void);
3882 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3883 static void gen_generic_params_dies (tree);
3884 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3885 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3886 static void splice_child_die (dw_die_ref, dw_die_ref);
3887 static int file_info_cmp (const void *, const void *);
3888 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3889 const char *, var_loc_view, const char *);
3890 static void output_loc_list (dw_loc_list_ref);
3891 static char *gen_internal_sym (const char *);
3892 static bool want_pubnames (void);
3893
3894 static void prune_unmark_dies (dw_die_ref);
3895 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3896 static void prune_unused_types_mark (dw_die_ref, int);
3897 static void prune_unused_types_walk (dw_die_ref);
3898 static void prune_unused_types_walk_attribs (dw_die_ref);
3899 static void prune_unused_types_prune (dw_die_ref);
3900 static void prune_unused_types (void);
3901 static int maybe_emit_file (struct dwarf_file_data *fd);
3902 static inline const char *AT_vms_delta1 (dw_attr_node *);
3903 static inline const char *AT_vms_delta2 (dw_attr_node *);
3904 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3905 static void gen_remaining_tmpl_value_param_die_attribute (void);
3906 static bool generic_type_p (tree);
3907 static void schedule_generic_params_dies_gen (tree t);
3908 static void gen_scheduled_generic_parms_dies (void);
3909 static void resolve_variable_values (void);
3910
3911 static const char *comp_dir_string (void);
3912
3913 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3914
3915 /* enum for tracking thread-local variables whose address is really an offset
3916 relative to the TLS pointer, which will need link-time relocation, but will
3917 not need relocation by the DWARF consumer. */
3918
3919 enum dtprel_bool
3920 {
3921 dtprel_false = 0,
3922 dtprel_true = 1
3923 };
3924
3925 /* Return the operator to use for an address of a variable. For dtprel_true, we
3926 use DW_OP_const*. For regular variables, which need both link-time
3927 relocation and consumer-level relocation (e.g., to account for shared objects
3928 loaded at a random address), we use DW_OP_addr*. */
3929
3930 static inline enum dwarf_location_atom
3931 dw_addr_op (enum dtprel_bool dtprel)
3932 {
3933 if (dtprel == dtprel_true)
3934 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3935 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3936 else
3937 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3938 }
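
/* Worked example of the selection above (editorial note): with
   -gsplit-dwarf (dwarf_split_debug_info set), dtprel_false yields
   dwarf_OP (DW_OP_addrx) and dtprel_true yields dwarf_OP (DW_OP_constx);
   without split debug info on a target with DWARF2_ADDR_SIZE == 8,
   dtprel_true yields DW_OP_const8u and dtprel_false yields DW_OP_addr.  */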
3939
3940 /* Return a pointer to a newly allocated address location description. If
3941 dwarf_split_debug_info is true, then record the address with the appropriate
3942 relocation. */
3943 static inline dw_loc_descr_ref
3944 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3945 {
3946 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3947
3948 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3949 ref->dw_loc_oprnd1.v.val_addr = addr;
3950 ref->dtprel = dtprel;
3951 if (dwarf_split_debug_info)
3952 ref->dw_loc_oprnd1.val_entry
3953 = add_addr_table_entry (addr,
3954 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3955 else
3956 ref->dw_loc_oprnd1.val_entry = NULL;
3957
3958 return ref;
3959 }
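
/* Usage sketch (editorial note; SYM stands for a hypothetical SYMBOL_REF
   rtx):
     dw_loc_descr_ref ref = new_addr_loc_descr (SYM, dtprel_false);
   builds a DW_OP_addr (or DW_OP_addrx) descriptor with SYM as operand
   and, when dwarf_split_debug_info is set, also records SYM in the
   address table via add_addr_table_entry.  */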
3960
3961 /* Section names used to hold DWARF debugging information. */
3962
3963 #ifndef DEBUG_INFO_SECTION
3964 #define DEBUG_INFO_SECTION ".debug_info"
3965 #endif
3966 #ifndef DEBUG_DWO_INFO_SECTION
3967 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3968 #endif
3969 #ifndef DEBUG_LTO_INFO_SECTION
3970 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3971 #endif
3972 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3973 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3974 #endif
3975 #ifndef DEBUG_ABBREV_SECTION
3976 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3977 #endif
3978 #ifndef DEBUG_LTO_ABBREV_SECTION
3979 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3980 #endif
3981 #ifndef DEBUG_DWO_ABBREV_SECTION
3982 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3983 #endif
3984 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3985 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3986 #endif
3987 #ifndef DEBUG_ARANGES_SECTION
3988 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3989 #endif
3990 #ifndef DEBUG_ADDR_SECTION
3991 #define DEBUG_ADDR_SECTION ".debug_addr"
3992 #endif
3993 #ifndef DEBUG_MACINFO_SECTION
3994 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
3995 #endif
3996 #ifndef DEBUG_LTO_MACINFO_SECTION
3997 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
3998 #endif
3999 #ifndef DEBUG_DWO_MACINFO_SECTION
4000 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
4001 #endif
4002 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
4003 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
4004 #endif
4005 #ifndef DEBUG_MACRO_SECTION
4006 #define DEBUG_MACRO_SECTION ".debug_macro"
4007 #endif
4008 #ifndef DEBUG_LTO_MACRO_SECTION
4009 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
4010 #endif
4011 #ifndef DEBUG_DWO_MACRO_SECTION
4012 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4013 #endif
4014 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4015 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4016 #endif
4017 #ifndef DEBUG_LINE_SECTION
4018 #define DEBUG_LINE_SECTION ".debug_line"
4019 #endif
4020 #ifndef DEBUG_LTO_LINE_SECTION
4021 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4022 #endif
4023 #ifndef DEBUG_DWO_LINE_SECTION
4024 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4025 #endif
4026 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4027 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4028 #endif
4029 #ifndef DEBUG_LOC_SECTION
4030 #define DEBUG_LOC_SECTION ".debug_loc"
4031 #endif
4032 #ifndef DEBUG_DWO_LOC_SECTION
4033 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4034 #endif
4035 #ifndef DEBUG_LOCLISTS_SECTION
4036 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4037 #endif
4038 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4039 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4040 #endif
4041 #ifndef DEBUG_PUBNAMES_SECTION
4042 #define DEBUG_PUBNAMES_SECTION \
4043 ((debug_generate_pub_sections == 2) \
4044 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4045 #endif
4046 #ifndef DEBUG_PUBTYPES_SECTION
4047 #define DEBUG_PUBTYPES_SECTION \
4048 ((debug_generate_pub_sections == 2) \
4049 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4050 #endif
4051 #ifndef DEBUG_STR_OFFSETS_SECTION
4052 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4053 #endif
4054 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4055 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4056 #endif
4057 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4058 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4059 #endif
4060 #ifndef DEBUG_STR_SECTION
4061 #define DEBUG_STR_SECTION ".debug_str"
4062 #endif
4063 #ifndef DEBUG_LTO_STR_SECTION
4064 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4065 #endif
4066 #ifndef DEBUG_STR_DWO_SECTION
4067 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4068 #endif
4069 #ifndef DEBUG_LTO_STR_DWO_SECTION
4070 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4071 #endif
4072 #ifndef DEBUG_RANGES_SECTION
4073 #define DEBUG_RANGES_SECTION ".debug_ranges"
4074 #endif
4075 #ifndef DEBUG_RNGLISTS_SECTION
4076 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4077 #endif
4078 #ifndef DEBUG_LINE_STR_SECTION
4079 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4080 #endif
4081 #ifndef DEBUG_LTO_LINE_STR_SECTION
4082 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4083 #endif
4084
4085 /* Standard ELF section names for compiled code and data. */
4086 #ifndef TEXT_SECTION_NAME
4087 #define TEXT_SECTION_NAME ".text"
4088 #endif
4089
4090 /* Section flags for .debug_str section. */
4091 #define DEBUG_STR_SECTION_FLAGS \
4092 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4093 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4094 : SECTION_DEBUG)
4095
4096 /* Section flags for .debug_str.dwo section. */
4097 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4098
4099 /* Attribute used to refer to the macro section. */
4100 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4101 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
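
/* Example of the selection above (editorial note): -gdwarf-5 refers to
   the macro section with DW_AT_macros; -gdwarf-4 combined with
   -gstrict-dwarf falls back to DW_AT_macro_info; plain -gdwarf-4 uses
   the GNU extension DW_AT_GNU_macros.  */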
4102
4103 /* Labels we insert at the beginning of sections so that we can refer to
4104 them instead of the section names themselves. */
4105
4106 #ifndef TEXT_SECTION_LABEL
4107 #define TEXT_SECTION_LABEL "Ltext"
4108 #endif
4109 #ifndef COLD_TEXT_SECTION_LABEL
4110 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4111 #endif
4112 #ifndef DEBUG_LINE_SECTION_LABEL
4113 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4114 #endif
4115 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4116 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4117 #endif
4118 #ifndef DEBUG_INFO_SECTION_LABEL
4119 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4120 #endif
4121 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4122 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4123 #endif
4124 #ifndef DEBUG_ABBREV_SECTION_LABEL
4125 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4126 #endif
4127 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4128 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4129 #endif
4130 #ifndef DEBUG_ADDR_SECTION_LABEL
4131 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4132 #endif
4133 #ifndef DEBUG_LOC_SECTION_LABEL
4134 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4135 #endif
4136 #ifndef DEBUG_RANGES_SECTION_LABEL
4137 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4138 #endif
4139 #ifndef DEBUG_MACINFO_SECTION_LABEL
4140 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4141 #endif
4142 #ifndef DEBUG_MACRO_SECTION_LABEL
4143 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4144 #endif
4145 #define SKELETON_COMP_DIE_ABBREV 1
4146 #define SKELETON_TYPE_DIE_ABBREV 2
4147
4148 /* Definitions of defaults for formats and names of various special
4149 (artificial) labels which may be generated within this file (when the -g
4150 option is used and DWARF2_DEBUGGING_INFO is in effect).
4151 If necessary, these may be overridden from within the tm.h file, but
4152 typically, overriding these defaults is unnecessary. */
4153
4154 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4155 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4156 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4157 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4158 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4159 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4160 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4161 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4162 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4163 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4164 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4165 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4166 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4167 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4168 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4169
4170 #ifndef TEXT_END_LABEL
4171 #define TEXT_END_LABEL "Letext"
4172 #endif
4173 #ifndef COLD_END_LABEL
4174 #define COLD_END_LABEL "Letext_cold"
4175 #endif
4176 #ifndef BLOCK_BEGIN_LABEL
4177 #define BLOCK_BEGIN_LABEL "LBB"
4178 #endif
4179 #ifndef BLOCK_INLINE_ENTRY_LABEL
4180 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4181 #endif
4182 #ifndef BLOCK_END_LABEL
4183 #define BLOCK_END_LABEL "LBE"
4184 #endif
4185 #ifndef LINE_CODE_LABEL
4186 #define LINE_CODE_LABEL "LM"
4187 #endif
4188
4189 \f
4190 /* Return the root of the DIEs built for the current compilation unit. */
4191 static dw_die_ref
4192 comp_unit_die (void)
4193 {
4194 if (!single_comp_unit_die)
4195 single_comp_unit_die = gen_compile_unit_die (NULL);
4196 return single_comp_unit_die;
4197 }
4198
4199 /* We allow a language front-end to designate a function that is to be
4200 called to "demangle" any name before it is put into a DIE. */
4201
4202 static const char *(*demangle_name_func) (const char *);
4203
4204 void
4205 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4206 {
4207 demangle_name_func = func;
4208 }
4209
4210 /* Test if rtl node points to a pseudo register. */
4211
4212 static inline int
4213 is_pseudo_reg (const_rtx rtl)
4214 {
4215 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4216 || (GET_CODE (rtl) == SUBREG
4217 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4218 }
4219
4220 /* Return a reference to a type, with its const and volatile qualifiers
4221 removed. */
4222
4223 static inline tree
4224 type_main_variant (tree type)
4225 {
4226 type = TYPE_MAIN_VARIANT (type);
4227
4228 /* ??? There really should be only one main variant among any group of
4229 variants of a given type (and all of the MAIN_VARIANT values for all
4230 members of the group should point to that one type) but sometimes the C
4231 front-end messes this up for array types, so we work around that bug
4232 here. */
4233 if (TREE_CODE (type) == ARRAY_TYPE)
4234 while (type != TYPE_MAIN_VARIANT (type))
4235 type = TYPE_MAIN_VARIANT (type);
4236
4237 return type;
4238 }
4239
4240 /* Return nonzero if the given type node represents a tagged type. */
4241
4242 static inline int
4243 is_tagged_type (const_tree type)
4244 {
4245 enum tree_code code = TREE_CODE (type);
4246
4247 return (code == RECORD_TYPE || code == UNION_TYPE
4248 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4249 }
4250
4251 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4252
4253 static void
4254 get_ref_die_offset_label (char *label, dw_die_ref ref)
4255 {
4256 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4257 }
4258
4259 /* Return die_offset of a DIE reference to a base type. */
4260
4261 static unsigned long int
4262 get_base_type_offset (dw_die_ref ref)
4263 {
4264 if (ref->die_offset)
4265 return ref->die_offset;
4266 if (comp_unit_die ()->die_abbrev)
4267 {
4268 calc_base_type_die_sizes ();
4269 gcc_assert (ref->die_offset);
4270 }
4271 return ref->die_offset;
4272 }
4273
4274 /* Return die_offset of a DIE reference other than base type. */
4275
4276 static unsigned long int
4277 get_ref_die_offset (dw_die_ref ref)
4278 {
4279 gcc_assert (ref->die_offset);
4280 return ref->die_offset;
4281 }
4282
4283 /* Convert a DIE tag into its string name. */
4284
4285 static const char *
4286 dwarf_tag_name (unsigned int tag)
4287 {
4288 const char *name = get_DW_TAG_name (tag);
4289
4290 if (name != NULL)
4291 return name;
4292
4293 return "DW_TAG_<unknown>";
4294 }
4295
4296 /* Convert a DWARF attribute code into its string name. */
4297
4298 static const char *
4299 dwarf_attr_name (unsigned int attr)
4300 {
4301 const char *name;
4302
4303 switch (attr)
4304 {
4305 #if VMS_DEBUGGING_INFO
4306 case DW_AT_HP_prologue:
4307 return "DW_AT_HP_prologue";
4308 #else
4309 case DW_AT_MIPS_loop_unroll_factor:
4310 return "DW_AT_MIPS_loop_unroll_factor";
4311 #endif
4312
4313 #if VMS_DEBUGGING_INFO
4314 case DW_AT_HP_epilogue:
4315 return "DW_AT_HP_epilogue";
4316 #else
4317 case DW_AT_MIPS_stride:
4318 return "DW_AT_MIPS_stride";
4319 #endif
4320 }
4321
4322 name = get_DW_AT_name (attr);
4323
4324 if (name != NULL)
4325 return name;
4326
4327 return "DW_AT_<unknown>";
4328 }
4329
4330 /* Convert a DWARF value form code into its string name. */
4331
4332 static const char *
4333 dwarf_form_name (unsigned int form)
4334 {
4335 const char *name = get_DW_FORM_name (form);
4336
4337 if (name != NULL)
4338 return name;
4339
4340 return "DW_FORM_<unknown>";
4341 }
4342 \f
4343 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4344 instance of an inlined instance of a decl which is local to an inline
4345 function, so we have to trace all of the way back through the origin chain
4346 to find out what sort of node actually served as the original seed for the
4347 given block. */
4348
4349 static tree
4350 decl_ultimate_origin (const_tree decl)
4351 {
4352 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4353 return NULL_TREE;
4354
4355 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4356 we're trying to output the abstract instance of this function. */
4357 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4358 return NULL_TREE;
4359
4360 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4361 most distant ancestor, this should never happen. */
4362 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4363
4364 return DECL_ABSTRACT_ORIGIN (decl);
4365 }
4366
4367 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4368 of a virtual function may refer to a base class, so we check the 'this'
4369 parameter. */
4370
4371 static tree
4372 decl_class_context (tree decl)
4373 {
4374 tree context = NULL_TREE;
4375
4376 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4377 context = DECL_CONTEXT (decl);
4378 else
4379 context = TYPE_MAIN_VARIANT
4380 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4381
4382 if (context && !TYPE_P (context))
4383 context = NULL_TREE;
4384
4385 return context;
4386 }
4387 \f
4388 /* Add an attribute/value pair to a DIE. */
4389
4390 static inline void
4391 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4392 {
4393 /* Maybe this should be an assert? */
4394 if (die == NULL)
4395 return;
4396
4397 if (flag_checking)
4398 {
4399 /* Check we do not add duplicate attrs. Can't use get_AT here
4400 because that recurses to the specification/abstract origin DIE. */
4401 dw_attr_node *a;
4402 unsigned ix;
4403 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4404 gcc_assert (a->dw_attr != attr->dw_attr);
4405 }
4406
4407 vec_safe_reserve (die->die_attr, 1);
4408 vec_safe_push (die->die_attr, *attr);
4409 }
4410
4411 static inline enum dw_val_class
4412 AT_class (dw_attr_node *a)
4413 {
4414 return a->dw_attr_val.val_class;
4415 }
4416
4417 /* Return the index for any attribute that will be referenced with a
4418 DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String
4419 indices are stored in dw_attr_val.v.val_str to support
4420 reference-counting-based pruning. */
4421
4422 static inline unsigned int
4423 AT_index (dw_attr_node *a)
4424 {
4425 if (AT_class (a) == dw_val_class_str)
4426 return a->dw_attr_val.v.val_str->index;
4427 else if (a->dw_attr_val.val_entry != NULL)
4428 return a->dw_attr_val.val_entry->index;
4429 return NOT_INDEXED;
4430 }
4431
4432 /* Add a flag value attribute to a DIE. */
4433
4434 static inline void
4435 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4436 {
4437 dw_attr_node attr;
4438
4439 attr.dw_attr = attr_kind;
4440 attr.dw_attr_val.val_class = dw_val_class_flag;
4441 attr.dw_attr_val.val_entry = NULL;
4442 attr.dw_attr_val.v.val_flag = flag;
4443 add_dwarf_attr (die, &attr);
4444 }
4445
4446 static inline unsigned
4447 AT_flag (dw_attr_node *a)
4448 {
4449 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4450 return a->dw_attr_val.v.val_flag;
4451 }
4452
4453 /* Add a signed integer attribute value to a DIE. */
4454
4455 static inline void
4456 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4457 {
4458 dw_attr_node attr;
4459
4460 attr.dw_attr = attr_kind;
4461 attr.dw_attr_val.val_class = dw_val_class_const;
4462 attr.dw_attr_val.val_entry = NULL;
4463 attr.dw_attr_val.v.val_int = int_val;
4464 add_dwarf_attr (die, &attr);
4465 }
4466
4467 static inline HOST_WIDE_INT
4468 AT_int (dw_attr_node *a)
4469 {
4470 gcc_assert (a && (AT_class (a) == dw_val_class_const
4471 || AT_class (a) == dw_val_class_const_implicit));
4472 return a->dw_attr_val.v.val_int;
4473 }
4474
4475 /* Add an unsigned integer attribute value to a DIE. */
4476
4477 static inline void
4478 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4479 unsigned HOST_WIDE_INT unsigned_val)
4480 {
4481 dw_attr_node attr;
4482
4483 attr.dw_attr = attr_kind;
4484 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4485 attr.dw_attr_val.val_entry = NULL;
4486 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4487 add_dwarf_attr (die, &attr);
4488 }
4489
4490 static inline unsigned HOST_WIDE_INT
4491 AT_unsigned (dw_attr_node *a)
4492 {
4493 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4494 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4495 return a->dw_attr_val.v.val_unsigned;
4496 }
4497
4498 /* Add an unsigned wide integer attribute value to a DIE. */
4499
4500 static inline void
4501 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4502 const wide_int& w)
4503 {
4504 dw_attr_node attr;
4505
4506 attr.dw_attr = attr_kind;
4507 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4508 attr.dw_attr_val.val_entry = NULL;
4509 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4510 *attr.dw_attr_val.v.val_wide = w;
4511 add_dwarf_attr (die, &attr);
4512 }
4513
4514 /* Add an unsigned double integer attribute value to a DIE. */
4515
4516 static inline void
4517 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4518 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4519 {
4520 dw_attr_node attr;
4521
4522 attr.dw_attr = attr_kind;
4523 attr.dw_attr_val.val_class = dw_val_class_const_double;
4524 attr.dw_attr_val.val_entry = NULL;
4525 attr.dw_attr_val.v.val_double.high = high;
4526 attr.dw_attr_val.v.val_double.low = low;
4527 add_dwarf_attr (die, &attr);
4528 }
4529
4530 /* Add a vector (array of bytes) attribute value, such as a floating-point or vector constant, to a DIE. */
4531
4532 static inline void
4533 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4534 unsigned int length, unsigned int elt_size, unsigned char *array)
4535 {
4536 dw_attr_node attr;
4537
4538 attr.dw_attr = attr_kind;
4539 attr.dw_attr_val.val_class = dw_val_class_vec;
4540 attr.dw_attr_val.val_entry = NULL;
4541 attr.dw_attr_val.v.val_vec.length = length;
4542 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4543 attr.dw_attr_val.v.val_vec.array = array;
4544 add_dwarf_attr (die, &attr);
4545 }
4546
4547 /* Add an 8-byte data attribute value to a DIE. */
4548
4549 static inline void
4550 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4551 unsigned char data8[8])
4552 {
4553 dw_attr_node attr;
4554
4555 attr.dw_attr = attr_kind;
4556 attr.dw_attr_val.val_class = dw_val_class_data8;
4557 attr.dw_attr_val.val_entry = NULL;
4558 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4559 add_dwarf_attr (die, &attr);
4560 }
4561
4562 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4563 dwarf_split_debug_info, address attributes in dies destined for the
4564 final executable have force_direct set to avoid using indexed
4565 references. */
4566
4567 static inline void
4568 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4569 bool force_direct)
4570 {
4571 dw_attr_node attr;
4572 char * lbl_id;
4573
4574 lbl_id = xstrdup (lbl_low);
4575 attr.dw_attr = DW_AT_low_pc;
4576 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4577 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4578 if (dwarf_split_debug_info && !force_direct)
4579 attr.dw_attr_val.val_entry
4580 = add_addr_table_entry (lbl_id, ate_kind_label);
4581 else
4582 attr.dw_attr_val.val_entry = NULL;
4583 add_dwarf_attr (die, &attr);
4584
4585 attr.dw_attr = DW_AT_high_pc;
4586 if (dwarf_version < 4)
4587 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4588 else
4589 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4590 lbl_id = xstrdup (lbl_high);
4591 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4592 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4593 && dwarf_split_debug_info && !force_direct)
4594 attr.dw_attr_val.val_entry
4595 = add_addr_table_entry (lbl_id, ate_kind_label);
4596 else
4597 attr.dw_attr_val.val_entry = NULL;
4598 add_dwarf_attr (die, &attr);
4599 }
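
/* Usage sketch (editorial note; the label names are hypothetical): for a
   function whose code lies between labels LFB42 and LFE42,
     add_AT_low_high_pc (subr_die, "LFB42", "LFE42", false);
   attaches DW_AT_low_pc/DW_AT_high_pc; for dwarf_version >= 4 the high-pc
   value is classed as dw_val_class_high_pc so it can be emitted relative
   to DW_AT_low_pc rather than as a second address.  */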
4600
4601 /* Hash and equality functions for debug_str_hash. */
4602
4603 hashval_t
4604 indirect_string_hasher::hash (indirect_string_node *x)
4605 {
4606 return htab_hash_string (x->str);
4607 }
4608
4609 bool
4610 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4611 {
4612 return strcmp (x1->str, x2) == 0;
4613 }
4614
4615 /* Add STR to the given string hash table. */
4616
4617 static struct indirect_string_node *
4618 find_AT_string_in_table (const char *str,
4619 hash_table<indirect_string_hasher> *table)
4620 {
4621 struct indirect_string_node *node;
4622
4623 indirect_string_node **slot
4624 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4625 if (*slot == NULL)
4626 {
4627 node = ggc_cleared_alloc<indirect_string_node> ();
4628 node->str = ggc_strdup (str);
4629 *slot = node;
4630 }
4631 else
4632 node = *slot;
4633
4634 node->refcount++;
4635 return node;
4636 }
4637
4638 /* Add STR to the indirect string hash table. */
4639
4640 static struct indirect_string_node *
4641 find_AT_string (const char *str)
4642 {
4643 if (! debug_str_hash)
4644 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4645
4646 return find_AT_string_in_table (str, debug_str_hash);
4647 }
4648
4649 /* Add a string attribute value to a DIE. */
4650
4651 static inline void
4652 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4653 {
4654 dw_attr_node attr;
4655 struct indirect_string_node *node;
4656
4657 node = find_AT_string (str);
4658
4659 attr.dw_attr = attr_kind;
4660 attr.dw_attr_val.val_class = dw_val_class_str;
4661 attr.dw_attr_val.val_entry = NULL;
4662 attr.dw_attr_val.v.val_str = node;
4663 add_dwarf_attr (die, &attr);
4664 }
4665
4666 static inline const char *
4667 AT_string (dw_attr_node *a)
4668 {
4669 gcc_assert (a && AT_class (a) == dw_val_class_str);
4670 return a->dw_attr_val.v.val_str->str;
4671 }
4672
4673 /* Call this function directly to bypass AT_string_form's logic that may
4674 put the string inline in the DIE. */
4675
4676 static void
4677 set_indirect_string (struct indirect_string_node *node)
4678 {
4679 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4680 /* Already indirect is a no-op. */
4681 if (node->form == DW_FORM_strp
4682 || node->form == DW_FORM_line_strp
4683 || node->form == dwarf_FORM (DW_FORM_strx))
4684 {
4685 gcc_assert (node->label);
4686 return;
4687 }
4688 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4689 ++dw2_string_counter;
4690 node->label = xstrdup (label);
4691
4692 if (!dwarf_split_debug_info)
4693 {
4694 node->form = DW_FORM_strp;
4695 node->index = NOT_INDEXED;
4696 }
4697 else
4698 {
4699 node->form = dwarf_FORM (DW_FORM_strx);
4700 node->index = NO_INDEX_ASSIGNED;
4701 }
4702 }
4703
4704 /* A helper function for dwarf2out_finish, called to reset indirect
4705 string decisions done for early LTO dwarf output before fat object
4706 dwarf output. */
4707
4708 int
4709 reset_indirect_string (indirect_string_node **h, void *)
4710 {
4711 struct indirect_string_node *node = *h;
4712 if (node->form == DW_FORM_strp || node->form == dwarf_FORM (DW_FORM_strx))
4713 {
4714 free (node->label);
4715 node->label = NULL;
4716 node->form = (dwarf_form) 0;
4717 node->index = 0;
4718 }
4719 return 1;
4720 }
4721
4722 /* Find out whether a string should be output inline in the DIE
4723 or out-of-line in the .debug_str section. */
4724
4725 static enum dwarf_form
4726 find_string_form (struct indirect_string_node *node)
4727 {
4728 unsigned int len;
4729
4730 if (node->form)
4731 return node->form;
4732
4733 len = strlen (node->str) + 1;
4734
4735 /* If the string is shorter or equal to the size of the reference, it is
4736 always better to put it inline. */
4737 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4738 return node->form = DW_FORM_string;
4739
4740 /* If we cannot expect the linker to merge strings in .debug_str
4741 section, only put it into .debug_str if it is worth even in this
4742 single module. */
4743 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4744 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4745 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4746 return node->form = DW_FORM_string;
4747
4748 set_indirect_string (node);
4749
4750 return node->form;
4751 }
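
/* Worked example of the heuristic above (editorial note, assuming
   DWARF_OFFSET_SIZE == 4 and a .debug_str section without SECTION_MERGE):
   a string with len == 10 referenced twice gives (10 - 4) * 2 = 12 > 10,
   so it is moved out of line into .debug_str; referenced only once,
   (10 - 4) * 1 = 6 <= 10 and it stays inline as DW_FORM_string.  Strings
   with len <= 4 are always emitted inline.  */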
4752
4753 /* Find out whether the string referenced from the attribute should be
4754 output inline in the DIE or out-of-line in the .debug_str section. */
4755
4756 static enum dwarf_form
4757 AT_string_form (dw_attr_node *a)
4758 {
4759 gcc_assert (a && AT_class (a) == dw_val_class_str);
4760 return find_string_form (a->dw_attr_val.v.val_str);
4761 }
4762
4763 /* Add a DIE reference attribute value to a DIE. */
4764
4765 static inline void
4766 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4767 {
4768 dw_attr_node attr;
4769 gcc_checking_assert (targ_die != NULL);
4770
4771 /* With LTO we can end up trying to reference something we didn't create
4772 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4773 if (targ_die == NULL)
4774 return;
4775
4776 attr.dw_attr = attr_kind;
4777 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4778 attr.dw_attr_val.val_entry = NULL;
4779 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4780 attr.dw_attr_val.v.val_die_ref.external = 0;
4781 add_dwarf_attr (die, &attr);
4782 }
4783
4784 /* Change DIE reference REF to point to NEW_DIE instead. */
4785
4786 static inline void
4787 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4788 {
4789 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4790 ref->dw_attr_val.v.val_die_ref.die = new_die;
4791 ref->dw_attr_val.v.val_die_ref.external = 0;
4792 }
4793
4794 /* Add an AT_specification attribute to a DIE, and also make the back
4795 pointer from the specification to the definition. */
4796
4797 static inline void
4798 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4799 {
4800 add_AT_die_ref (die, DW_AT_specification, targ_die);
4801 gcc_assert (!targ_die->die_definition);
4802 targ_die->die_definition = die;
4803 }
4804
4805 static inline dw_die_ref
4806 AT_ref (dw_attr_node *a)
4807 {
4808 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4809 return a->dw_attr_val.v.val_die_ref.die;
4810 }
4811
4812 static inline int
4813 AT_ref_external (dw_attr_node *a)
4814 {
4815 if (a && AT_class (a) == dw_val_class_die_ref)
4816 return a->dw_attr_val.v.val_die_ref.external;
4817
4818 return 0;
4819 }
4820
4821 static inline void
4822 set_AT_ref_external (dw_attr_node *a, int i)
4823 {
4824 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4825 a->dw_attr_val.v.val_die_ref.external = i;
4826 }
4827
4828 /* Add a location description attribute value to a DIE. */
4829
4830 static inline void
4831 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4832 {
4833 dw_attr_node attr;
4834
4835 attr.dw_attr = attr_kind;
4836 attr.dw_attr_val.val_class = dw_val_class_loc;
4837 attr.dw_attr_val.val_entry = NULL;
4838 attr.dw_attr_val.v.val_loc = loc;
4839 add_dwarf_attr (die, &attr);
4840 }
4841
4842 static inline dw_loc_descr_ref
4843 AT_loc (dw_attr_node *a)
4844 {
4845 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4846 return a->dw_attr_val.v.val_loc;
4847 }
4848
4849 static inline void
4850 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4851 {
4852 dw_attr_node attr;
4853
4854 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4855 return;
4856
4857 attr.dw_attr = attr_kind;
4858 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4859 attr.dw_attr_val.val_entry = NULL;
4860 attr.dw_attr_val.v.val_loc_list = loc_list;
4861 add_dwarf_attr (die, &attr);
4862 have_location_lists = true;
4863 }
4864
4865 static inline dw_loc_list_ref
4866 AT_loc_list (dw_attr_node *a)
4867 {
4868 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4869 return a->dw_attr_val.v.val_loc_list;
4870 }
4871
4872 /* Add a view list attribute to DIE. DIE must already have a DW_AT_location
4873 attribute, because the view list complements the location list. */
4874
4875 static inline void
4876 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4877 {
4878 dw_attr_node attr;
4879
4880 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4881 return;
4882
4883 attr.dw_attr = attr_kind;
4884 attr.dw_attr_val.val_class = dw_val_class_view_list;
4885 attr.dw_attr_val.val_entry = NULL;
4886 attr.dw_attr_val.v.val_view_list = die;
4887 add_dwarf_attr (die, &attr);
4888 gcc_checking_assert (get_AT (die, DW_AT_location));
4889 gcc_assert (have_location_lists);
4890 }
4891
4892 /* Return a pointer to the location list referenced by the attribute.
4893 If the named attribute is a view list, look up the corresponding
4894 DW_AT_location attribute and return its location list. */
4895
4896 static inline dw_loc_list_ref *
4897 AT_loc_list_ptr (dw_attr_node *a)
4898 {
4899 gcc_assert (a);
4900 switch (AT_class (a))
4901 {
4902 case dw_val_class_loc_list:
4903 return &a->dw_attr_val.v.val_loc_list;
4904 case dw_val_class_view_list:
4905 {
4906 dw_attr_node *l;
4907 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4908 if (!l)
4909 return NULL;
4910 gcc_checking_assert (l + 1 == a);
4911 return AT_loc_list_ptr (l);
4912 }
4913 default:
4914 gcc_unreachable ();
4915 }
4916 }
4917
4918 /* Return the location attribute value associated with a view list
4919 attribute value. */
4920
4921 static inline dw_val_node *
4922 view_list_to_loc_list_val_node (dw_val_node *val)
4923 {
4924 gcc_assert (val->val_class == dw_val_class_view_list);
4925 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4926 if (!loc)
4927 return NULL;
4928 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4929 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4930 return &loc->dw_attr_val;
4931 }
4932
4933 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4934 {
4935 static hashval_t hash (addr_table_entry *);
4936 static bool equal (addr_table_entry *, addr_table_entry *);
4937 };
4938
4939 /* Table of entries into the .debug_addr section. */
4940
4941 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4942
4943 /* Hash an addr_table_entry. */
4944
4945 hashval_t
4946 addr_hasher::hash (addr_table_entry *a)
4947 {
4948 inchash::hash hstate;
4949 switch (a->kind)
4950 {
4951 case ate_kind_rtx:
4952 hstate.add_int (0);
4953 break;
4954 case ate_kind_rtx_dtprel:
4955 hstate.add_int (1);
4956 break;
4957 case ate_kind_label:
4958 return htab_hash_string (a->addr.label);
4959 default:
4960 gcc_unreachable ();
4961 }
4962 inchash::add_rtx (a->addr.rtl, hstate);
4963 return hstate.end ();
4964 }
4965
4966 /* Determine equality for two addr_table_entries. */
4967
4968 bool
4969 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4970 {
4971 if (a1->kind != a2->kind)
4972 return 0;
4973 switch (a1->kind)
4974 {
4975 case ate_kind_rtx:
4976 case ate_kind_rtx_dtprel:
4977 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4978 case ate_kind_label:
4979 return strcmp (a1->addr.label, a2->addr.label) == 0;
4980 default:
4981 gcc_unreachable ();
4982 }
4983 }
4984
4985 /* Initialize an addr_table_entry. */
4986
4987 void
4988 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4989 {
4990 e->kind = kind;
4991 switch (kind)
4992 {
4993 case ate_kind_rtx:
4994 case ate_kind_rtx_dtprel:
4995 e->addr.rtl = (rtx) addr;
4996 break;
4997 case ate_kind_label:
4998 e->addr.label = (char *) addr;
4999 break;
5000 }
5001 e->refcount = 0;
5002 e->index = NO_INDEX_ASSIGNED;
5003 }
5004
5005 /* Add an address table entry for ADDR to the table. Defer setting an
5006 index until output time. */
5007
5008 static addr_table_entry *
5009 add_addr_table_entry (void *addr, enum ate_kind kind)
5010 {
5011 addr_table_entry *node;
5012 addr_table_entry finder;
5013
5014 gcc_assert (dwarf_split_debug_info);
5015 if (! addr_index_table)
5016 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5017 init_addr_table_entry (&finder, kind, addr);
5018 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5019
5020 if (*slot == HTAB_EMPTY_ENTRY)
5021 {
5022 node = ggc_cleared_alloc<addr_table_entry> ();
5023 init_addr_table_entry (node, kind, addr);
5024 *slot = node;
5025 }
5026 else
5027 node = *slot;
5028
5029 node->refcount++;
5030 return node;
5031 }
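
/* Usage sketch (editorial note; ADDR stands for a hypothetical rtx):
   calling add_addr_table_entry (ADDR, ate_kind_rtx) twice with
   rtx_equal_p addresses returns the same node with refcount == 2; its
   index remains NO_INDEX_ASSIGNED until index_addr_table_entry assigns
   indices at output time.  */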
5032
5033 /* Remove an entry from the addr table by decrementing its refcount.
5034 Strictly, decrementing the refcount would be enough, but the
5035 assertion that the entry is actually in the table has found
5036 bugs. */
5037
5038 static void
5039 remove_addr_table_entry (addr_table_entry *entry)
5040 {
5041 gcc_assert (dwarf_split_debug_info && addr_index_table);
5042 /* After an index is assigned, the table is frozen. */
5043 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5044 entry->refcount--;
5045 }
5046
5047 /* Given a location list, remove all addresses it refers to from the
5048 address_table. */
5049
5050 static void
5051 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5052 {
5053 for (; descr; descr = descr->dw_loc_next)
5054 if (descr->dw_loc_oprnd1.val_entry != NULL)
5055 {
5056 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5057 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5058 }
5059 }
5060
5061 /* A helper function for dwarf2out_finish called through
5062 htab_traverse. Assign an addr_table_entry its index. All entries
5063 must be collected into the table when this function is called,
5064 because the indexing code relies on htab_traverse to traverse nodes
5065 in the same order for each run. */
5066
5067 int
5068 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5069 {
5070 addr_table_entry *node = *h;
5071
5072 /* Don't index unreferenced nodes. */
5073 if (node->refcount == 0)
5074 return 1;
5075
5076 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5077 node->index = *index;
5078 *index += 1;
5079
5080 return 1;
5081 }
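/* Illustrative usage sketch of the address table above; editor commentary,
   not part of the build.  Under -gsplit-dwarf an entry is added whenever an
   attribute needs an indexed address, dropped again if that attribute is
   removed, and only entries still referenced receive an index when output
   starts (roughly what dwarf2out_finish does via the traversal described
   above):

     addr_table_entry *e = add_addr_table_entry (rtl, ate_kind_rtx);
     ...
     remove_addr_table_entry (e);      // attribute was dropped after all

     unsigned int index = 0;           // at output time
     if (addr_index_table)
       addr_index_table->traverse_noresize
	 <unsigned int *, index_addr_table_entry> (&index);

   Entries whose refcount fell back to zero are skipped by
   index_addr_table_entry and never get an index.  */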
5082
5083 /* Add an address constant attribute value to a DIE. When using
5084 dwarf_split_debug_info, address attributes in dies destined for the
5085 final executable should be direct references--setting the parameter
5086 force_direct ensures this behavior. */
5087
5088 static inline void
5089 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5090 bool force_direct)
5091 {
5092 dw_attr_node attr;
5093
5094 attr.dw_attr = attr_kind;
5095 attr.dw_attr_val.val_class = dw_val_class_addr;
5096 attr.dw_attr_val.v.val_addr = addr;
5097 if (dwarf_split_debug_info && !force_direct)
5098 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5099 else
5100 attr.dw_attr_val.val_entry = NULL;
5101 add_dwarf_attr (die, &attr);
5102 }
5103
5104 /* Get the RTX from an address DIE attribute. */
5105
5106 static inline rtx
5107 AT_addr (dw_attr_node *a)
5108 {
5109 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5110 return a->dw_attr_val.v.val_addr;
5111 }
5112
5113 /* Add a file attribute value to a DIE. */
5114
5115 static inline void
5116 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5117 struct dwarf_file_data *fd)
5118 {
5119 dw_attr_node attr;
5120
5121 attr.dw_attr = attr_kind;
5122 attr.dw_attr_val.val_class = dw_val_class_file;
5123 attr.dw_attr_val.val_entry = NULL;
5124 attr.dw_attr_val.v.val_file = fd;
5125 add_dwarf_attr (die, &attr);
5126 }
5127
5128 /* Get the dwarf_file_data from a file DIE attribute. */
5129
5130 static inline struct dwarf_file_data *
5131 AT_file (dw_attr_node *a)
5132 {
5133 gcc_assert (a && (AT_class (a) == dw_val_class_file
5134 || AT_class (a) == dw_val_class_file_implicit));
5135 return a->dw_attr_val.v.val_file;
5136 }
5137
5138 /* Add a symbolic view identifier attribute value to a DIE. */
5139
5140 static inline void
5141 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5142 const char *view_label)
5143 {
5144 dw_attr_node attr;
5145
5146 attr.dw_attr = attr_kind;
5147 attr.dw_attr_val.val_class = dw_val_class_symview;
5148 attr.dw_attr_val.val_entry = NULL;
5149 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5150 add_dwarf_attr (die, &attr);
5151 }
5152
5153 /* Add a label identifier attribute value to a DIE. */
5154
5155 static inline void
5156 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5157 const char *lbl_id)
5158 {
5159 dw_attr_node attr;
5160
5161 attr.dw_attr = attr_kind;
5162 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5163 attr.dw_attr_val.val_entry = NULL;
5164 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5165 if (dwarf_split_debug_info)
5166 attr.dw_attr_val.val_entry
5167 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5168 ate_kind_label);
5169 add_dwarf_attr (die, &attr);
5170 }
5171
5172 /* Add a section offset attribute value to a DIE, an offset into the
5173 debug_line section. */
5174
5175 static inline void
5176 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5177 const char *label)
5178 {
5179 dw_attr_node attr;
5180
5181 attr.dw_attr = attr_kind;
5182 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5183 attr.dw_attr_val.val_entry = NULL;
5184 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5185 add_dwarf_attr (die, &attr);
5186 }
5187
5188 /* Add a section offset attribute value to a DIE, an offset into the
5189 debug_macinfo section. */
5190
5191 static inline void
5192 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5193 const char *label)
5194 {
5195 dw_attr_node attr;
5196
5197 attr.dw_attr = attr_kind;
5198 attr.dw_attr_val.val_class = dw_val_class_macptr;
5199 attr.dw_attr_val.val_entry = NULL;
5200 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5201 add_dwarf_attr (die, &attr);
5202 }
5203
5204 /* Add a range_list attribute value to a DIE. When using
5205 dwarf_split_debug_info, address attributes in dies destined for the
5206 final executable should be direct references--setting the parameter
5207 force_direct ensures this behavior. */
5208
5209 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5210 #define RELOCATED_OFFSET (NULL)
5211
5212 static void
5213 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5214 long unsigned int offset, bool force_direct)
5215 {
5216 dw_attr_node attr;
5217
5218 attr.dw_attr = attr_kind;
5219 attr.dw_attr_val.val_class = dw_val_class_range_list;
5220 /* For the range_list attribute, use val_entry to store whether the
5221 offset should follow split-debug-info or normal semantics. This
5222 value is read in output_range_list_offset. */
5223 if (dwarf_split_debug_info && !force_direct)
5224 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5225 else
5226 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5227 attr.dw_attr_val.v.val_offset = offset;
5228 add_dwarf_attr (die, &attr);
5229 }
5230
5231 /* Return the start label of a delta attribute. */
5232
5233 static inline const char *
5234 AT_vms_delta1 (dw_attr_node *a)
5235 {
5236 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5237 return a->dw_attr_val.v.val_vms_delta.lbl1;
5238 }
5239
5240 /* Return the end label of a delta attribute. */
5241
5242 static inline const char *
5243 AT_vms_delta2 (dw_attr_node *a)
5244 {
5245 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5246 return a->dw_attr_val.v.val_vms_delta.lbl2;
5247 }
5248
5249 static inline const char *
5250 AT_lbl (dw_attr_node *a)
5251 {
5252 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5253 || AT_class (a) == dw_val_class_lineptr
5254 || AT_class (a) == dw_val_class_macptr
5255 || AT_class (a) == dw_val_class_loclistsptr
5256 || AT_class (a) == dw_val_class_high_pc));
5257 return a->dw_attr_val.v.val_lbl_id;
5258 }
5259
5260 /* Get the attribute of type attr_kind. */
5261
5262 static dw_attr_node *
5263 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5264 {
5265 dw_attr_node *a;
5266 unsigned ix;
5267 dw_die_ref spec = NULL;
5268
5269 if (! die)
5270 return NULL;
5271
5272 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5273 if (a->dw_attr == attr_kind)
5274 return a;
5275 else if (a->dw_attr == DW_AT_specification
5276 || a->dw_attr == DW_AT_abstract_origin)
5277 spec = AT_ref (a);
5278
5279 if (spec)
5280 return get_AT (spec, attr_kind);
5281
5282 return NULL;
5283 }
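/* Editor note (illustrative, not part of the build): because get_AT follows
   DW_AT_specification and DW_AT_abstract_origin, an attribute query against
   a concrete or out-of-class DIE transparently falls back to the DIE that
   actually carries the attribute.  With hypothetical DIEs:

     dw_die_ref decl_die = ...;  // in-class declaration, has DW_AT_name "f"
     dw_die_ref def_die = ...;   // definition, only has DW_AT_specification
				 // pointing at decl_die

     get_AT_string (def_die, DW_AT_name);  // returns "f" via the fallback

   An attribute present on the DIE itself always wins; the fallback is only
   consulted when the requested attribute is absent.  */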
5284
5285 /* Returns the parent of the declaration of DIE. */
5286
5287 static dw_die_ref
5288 get_die_parent (dw_die_ref die)
5289 {
5290 dw_die_ref t;
5291
5292 if (!die)
5293 return NULL;
5294
5295 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5296 || (t = get_AT_ref (die, DW_AT_specification)))
5297 die = t;
5298
5299 return die->die_parent;
5300 }
5301
5302 /* Return the "low pc" attribute value, typically associated with a subprogram
5303 DIE. Return null if the "low pc" attribute is either not present, or if it
5304 cannot be represented as an assembler label identifier. */
5305
5306 static inline const char *
5307 get_AT_low_pc (dw_die_ref die)
5308 {
5309 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5310
5311 return a ? AT_lbl (a) : NULL;
5312 }
5313
5314 /* Return the value of the string attribute designated by ATTR_KIND, or
5315 NULL if it is not present. */
5316
5317 static inline const char *
5318 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5319 {
5320 dw_attr_node *a = get_AT (die, attr_kind);
5321
5322 return a ? AT_string (a) : NULL;
5323 }
5324
5325 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5326 if it is not present. */
5327
5328 static inline int
5329 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5330 {
5331 dw_attr_node *a = get_AT (die, attr_kind);
5332
5333 return a ? AT_flag (a) : 0;
5334 }
5335
5336 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5337 if it is not present. */
5338
5339 static inline unsigned
5340 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5341 {
5342 dw_attr_node *a = get_AT (die, attr_kind);
5343
5344 return a ? AT_unsigned (a) : 0;
5345 }
5346
5347 static inline dw_die_ref
5348 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5349 {
5350 dw_attr_node *a = get_AT (die, attr_kind);
5351
5352 return a ? AT_ref (a) : NULL;
5353 }
5354
5355 static inline struct dwarf_file_data *
5356 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5357 {
5358 dw_attr_node *a = get_AT (die, attr_kind);
5359
5360 return a ? AT_file (a) : NULL;
5361 }
5362
5363 /* Return TRUE if the language is C. */
5364
5365 static inline bool
5366 is_c (void)
5367 {
5368 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5369
5370 return (lang == DW_LANG_C || lang == DW_LANG_C89 || lang == DW_LANG_C99
5371 || lang == DW_LANG_C11 || lang == DW_LANG_ObjC);
5372
5373
5374 }
5375
5376 /* Return TRUE if the language is C++. */
5377
5378 static inline bool
5379 is_cxx (void)
5380 {
5381 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5382
5383 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5384 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5385 }
5386
5387 /* Return TRUE if DECL was created by the C++ frontend. */
5388
5389 static bool
5390 is_cxx (const_tree decl)
5391 {
5392 if (in_lto_p)
5393 {
5394 const_tree context = get_ultimate_context (decl);
5395 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5396 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5397 }
5398 return is_cxx ();
5399 }
5400
5401 /* Return TRUE if the language is Fortran. */
5402
5403 static inline bool
5404 is_fortran (void)
5405 {
5406 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5407
5408 return (lang == DW_LANG_Fortran77
5409 || lang == DW_LANG_Fortran90
5410 || lang == DW_LANG_Fortran95
5411 || lang == DW_LANG_Fortran03
5412 || lang == DW_LANG_Fortran08);
5413 }
5414
5415 static inline bool
5416 is_fortran (const_tree decl)
5417 {
5418 if (in_lto_p)
5419 {
5420 const_tree context = get_ultimate_context (decl);
5421 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5422 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5423 "GNU Fortran", 11) == 0
5424 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5425 "GNU F77") == 0);
5426 }
5427 return is_fortran ();
5428 }
5429
5430 /* Return TRUE if the language is Ada. */
5431
5432 static inline bool
5433 is_ada (void)
5434 {
5435 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5436
5437 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5438 }
5439
5440 /* Remove the specified attribute if present. Return TRUE if removal
5441 was successful. */
5442
5443 static bool
5444 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5445 {
5446 dw_attr_node *a;
5447 unsigned ix;
5448
5449 if (! die)
5450 return false;
5451
5452 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5453 if (a->dw_attr == attr_kind)
5454 {
5455 if (AT_class (a) == dw_val_class_str)
5456 if (a->dw_attr_val.v.val_str->refcount)
5457 a->dw_attr_val.v.val_str->refcount--;
5458
5459 /* vec::ordered_remove should help reduce the number of abbrevs
5460 that are needed. */
5461 die->die_attr->ordered_remove (ix);
5462 return true;
5463 }
5464 return false;
5465 }
5466
5467 /* Remove CHILD from its parent. PREV must have the property that
5468 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5469
5470 static void
5471 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5472 {
5473 gcc_assert (child->die_parent == prev->die_parent);
5474 gcc_assert (prev->die_sib == child);
5475 if (prev == child)
5476 {
5477 gcc_assert (child->die_parent->die_child == child);
5478 prev = NULL;
5479 }
5480 else
5481 prev->die_sib = child->die_sib;
5482 if (child->die_parent->die_child == child)
5483 child->die_parent->die_child = prev;
5484 child->die_sib = NULL;
5485 }
5486
5487 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5488 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5489
5490 static void
5491 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5492 {
5493 dw_die_ref parent = old_child->die_parent;
5494
5495 gcc_assert (parent == prev->die_parent);
5496 gcc_assert (prev->die_sib == old_child);
5497
5498 new_child->die_parent = parent;
5499 if (prev == old_child)
5500 {
5501 gcc_assert (parent->die_child == old_child);
5502 new_child->die_sib = new_child;
5503 }
5504 else
5505 {
5506 prev->die_sib = new_child;
5507 new_child->die_sib = old_child->die_sib;
5508 }
5509 if (old_child->die_parent->die_child == old_child)
5510 old_child->die_parent->die_child = new_child;
5511 old_child->die_sib = NULL;
5512 }
5513
5514 /* Move all children from OLD_PARENT to NEW_PARENT. */
5515
5516 static void
5517 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5518 {
5519 dw_die_ref c;
5520 new_parent->die_child = old_parent->die_child;
5521 old_parent->die_child = NULL;
5522 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5523 }
5524
5525 /* Remove child DIE whose die_tag is TAG. Do nothing if no child
5526 matches TAG. */
5527
5528 static void
5529 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5530 {
5531 dw_die_ref c;
5532
5533 c = die->die_child;
5534 if (c) do {
5535 dw_die_ref prev = c;
5536 c = c->die_sib;
5537 while (c->die_tag == tag)
5538 {
5539 remove_child_with_prev (c, prev);
5540 c->die_parent = NULL;
5541 /* Might have removed every child. */
5542 if (die->die_child == NULL)
5543 return;
5544 c = prev->die_sib;
5545 }
5546 } while (c != die->die_child);
5547 }
5548
5549 /* Add a CHILD_DIE as the last child of DIE. */
5550
5551 static void
5552 add_child_die (dw_die_ref die, dw_die_ref child_die)
5553 {
5554 /* FIXME this should probably be an assert. */
5555 if (! die || ! child_die)
5556 return;
5557 gcc_assert (die != child_die);
5558
5559 child_die->die_parent = die;
5560 if (die->die_child)
5561 {
5562 child_die->die_sib = die->die_child->die_sib;
5563 die->die_child->die_sib = child_die;
5564 }
5565 else
5566 child_die->die_sib = child_die;
5567 die->die_child = child_die;
5568 }
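/* Editor note (illustrative, not part of the build): children of a DIE form
   a circular singly-linked list through die_sib, with die_child pointing at
   the most recently added child; the first child is thus
   die->die_child->die_sib.  After add_child_die (P, A) followed by
   add_child_die (P, B):

     P->die_child == B       // last child added
     B->die_sib   == A       // last child links back to the first
     A->die_sib   == B       // which closes the ring

   remove_child_with_prev, verify_die and FOR_EACH_CHILD all rely on this
   ring structure.  */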
5569
5570 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5571
5572 static void
5573 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5574 dw_die_ref after_die)
5575 {
5576 gcc_assert (die
5577 && child_die
5578 && after_die
5579 && die->die_child
5580 && die != child_die);
5581
5582 child_die->die_parent = die;
5583 child_die->die_sib = after_die->die_sib;
5584 after_die->die_sib = child_die;
5585 if (die->die_child == after_die)
5586 die->die_child = child_die;
5587 }
5588
5589 /* Unassociate CHILD from its parent, and make its parent be
5590 NEW_PARENT. */
5591
5592 static void
5593 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5594 {
5595 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5596 if (p->die_sib == child)
5597 {
5598 remove_child_with_prev (child, p);
5599 break;
5600 }
5601 add_child_die (new_parent, child);
5602 }
5603
5604 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5605 is the specification, to the end of PARENT's list of children.
5606 This is done by removing and re-adding it. */
5607
5608 static void
5609 splice_child_die (dw_die_ref parent, dw_die_ref child)
5610 {
5611 /* We want the declaration DIE from inside the class, not the
5612 specification DIE at toplevel. */
5613 if (child->die_parent != parent)
5614 {
5615 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5616
5617 if (tmp)
5618 child = tmp;
5619 }
5620
5621 gcc_assert (child->die_parent == parent
5622 || (child->die_parent
5623 == get_AT_ref (parent, DW_AT_specification)));
5624
5625 reparent_child (child, parent);
5626 }
5627
5628 /* Create and return a new die with TAG_VALUE as tag. */
5629
5630 static inline dw_die_ref
5631 new_die_raw (enum dwarf_tag tag_value)
5632 {
5633 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5634 die->die_tag = tag_value;
5635 return die;
5636 }
5637
5638 /* Create and return a new die with a parent of PARENT_DIE. If
5639 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5640 associated tree T must be supplied to determine parenthood
5641 later. */
5642
5643 static inline dw_die_ref
5644 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5645 {
5646 dw_die_ref die = new_die_raw (tag_value);
5647
5648 if (parent_die != NULL)
5649 add_child_die (parent_die, die);
5650 else
5651 {
5652 limbo_die_node *limbo_node;
5653
5654 /* No DIEs created after early dwarf should end up in limbo,
5655 because the limbo list should not persist past LTO
5656 streaming. */
5657 if (tag_value != DW_TAG_compile_unit
5658 /* These are allowed because they're generated while
5659 breaking out COMDAT units late. */
5660 && tag_value != DW_TAG_type_unit
5661 && tag_value != DW_TAG_skeleton_unit
5662 && !early_dwarf
5663 /* Allow nested functions to live in limbo because they will
5664 only temporarily live there, as decls_for_scope will fix
5665 them up. */
5666 && (TREE_CODE (t) != FUNCTION_DECL
5667 || !decl_function_context (t))
5668 /* Same as nested functions above but for types. Types that
5669 are local to a function will be fixed in
5670 decls_for_scope. */
5671 && (!RECORD_OR_UNION_TYPE_P (t)
5672 || !TYPE_CONTEXT (t)
5673 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5674 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5675 especially in the ltrans stage, but once we implement LTO
5676 dwarf streaming, we should remove this exception. */
5677 && !in_lto_p)
5678 {
5679 fprintf (stderr, "symbol ended up in limbo too late:");
5680 debug_generic_stmt (t);
5681 gcc_unreachable ();
5682 }
5683
5684 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5685 limbo_node->die = die;
5686 limbo_node->created_for = t;
5687 limbo_node->next = limbo_die_list;
5688 limbo_die_list = limbo_node;
5689 }
5690
5691 return die;
5692 }
5693
5694 /* Return the DIE associated with the given type specifier. */
5695
5696 static inline dw_die_ref
5697 lookup_type_die (tree type)
5698 {
5699 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5700 if (die && die->removed)
5701 {
5702 TYPE_SYMTAB_DIE (type) = NULL;
5703 return NULL;
5704 }
5705 return die;
5706 }
5707
5708 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5709 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5710 anonymous type instead of the one of the naming typedef. */
5711
5712 static inline dw_die_ref
5713 strip_naming_typedef (tree type, dw_die_ref type_die)
5714 {
5715 if (type
5716 && TREE_CODE (type) == RECORD_TYPE
5717 && type_die
5718 && type_die->die_tag == DW_TAG_typedef
5719 && is_naming_typedef_decl (TYPE_NAME (type)))
5720 type_die = get_AT_ref (type_die, DW_AT_type);
5721 return type_die;
5722 }
5723
5724 /* Like lookup_type_die, but if type is an anonymous type named by a
5725 typedef[1], return the DIE of the anonymous type instead of the one of
5726 the naming typedef. This is because in gen_typedef_die we equated
5727 the anonymous struct named by the typedef with the DIE of
5728 the naming typedef. So by default, lookup_type_die on an anonymous
5729 struct yields the DIE of the naming typedef.
5730
5731 [1]: Read the comment of is_naming_typedef_decl to learn about what
5732 a naming typedef is. */
5733
5734 static inline dw_die_ref
5735 lookup_type_die_strip_naming_typedef (tree type)
5736 {
5737 dw_die_ref die = lookup_type_die (type);
5738 return strip_naming_typedef (type, die);
5739 }
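/* Editor note (illustrative, not part of the build) on the naming-typedef
   case handled above.  Given

     typedef struct { int i; } foo_t;

   gen_typedef_die equates the anonymous struct with the DIE of the typedef,
   so a plain lookup_type_die on the struct type returns the DW_TAG_typedef
   DIE; lookup_type_die_strip_naming_typedef follows that DIE's DW_AT_type
   instead and hands back the DIE of the anonymous structure itself.  */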
5740
5741 /* Equate a DIE to a given type specifier. */
5742
5743 static inline void
5744 equate_type_number_to_die (tree type, dw_die_ref type_die)
5745 {
5746 TYPE_SYMTAB_DIE (type) = type_die;
5747 }
5748
5749 static dw_die_ref maybe_create_die_with_external_ref (tree);
5750 struct GTY(()) sym_off_pair
5751 {
5752 const char * GTY((skip)) sym;
5753 unsigned HOST_WIDE_INT off;
5754 };
5755 static GTY(()) hash_map<tree, sym_off_pair> *external_die_map;
5756
5757 /* Returns a hash value for X (which really is a die_struct). */
5758
5759 inline hashval_t
5760 decl_die_hasher::hash (die_node *x)
5761 {
5762 return (hashval_t) x->decl_id;
5763 }
5764
5765 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5766
5767 inline bool
5768 decl_die_hasher::equal (die_node *x, tree y)
5769 {
5770 return (x->decl_id == DECL_UID (y));
5771 }
5772
5773 /* Return the DIE associated with a given declaration. */
5774
5775 static inline dw_die_ref
5776 lookup_decl_die (tree decl)
5777 {
5778 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5779 NO_INSERT);
5780 if (!die)
5781 {
5782 if (in_lto_p)
5783 return maybe_create_die_with_external_ref (decl);
5784 return NULL;
5785 }
5786 if ((*die)->removed)
5787 {
5788 decl_die_table->clear_slot (die);
5789 return NULL;
5790 }
5791 return *die;
5792 }
5793
5794
5795 /* Return the DIE associated with BLOCK. */
5796
5797 static inline dw_die_ref
5798 lookup_block_die (tree block)
5799 {
5800 dw_die_ref die = BLOCK_DIE (block);
5801 if (!die && in_lto_p)
5802 return maybe_create_die_with_external_ref (block);
5803 return die;
5804 }
5805
5806 /* Associate DIE with BLOCK. */
5807
5808 static inline void
5809 equate_block_to_die (tree block, dw_die_ref die)
5810 {
5811 BLOCK_DIE (block) = die;
5812 }
5813 #undef BLOCK_DIE
5814
5815
5816 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5817 style reference. Return true if we found one referring to a DIE for
5818 DECL, otherwise return false. */
5819
5820 static bool
5821 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5822 unsigned HOST_WIDE_INT *off)
5823 {
5824 dw_die_ref die;
5825
5826 if (in_lto_p)
5827 {
5828 /* During WPA stage and incremental linking we use a hash-map
5829 to store the decl <-> label + offset map. */
5830 if (!external_die_map)
5831 return false;
5832 sym_off_pair *desc = external_die_map->get (decl);
5833 if (!desc)
5834 return false;
5835 *sym = desc->sym;
5836 *off = desc->off;
5837 return true;
5838 }
5839
5840 if (TREE_CODE (decl) == BLOCK)
5841 die = lookup_block_die (decl);
5842 else
5843 die = lookup_decl_die (decl);
5844 if (!die)
5845 return false;
5846
5847 /* Similar to get_ref_die_offset_label, but using the "correct"
5848 label. */
5849 *off = die->die_offset;
5850 while (die->die_parent)
5851 die = die->die_parent;
5852 /* For the containing CU DIE we compute a die_symbol in
5853 compute_comp_unit_symbol. */
5854 gcc_assert (die->die_tag == DW_TAG_compile_unit
5855 && die->die_id.die_symbol != NULL);
5856 *sym = die->die_id.die_symbol;
5857 return true;
5858 }
5859
5860 /* Add to DIE a reference of kind ATTR_KIND to the DIE at SYMBOL + OFFSET. */
5861
5862 static void
5863 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5864 const char *symbol, HOST_WIDE_INT offset)
5865 {
5866 /* Create a fake DIE that contains the reference. Don't use
5867 new_die because we don't want to end up in the limbo list. */
5868 /* ??? We probably want to share these, thus put a ref to the DIE
5869 we create here to the external_die_map entry. */
5870 dw_die_ref ref = new_die_raw (die->die_tag);
5871 ref->die_id.die_symbol = symbol;
5872 ref->die_offset = offset;
5873 ref->with_offset = 1;
5874 add_AT_die_ref (die, attr_kind, ref);
5875 }
5876
5877 /* Record that the DIE containing DECL's attributes dumped early lives
5878 at SYMBOL + OFFSET, so a referencing DIE can be created later if required. */
5879
5880 static void
5881 dwarf2out_register_external_die (tree decl, const char *sym,
5882 unsigned HOST_WIDE_INT off)
5883 {
5884 if (debug_info_level == DINFO_LEVEL_NONE)
5885 return;
5886
5887 if (!external_die_map)
5888 external_die_map = hash_map<tree, sym_off_pair>::create_ggc (1000);
5889 gcc_checking_assert (!external_die_map->get (decl));
5890 sym_off_pair p = { IDENTIFIER_POINTER (get_identifier (sym)), off };
5891 external_die_map->put (decl, p);
5892 }
5893
5894 /* If we have a registered external DIE for DECL return a new DIE for
5895 the concrete instance with an appropriate abstract origin. */
5896
5897 static dw_die_ref
5898 maybe_create_die_with_external_ref (tree decl)
5899 {
5900 if (!external_die_map)
5901 return NULL;
5902 sym_off_pair *desc = external_die_map->get (decl);
5903 if (!desc)
5904 return NULL;
5905
5906 const char *sym = desc->sym;
5907 unsigned HOST_WIDE_INT off = desc->off;
5908
5909 in_lto_p = false;
5910 dw_die_ref die = (TREE_CODE (decl) == BLOCK
5911 ? lookup_block_die (decl) : lookup_decl_die (decl));
5912 gcc_assert (!die);
5913 in_lto_p = true;
5914
5915 tree ctx;
5916 dw_die_ref parent = NULL;
5917 /* Need to lookup a DIE for the decls context - the containing
5918 function or translation unit. */
5919 if (TREE_CODE (decl) == BLOCK)
5920 {
5921 ctx = BLOCK_SUPERCONTEXT (decl);
5922 /* ??? We do not output DIEs for all scopes thus skip as
5923 many DIEs as needed. */
5924 while (TREE_CODE (ctx) == BLOCK
5925 && !lookup_block_die (ctx))
5926 ctx = BLOCK_SUPERCONTEXT (ctx);
5927 }
5928 else
5929 ctx = DECL_CONTEXT (decl);
5930 /* Peel types in the context stack. */
5931 while (ctx && TYPE_P (ctx))
5932 ctx = TYPE_CONTEXT (ctx);
5933 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5934 if (debug_info_level <= DINFO_LEVEL_TERSE)
5935 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5936 ctx = DECL_CONTEXT (ctx);
5937 if (ctx)
5938 {
5939 if (TREE_CODE (ctx) == BLOCK)
5940 parent = lookup_block_die (ctx);
5941 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5942 /* Keep the 1:1 association during WPA. */
5943 && !flag_wpa
5944 && flag_incremental_link != INCREMENTAL_LINK_LTO)
5945 /* Otherwise all late annotations go to the main CU which
5946 imports the original CUs. */
5947 parent = comp_unit_die ();
5948 else if (TREE_CODE (ctx) == FUNCTION_DECL
5949 && TREE_CODE (decl) != FUNCTION_DECL
5950 && TREE_CODE (decl) != PARM_DECL
5951 && TREE_CODE (decl) != RESULT_DECL
5952 && TREE_CODE (decl) != BLOCK)
5953 /* Leave determining the parent of function-local entities to when
5954 we process scope variables. */
5955 ;
5956 else
5957 parent = lookup_decl_die (ctx);
5958 }
5959 else
5960 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5961 Handle this case gracefully by globalizing stuff. */
5962 parent = comp_unit_die ();
5963 /* Create a DIE "stub". */
5964 switch (TREE_CODE (decl))
5965 {
5966 case TRANSLATION_UNIT_DECL:
5967 {
5968 die = comp_unit_die ();
5969 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5970 to create a DIE for the original CUs. */
5971 return die;
5972 }
5973 case NAMESPACE_DECL:
5974 if (is_fortran (decl))
5975 die = new_die (DW_TAG_module, parent, decl);
5976 else
5977 die = new_die (DW_TAG_namespace, parent, decl);
5978 break;
5979 case FUNCTION_DECL:
5980 die = new_die (DW_TAG_subprogram, parent, decl);
5981 break;
5982 case VAR_DECL:
5983 die = new_die (DW_TAG_variable, parent, decl);
5984 break;
5985 case RESULT_DECL:
5986 die = new_die (DW_TAG_variable, parent, decl);
5987 break;
5988 case PARM_DECL:
5989 die = new_die (DW_TAG_formal_parameter, parent, decl);
5990 break;
5991 case CONST_DECL:
5992 die = new_die (DW_TAG_constant, parent, decl);
5993 break;
5994 case LABEL_DECL:
5995 die = new_die (DW_TAG_label, parent, decl);
5996 break;
5997 case BLOCK:
5998 die = new_die (DW_TAG_lexical_block, parent, decl);
5999 break;
6000 default:
6001 gcc_unreachable ();
6002 }
6003 if (TREE_CODE (decl) == BLOCK)
6004 equate_block_to_die (decl, die);
6005 else
6006 equate_decl_number_to_die (decl, die);
6007
6008 add_desc_attribute (die, decl);
6009
6010 /* Add a reference to the DIE providing early debug at $sym + off. */
6011 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6012
6013 return die;
6014 }
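/* Editor sketch (illustrative, not part of the build) of how the early-debug
   references above fit together under LTO.  At compile time the early DWARF
   for a decl is emitted and its location recorded:

     dwarf2out_register_external_die (decl, "some_early_cu_label", 0x123);

   (label and offset are made up for the example).  At LTRANS time a
   lookup_decl_die (decl) that finds no DIE ends up in
   maybe_create_die_with_external_ref and receives a stub such as

     DW_TAG_variable
       DW_AT_abstract_origin -> "some_early_cu_label" + 0x123

   onto which the late, location-related attributes are then added.  */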
6015
6016 /* Returns a hash value for X (which really is a var_loc_list). */
6017
6018 inline hashval_t
6019 decl_loc_hasher::hash (var_loc_list *x)
6020 {
6021 return (hashval_t) x->decl_id;
6022 }
6023
6024 /* Return nonzero if decl_id of var_loc_list X is the same as
6025 UID of decl *Y. */
6026
6027 inline bool
6028 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6029 {
6030 return (x->decl_id == DECL_UID (y));
6031 }
6032
6033 /* Return the var_loc list associated with a given declaration. */
6034
6035 static inline var_loc_list *
6036 lookup_decl_loc (const_tree decl)
6037 {
6038 if (!decl_loc_table)
6039 return NULL;
6040 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6041 }
6042
6043 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
6044
6045 inline hashval_t
6046 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6047 {
6048 return (hashval_t) x->decl_id;
6049 }
6050
6051 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6052 UID of decl *Y. */
6053
6054 inline bool
6055 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6056 {
6057 return (x->decl_id == DECL_UID (y));
6058 }
6059
6060 /* Equate a DIE to a particular declaration. */
6061
6062 static void
6063 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6064 {
6065 unsigned int decl_id = DECL_UID (decl);
6066
6067 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6068 decl_die->decl_id = decl_id;
6069 }
6070
6071 /* Return how many bits PIECE EXPR_LIST covers. */
6072
6073 static HOST_WIDE_INT
6074 decl_piece_bitsize (rtx piece)
6075 {
6076 int ret = (int) GET_MODE (piece);
6077 if (ret)
6078 return ret;
6079 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6080 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6081 return INTVAL (XEXP (XEXP (piece, 0), 0));
6082 }
6083
6084 /* Return a pointer to the slot holding the location note in PIECE EXPR_LIST. */
6085
6086 static rtx *
6087 decl_piece_varloc_ptr (rtx piece)
6088 {
6089 if ((int) GET_MODE (piece))
6090 return &XEXP (piece, 0);
6091 else
6092 return &XEXP (XEXP (piece, 0), 1);
6093 }
6094
6095 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6096 NEXT is the chain of following piece nodes. */
6097
6098 static rtx_expr_list *
6099 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6100 {
6101 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6102 return alloc_EXPR_LIST (bitsize, loc_note, next);
6103 else
6104 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6105 GEN_INT (bitsize),
6106 loc_note), next);
6107 }
6108
6109 /* Return rtx that should be stored into loc field for
6110 LOC_NOTE and BITPOS/BITSIZE. */
6111
6112 static rtx
6113 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6114 HOST_WIDE_INT bitsize)
6115 {
6116 if (bitsize != -1)
6117 {
6118 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6119 if (bitpos != 0)
6120 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6121 }
6122 return loc_note;
6123 }
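/* Editor example (illustrative, not part of the build) of the piece encoding
   built by decl_piece_node and construct_piece_list.  For a fragment that
   lives at bit offset 32 with size 16,

     construct_piece_list (loc_note, 32, 16);

   produces two EXPR_LIST nodes: a 32-bit padding piece whose location is
   NULL_RTX, followed by a 16-bit piece carrying LOC_NOTE.  Bit sizes no
   larger than MAX_MACHINE_MODE are smuggled into the EXPR_LIST mode field;
   larger ones are wrapped as (CONCAT (CONST_INT size) note), which is
   exactly what decl_piece_bitsize and decl_piece_varloc_ptr decode.  */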
6124
6125 /* This function either modifies location piece list *DEST in
6126 place (if SRC and INNER are NULL), or copies location piece list
6127 *SRC to *DEST while modifying it. Location BITPOS is modified
6128 to contain LOC_NOTE; any pieces overlapping it are removed (or not
6129 copied) and, if needed, some padding around it is added.
6130 When modifying in place, DEST should point to EXPR_LIST where
6131 earlier pieces cover PIECE_BITPOS bits, when copying SRC points
6132 to the start of the whole list and INNER points to the EXPR_LIST
6133 where earlier pieces cover PIECE_BITPOS bits. */
6134
6135 static void
6136 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6137 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6138 HOST_WIDE_INT bitsize, rtx loc_note)
6139 {
6140 HOST_WIDE_INT diff;
6141 bool copy = inner != NULL;
6142
6143 if (copy)
6144 {
6145 /* First copy all nodes preceding the current bitpos. */
6146 while (src != inner)
6147 {
6148 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6149 decl_piece_bitsize (*src), NULL_RTX);
6150 dest = &XEXP (*dest, 1);
6151 src = &XEXP (*src, 1);
6152 }
6153 }
6154 /* Add padding if needed. */
6155 if (bitpos != piece_bitpos)
6156 {
6157 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6158 copy ? NULL_RTX : *dest);
6159 dest = &XEXP (*dest, 1);
6160 }
6161 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6162 {
6163 gcc_assert (!copy);
6164 /* A piece with correct bitpos and bitsize already exists;
6165 just update the location for it and return. */
6166 *decl_piece_varloc_ptr (*dest) = loc_note;
6167 return;
6168 }
6169 /* Add the piece that changed. */
6170 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6171 dest = &XEXP (*dest, 1);
6172 /* Skip over pieces that overlap it. */
6173 diff = bitpos - piece_bitpos + bitsize;
6174 if (!copy)
6175 src = dest;
6176 while (diff > 0 && *src)
6177 {
6178 rtx piece = *src;
6179 diff -= decl_piece_bitsize (piece);
6180 if (copy)
6181 src = &XEXP (piece, 1);
6182 else
6183 {
6184 *src = XEXP (piece, 1);
6185 free_EXPR_LIST_node (piece);
6186 }
6187 }
6188 /* Add padding if needed. */
6189 if (diff < 0 && *src)
6190 {
6191 if (!copy)
6192 dest = src;
6193 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6194 dest = &XEXP (*dest, 1);
6195 }
6196 if (!copy)
6197 return;
6198 /* Finally copy all nodes following it. */
6199 while (*src)
6200 {
6201 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6202 decl_piece_bitsize (*src), NULL_RTX);
6203 dest = &XEXP (*dest, 1);
6204 src = &XEXP (*src, 1);
6205 }
6206 }
6207
6208 /* Add a variable location node to the linked list for DECL. */
6209
6210 static struct var_loc_node *
6211 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6212 {
6213 unsigned int decl_id;
6214 var_loc_list *temp;
6215 struct var_loc_node *loc = NULL;
6216 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6217
6218 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6219 {
6220 tree realdecl = DECL_DEBUG_EXPR (decl);
6221 if (handled_component_p (realdecl)
6222 || (TREE_CODE (realdecl) == MEM_REF
6223 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6224 {
6225 bool reverse;
6226 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6227 &bitsize, &reverse);
6228 if (!innerdecl
6229 || !DECL_P (innerdecl)
6230 || DECL_IGNORED_P (innerdecl)
6231 || TREE_STATIC (innerdecl)
6232 || bitsize == 0
6233 || bitpos + bitsize > 256)
6234 return NULL;
6235 decl = innerdecl;
6236 }
6237 }
6238
6239 decl_id = DECL_UID (decl);
6240 var_loc_list **slot
6241 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6242 if (*slot == NULL)
6243 {
6244 temp = ggc_cleared_alloc<var_loc_list> ();
6245 temp->decl_id = decl_id;
6246 *slot = temp;
6247 }
6248 else
6249 temp = *slot;
6250
6251 /* For PARM_DECLs try to keep around the original incoming value,
6252 even if that means we'll emit a zero-range .debug_loc entry. */
6253 if (temp->last
6254 && temp->first == temp->last
6255 && TREE_CODE (decl) == PARM_DECL
6256 && NOTE_P (temp->first->loc)
6257 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6258 && DECL_INCOMING_RTL (decl)
6259 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6260 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6261 == GET_CODE (DECL_INCOMING_RTL (decl))
6262 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6263 && (bitsize != -1
6264 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6265 NOTE_VAR_LOCATION_LOC (loc_note))
6266 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6267 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6268 {
6269 loc = ggc_cleared_alloc<var_loc_node> ();
6270 temp->first->next = loc;
6271 temp->last = loc;
6272 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6273 }
6274 else if (temp->last)
6275 {
6276 struct var_loc_node *last = temp->last, *unused = NULL;
6277 rtx *piece_loc = NULL, last_loc_note;
6278 HOST_WIDE_INT piece_bitpos = 0;
6279 if (last->next)
6280 {
6281 last = last->next;
6282 gcc_assert (last->next == NULL);
6283 }
6284 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6285 {
6286 piece_loc = &last->loc;
6287 do
6288 {
6289 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6290 if (piece_bitpos + cur_bitsize > bitpos)
6291 break;
6292 piece_bitpos += cur_bitsize;
6293 piece_loc = &XEXP (*piece_loc, 1);
6294 }
6295 while (*piece_loc);
6296 }
6297 /* TEMP->LAST here points either to the last-but-one or to the
6298 last element in the chained list; LAST points to the
6299 last element. */
6300 if (label && strcmp (last->label, label) == 0 && last->view == view)
6301 {
6302 /* For SRA-optimized variables, if there weren't any real
6303 insns since the last note, just modify the last node. */
6304 if (piece_loc != NULL)
6305 {
6306 adjust_piece_list (piece_loc, NULL, NULL,
6307 bitpos, piece_bitpos, bitsize, loc_note);
6308 return NULL;
6309 }
6310 /* If the last note doesn't cover any instructions, remove it. */
6311 if (temp->last != last)
6312 {
6313 temp->last->next = NULL;
6314 unused = last;
6315 last = temp->last;
6316 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6317 }
6318 else
6319 {
6320 gcc_assert (temp->first == temp->last
6321 || (temp->first->next == temp->last
6322 && TREE_CODE (decl) == PARM_DECL));
6323 memset (temp->last, '\0', sizeof (*temp->last));
6324 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6325 return temp->last;
6326 }
6327 }
6328 if (bitsize == -1 && NOTE_P (last->loc))
6329 last_loc_note = last->loc;
6330 else if (piece_loc != NULL
6331 && *piece_loc != NULL_RTX
6332 && piece_bitpos == bitpos
6333 && decl_piece_bitsize (*piece_loc) == bitsize)
6334 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6335 else
6336 last_loc_note = NULL_RTX;
6337 /* If the current location is the same as the end of the list,
6338 and either both or neither of the locations is uninitialized,
6339 we have nothing to do. */
6340 if (last_loc_note == NULL_RTX
6341 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6342 NOTE_VAR_LOCATION_LOC (loc_note)))
6343 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6344 != NOTE_VAR_LOCATION_STATUS (loc_note))
6345 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6346 == VAR_INIT_STATUS_UNINITIALIZED)
6347 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6348 == VAR_INIT_STATUS_UNINITIALIZED))))
6349 {
6350 /* Add LOC to the end of list and update LAST. If the last
6351 element of the list has been removed above, reuse its
6352 memory for the new node, otherwise allocate a new one. */
6353 if (unused)
6354 {
6355 loc = unused;
6356 memset (loc, '\0', sizeof (*loc));
6357 }
6358 else
6359 loc = ggc_cleared_alloc<var_loc_node> ();
6360 if (bitsize == -1 || piece_loc == NULL)
6361 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6362 else
6363 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6364 bitpos, piece_bitpos, bitsize, loc_note);
6365 last->next = loc;
6366 /* Ensure TEMP->LAST will point either to the new last but one
6367 element of the chain, or to the last element in it. */
6368 if (last != temp->last)
6369 temp->last = last;
6370 }
6371 else if (unused)
6372 ggc_free (unused);
6373 }
6374 else
6375 {
6376 loc = ggc_cleared_alloc<var_loc_node> ();
6377 temp->first = loc;
6378 temp->last = loc;
6379 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6380 }
6381 return loc;
6382 }
6383 \f
6384 /* Keep track of the number of spaces used to indent the
6385 output of the debugging routines that print the structure of
6386 the DIE internal representation. */
6387 static int print_indent;
6388
6389 /* Indent the line the number of spaces given by print_indent. */
6390
6391 static inline void
6392 print_spaces (FILE *outfile)
6393 {
6394 fprintf (outfile, "%*s", print_indent, "");
6395 }
6396
6397 /* Print a type signature in hex. */
6398
6399 static inline void
6400 print_signature (FILE *outfile, char *sig)
6401 {
6402 int i;
6403
6404 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6405 fprintf (outfile, "%02x", sig[i] & 0xff);
6406 }
6407
6408 static inline void
6409 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6410 {
6411 if (discr_value->pos)
6412 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6413 else
6414 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6415 }
6416
6417 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6418
6419 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6420 RECURSE, output location descriptor operations. */
6421
6422 static void
6423 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6424 {
6425 switch (val->val_class)
6426 {
6427 case dw_val_class_addr:
6428 fprintf (outfile, "address");
6429 break;
6430 case dw_val_class_offset:
6431 fprintf (outfile, "offset");
6432 break;
6433 case dw_val_class_loc:
6434 fprintf (outfile, "location descriptor");
6435 if (val->v.val_loc == NULL)
6436 fprintf (outfile, " -> <null>\n");
6437 else if (recurse)
6438 {
6439 fprintf (outfile, ":\n");
6440 print_indent += 4;
6441 print_loc_descr (val->v.val_loc, outfile);
6442 print_indent -= 4;
6443 }
6444 else
6445 {
6446 if (flag_dump_noaddr || flag_dump_unnumbered)
6447 fprintf (outfile, " #\n");
6448 else
6449 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6450 }
6451 break;
6452 case dw_val_class_loc_list:
6453 fprintf (outfile, "location list -> label:%s",
6454 val->v.val_loc_list->ll_symbol);
6455 break;
6456 case dw_val_class_view_list:
6457 val = view_list_to_loc_list_val_node (val);
6458 fprintf (outfile, "location list with views -> labels:%s and %s",
6459 val->v.val_loc_list->ll_symbol,
6460 val->v.val_loc_list->vl_symbol);
6461 break;
6462 case dw_val_class_range_list:
6463 fprintf (outfile, "range list");
6464 break;
6465 case dw_val_class_const:
6466 case dw_val_class_const_implicit:
6467 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6468 break;
6469 case dw_val_class_unsigned_const:
6470 case dw_val_class_unsigned_const_implicit:
6471 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6472 break;
6473 case dw_val_class_const_double:
6474 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6475 HOST_WIDE_INT_PRINT_UNSIGNED")",
6476 val->v.val_double.high,
6477 val->v.val_double.low);
6478 break;
6479 case dw_val_class_wide_int:
6480 {
6481 int i = val->v.val_wide->get_len ();
6482 fprintf (outfile, "constant (");
6483 gcc_assert (i > 0);
6484 if (val->v.val_wide->elt (i - 1) == 0)
6485 fprintf (outfile, "0x");
6486 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6487 val->v.val_wide->elt (--i));
6488 while (--i >= 0)
6489 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6490 val->v.val_wide->elt (i));
6491 fprintf (outfile, ")");
6492 break;
6493 }
6494 case dw_val_class_vec:
6495 fprintf (outfile, "floating-point or vector constant");
6496 break;
6497 case dw_val_class_flag:
6498 fprintf (outfile, "%u", val->v.val_flag);
6499 break;
6500 case dw_val_class_die_ref:
6501 if (val->v.val_die_ref.die != NULL)
6502 {
6503 dw_die_ref die = val->v.val_die_ref.die;
6504
6505 if (die->comdat_type_p)
6506 {
6507 fprintf (outfile, "die -> signature: ");
6508 print_signature (outfile,
6509 die->die_id.die_type_node->signature);
6510 }
6511 else if (die->die_id.die_symbol)
6512 {
6513 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6514 if (die->with_offset)
6515 fprintf (outfile, " + %ld", die->die_offset);
6516 }
6517 else
6518 fprintf (outfile, "die -> %ld", die->die_offset);
6519 if (flag_dump_noaddr || flag_dump_unnumbered)
6520 fprintf (outfile, " #");
6521 else
6522 fprintf (outfile, " (%p)", (void *) die);
6523 }
6524 else
6525 fprintf (outfile, "die -> <null>");
6526 break;
6527 case dw_val_class_vms_delta:
6528 fprintf (outfile, "delta: @slotcount(%s-%s)",
6529 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6530 break;
6531 case dw_val_class_symview:
6532 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6533 break;
6534 case dw_val_class_lbl_id:
6535 case dw_val_class_lineptr:
6536 case dw_val_class_macptr:
6537 case dw_val_class_loclistsptr:
6538 case dw_val_class_high_pc:
6539 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6540 break;
6541 case dw_val_class_str:
6542 if (val->v.val_str->str != NULL)
6543 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6544 else
6545 fprintf (outfile, "<null>");
6546 break;
6547 case dw_val_class_file:
6548 case dw_val_class_file_implicit:
6549 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6550 val->v.val_file->emitted_number);
6551 break;
6552 case dw_val_class_data8:
6553 {
6554 int i;
6555
6556 for (i = 0; i < 8; i++)
6557 fprintf (outfile, "%02x", val->v.val_data8[i]);
6558 break;
6559 }
6560 case dw_val_class_discr_value:
6561 print_discr_value (outfile, &val->v.val_discr_value);
6562 break;
6563 case dw_val_class_discr_list:
6564 for (dw_discr_list_ref node = val->v.val_discr_list;
6565 node != NULL;
6566 node = node->dw_discr_next)
6567 {
6568 if (node->dw_discr_range)
6569 {
6570 print_discr_value (outfile, &node->dw_discr_lower_bound);
6571 fprintf (outfile, " .. ");
6572 print_discr_value (outfile, &node->dw_discr_upper_bound);
6573 }
6574 else
6575 print_discr_value (outfile, &node->dw_discr_lower_bound);
6576
6577 if (node->dw_discr_next != NULL)
6578 fprintf (outfile, " | ");
6579 }
      break;
6580 default:
6581 break;
6582 }
6583 }
6584
6585 /* Likewise, for a DIE attribute. */
6586
6587 static void
6588 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6589 {
6590 print_dw_val (&a->dw_attr_val, recurse, outfile);
6591 }
6592
6593
6594 /* Print the list of operands in the LOC location description to OUTFILE. This
6595 routine is a debugging aid only. */
6596
6597 static void
6598 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6599 {
6600 dw_loc_descr_ref l = loc;
6601
6602 if (loc == NULL)
6603 {
6604 print_spaces (outfile);
6605 fprintf (outfile, "<null>\n");
6606 return;
6607 }
6608
6609 for (l = loc; l != NULL; l = l->dw_loc_next)
6610 {
6611 print_spaces (outfile);
6612 if (flag_dump_noaddr || flag_dump_unnumbered)
6613 fprintf (outfile, "#");
6614 else
6615 fprintf (outfile, "(%p)", (void *) l);
6616 fprintf (outfile, " %s",
6617 dwarf_stack_op_name (l->dw_loc_opc));
6618 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6619 {
6620 fprintf (outfile, " ");
6621 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6622 }
6623 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6624 {
6625 fprintf (outfile, ", ");
6626 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6627 }
6628 fprintf (outfile, "\n");
6629 }
6630 }
6631
6632 /* Print the information associated with a given DIE, and its children.
6633 This routine is a debugging aid only. */
6634
6635 static void
6636 print_die (dw_die_ref die, FILE *outfile)
6637 {
6638 dw_attr_node *a;
6639 dw_die_ref c;
6640 unsigned ix;
6641
6642 print_spaces (outfile);
6643 fprintf (outfile, "DIE %4ld: %s ",
6644 die->die_offset, dwarf_tag_name (die->die_tag));
6645 if (flag_dump_noaddr || flag_dump_unnumbered)
6646 fprintf (outfile, "#\n");
6647 else
6648 fprintf (outfile, "(%p)\n", (void*) die);
6649 print_spaces (outfile);
6650 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6651 fprintf (outfile, " offset: %ld", die->die_offset);
6652 fprintf (outfile, " mark: %d\n", die->die_mark);
6653
6654 if (die->comdat_type_p)
6655 {
6656 print_spaces (outfile);
6657 fprintf (outfile, " signature: ");
6658 print_signature (outfile, die->die_id.die_type_node->signature);
6659 fprintf (outfile, "\n");
6660 }
6661
6662 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6663 {
6664 print_spaces (outfile);
6665 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6666
6667 print_attribute (a, true, outfile);
6668 fprintf (outfile, "\n");
6669 }
6670
6671 if (die->die_child != NULL)
6672 {
6673 print_indent += 4;
6674 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6675 print_indent -= 4;
6676 }
6677 if (print_indent == 0)
6678 fprintf (outfile, "\n");
6679 }
6680
6681 /* Print the list of operations in the LOC location description. */
6682
6683 DEBUG_FUNCTION void
6684 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6685 {
6686 print_loc_descr (loc, stderr);
6687 }
6688
6689 /* Print the information collected for a given DIE. */
6690
6691 DEBUG_FUNCTION void
6692 debug_dwarf_die (dw_die_ref die)
6693 {
6694 print_die (die, stderr);
6695 }
6696
6697 DEBUG_FUNCTION void
6698 debug (die_struct &ref)
6699 {
6700 print_die (&ref, stderr);
6701 }
6702
6703 DEBUG_FUNCTION void
6704 debug (die_struct *ptr)
6705 {
6706 if (ptr)
6707 debug (*ptr);
6708 else
6709 fprintf (stderr, "<nil>\n");
6710 }
6711
6712
6713 /* Print all DWARF information collected for the compilation unit.
6714 This routine is a debugging aid only. */
6715
6716 DEBUG_FUNCTION void
6717 debug_dwarf (void)
6718 {
6719 print_indent = 0;
6720 print_die (comp_unit_die (), stderr);
6721 }
6722
6723 /* Verify the DIE tree structure. */
6724
6725 DEBUG_FUNCTION void
6726 verify_die (dw_die_ref die)
6727 {
6728 gcc_assert (!die->die_mark);
6729 if (die->die_parent == NULL
6730 && die->die_sib == NULL)
6731 return;
6732 /* Verify the die_sib list is cyclic. */
6733 dw_die_ref x = die;
6734 do
6735 {
6736 x->die_mark = 1;
6737 x = x->die_sib;
6738 }
6739 while (x && !x->die_mark);
6740 gcc_assert (x == die);
6741 x = die;
6742 do
6743 {
6744 /* Verify all dies have the same parent. */
6745 gcc_assert (x->die_parent == die->die_parent);
6746 if (x->die_child)
6747 {
6748 /* Verify the child has the proper parent and recurse. */
6749 gcc_assert (x->die_child->die_parent == x);
6750 verify_die (x->die_child);
6751 }
6752 x->die_mark = 0;
6753 x = x->die_sib;
6754 }
6755 while (x && x->die_mark);
6756 }
6757
6758 /* Sanity checks on DIEs. */
6759
6760 static void
6761 check_die (dw_die_ref die)
6762 {
6763 unsigned ix;
6764 dw_attr_node *a;
6765 bool inline_found = false;
6766 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6767 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6768 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6769 {
6770 switch (a->dw_attr)
6771 {
6772 case DW_AT_inline:
6773 if (a->dw_attr_val.v.val_unsigned)
6774 inline_found = true;
6775 break;
6776 case DW_AT_location:
6777 ++n_location;
6778 break;
6779 case DW_AT_low_pc:
6780 ++n_low_pc;
6781 break;
6782 case DW_AT_high_pc:
6783 ++n_high_pc;
6784 break;
6785 case DW_AT_artificial:
6786 ++n_artificial;
6787 break;
6788 case DW_AT_decl_column:
6789 ++n_decl_column;
6790 break;
6791 case DW_AT_decl_line:
6792 ++n_decl_line;
6793 break;
6794 case DW_AT_decl_file:
6795 ++n_decl_file;
6796 break;
6797 default:
6798 break;
6799 }
6800 }
6801 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6802 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6803 {
6804 fprintf (stderr, "Duplicate attributes in DIE:\n");
6805 debug_dwarf_die (die);
6806 gcc_unreachable ();
6807 }
6808 if (inline_found)
6809 {
6810 /* A debugging information entry that is a member of an abstract
6811 instance tree [that has DW_AT_inline] should not contain any
6812 attributes which describe aspects of the subroutine which vary
6813 between distinct inlined expansions or distinct out-of-line
6814 expansions. */
6815 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6816 gcc_assert (a->dw_attr != DW_AT_low_pc
6817 && a->dw_attr != DW_AT_high_pc
6818 && a->dw_attr != DW_AT_location
6819 && a->dw_attr != DW_AT_frame_base
6820 && a->dw_attr != DW_AT_call_all_calls
6821 && a->dw_attr != DW_AT_GNU_all_call_sites);
6822 }
6823 }
6824 \f
6825 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6826 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6827 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6828
6829 /* Calculate the checksum of a location expression. */
6830
6831 static inline void
6832 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6833 {
6834 int tem;
6835 inchash::hash hstate;
6836 hashval_t hash;
6837
6838 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6839 CHECKSUM (tem);
6840 hash_loc_operands (loc, hstate);
6841 hash = hstate.end();
6842 CHECKSUM (hash);
6843 }
6844
6845 /* Calculate the checksum of an attribute. */
6846
6847 static void
6848 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6849 {
6850 dw_loc_descr_ref loc;
6851 rtx r;
6852
6853 CHECKSUM (at->dw_attr);
6854
6855 /* We don't care that this was compiled with a different compiler
6856 snapshot; if the output is the same, that's what matters. */
6857 if (at->dw_attr == DW_AT_producer)
6858 return;
6859
6860 switch (AT_class (at))
6861 {
6862 case dw_val_class_const:
6863 case dw_val_class_const_implicit:
6864 CHECKSUM (at->dw_attr_val.v.val_int);
6865 break;
6866 case dw_val_class_unsigned_const:
6867 case dw_val_class_unsigned_const_implicit:
6868 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6869 break;
6870 case dw_val_class_const_double:
6871 CHECKSUM (at->dw_attr_val.v.val_double);
6872 break;
6873 case dw_val_class_wide_int:
6874 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6875 get_full_len (*at->dw_attr_val.v.val_wide)
6876 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6877 break;
6878 case dw_val_class_vec:
6879 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6880 (at->dw_attr_val.v.val_vec.length
6881 * at->dw_attr_val.v.val_vec.elt_size));
6882 break;
6883 case dw_val_class_flag:
6884 CHECKSUM (at->dw_attr_val.v.val_flag);
6885 break;
6886 case dw_val_class_str:
6887 CHECKSUM_STRING (AT_string (at));
6888 break;
6889
6890 case dw_val_class_addr:
6891 r = AT_addr (at);
6892 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6893 CHECKSUM_STRING (XSTR (r, 0));
6894 break;
6895
6896 case dw_val_class_offset:
6897 CHECKSUM (at->dw_attr_val.v.val_offset);
6898 break;
6899
6900 case dw_val_class_loc:
6901 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6902 loc_checksum (loc, ctx);
6903 break;
6904
6905 case dw_val_class_die_ref:
6906 die_checksum (AT_ref (at), ctx, mark);
6907 break;
6908
6909 case dw_val_class_fde_ref:
6910 case dw_val_class_vms_delta:
6911 case dw_val_class_symview:
6912 case dw_val_class_lbl_id:
6913 case dw_val_class_lineptr:
6914 case dw_val_class_macptr:
6915 case dw_val_class_loclistsptr:
6916 case dw_val_class_high_pc:
6917 break;
6918
6919 case dw_val_class_file:
6920 case dw_val_class_file_implicit:
6921 CHECKSUM_STRING (AT_file (at)->filename);
6922 break;
6923
6924 case dw_val_class_data8:
6925 CHECKSUM (at->dw_attr_val.v.val_data8);
6926 break;
6927
6928 default:
6929 break;
6930 }
6931 }
6932
6933 /* Calculate the checksum of a DIE. */
6934
6935 static void
6936 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6937 {
6938 dw_die_ref c;
6939 dw_attr_node *a;
6940 unsigned ix;
6941
6942 /* To avoid infinite recursion. */
6943 if (die->die_mark)
6944 {
6945 CHECKSUM (die->die_mark);
6946 return;
6947 }
6948 die->die_mark = ++(*mark);
6949
6950 CHECKSUM (die->die_tag);
6951
6952 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6953 attr_checksum (a, ctx, mark);
6954
6955 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6956 }
6957
6958 #undef CHECKSUM
6959 #undef CHECKSUM_BLOCK
6960 #undef CHECKSUM_STRING
6961
6962 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6963 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6964 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6965 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6966 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6967 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6968 #define CHECKSUM_ATTR(FOO) \
6969 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6970
6971 /* Calculate the checksum of a number in signed LEB128 format. */
6972
6973 static void
6974 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6975 {
6976 unsigned char byte;
6977 bool more;
6978
6979 while (1)
6980 {
6981 byte = (value & 0x7f);
6982 value >>= 7;
6983 more = !((value == 0 && (byte & 0x40) == 0)
6984 || (value == -1 && (byte & 0x40) != 0));
6985 if (more)
6986 byte |= 0x80;
6987 CHECKSUM (byte);
6988 if (!more)
6989 break;
6990 }
6991 }
6992
6993 /* Calculate the checksum of a number in unsigned LEB128 format. */
6994
6995 static void
6996 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
6997 {
6998 while (1)
6999 {
7000 unsigned char byte = (value & 0x7f);
7001 value >>= 7;
7002 if (value != 0)
7003 /* More bytes to follow. */
7004 byte |= 0x80;
7005 CHECKSUM (byte);
7006 if (value == 0)
7007 break;
7008 }
7009 }
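
/* Illustrative sketch, not built as part of this file: the two encoders
   above emit standard LEB128 byte streams, and every emitted byte is what
   CHECKSUM feeds to the MD5 context.  For the signed encoder, -2 encodes
   as the single byte 0x7e and -129 as 0xff 0x7e.  The helper below and the
   sample values are made up for the example.  */
#if 0
#include <stdio.h>

/* Print the unsigned LEB128 encoding of VALUE, mirroring the loop in
   checksum_uleb128 above.  */
static void
example_uleb128_bytes (unsigned long value)
{
  do
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value != 0)
	byte |= 0x80;	/* More bytes follow.  */
      printf ("0x%02x ", byte);
    }
  while (value != 0);
  putchar ('\n');
}

int
main (void)
{
  example_uleb128_bytes (2);	  /* Prints: 0x02 */
  example_uleb128_bytes (624485); /* Prints: 0xe5 0x8e 0x26 */
  return 0;
}
#endif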
7010
7011 /* Checksum the context of the DIE. This adds the names of any
7012 surrounding namespaces or structures to the checksum. */
7013
7014 static void
7015 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
7016 {
7017 const char *name;
7018 dw_die_ref spec;
7019 int tag = die->die_tag;
7020
7021 if (tag != DW_TAG_namespace
7022 && tag != DW_TAG_structure_type
7023 && tag != DW_TAG_class_type)
7024 return;
7025
7026 name = get_AT_string (die, DW_AT_name);
7027
7028 spec = get_AT_ref (die, DW_AT_specification);
7029 if (spec != NULL)
7030 die = spec;
7031
7032 if (die->die_parent != NULL)
7033 checksum_die_context (die->die_parent, ctx);
7034
7035 CHECKSUM_ULEB128 ('C');
7036 CHECKSUM_ULEB128 (tag);
7037 if (name != NULL)
7038 CHECKSUM_STRING (name);
7039 }
7040
7041 /* Calculate the checksum of a location expression. */
7042
7043 static inline void
7044 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7045 {
7046 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7047 were emitted as a DW_FORM_sdata instead of a location expression. */
7048 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7049 {
7050 CHECKSUM_ULEB128 (DW_FORM_sdata);
7051 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7052 return;
7053 }
7054
7055 /* Otherwise, just checksum the raw location expression. */
7056 while (loc != NULL)
7057 {
7058 inchash::hash hstate;
7059 hashval_t hash;
7060
7061 CHECKSUM_ULEB128 (loc->dtprel);
7062 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7063 hash_loc_operands (loc, hstate);
7064 hash = hstate.end ();
7065 CHECKSUM (hash);
7066 loc = loc->dw_loc_next;
7067 }
7068 }
7069
7070 /* Calculate the checksum of an attribute. */
7071
7072 static void
7073 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7074 struct md5_ctx *ctx, int *mark)
7075 {
7076 dw_loc_descr_ref loc;
7077 rtx r;
7078
7079 if (AT_class (at) == dw_val_class_die_ref)
7080 {
7081 dw_die_ref target_die = AT_ref (at);
7082
7083 /* For pointer and reference types, we checksum only the (qualified)
7084 name of the target type (if there is a name). For friend entries,
7085 we checksum only the (qualified) name of the target type or function.
7086 This allows the checksum to remain the same whether the target type
7087 is complete or not. */
7088 if ((at->dw_attr == DW_AT_type
7089 && (tag == DW_TAG_pointer_type
7090 || tag == DW_TAG_reference_type
7091 || tag == DW_TAG_rvalue_reference_type
7092 || tag == DW_TAG_ptr_to_member_type))
7093 || (at->dw_attr == DW_AT_friend
7094 && tag == DW_TAG_friend))
7095 {
7096 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7097
7098 if (name_attr != NULL)
7099 {
7100 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7101
7102 if (decl == NULL)
7103 decl = target_die;
7104 CHECKSUM_ULEB128 ('N');
7105 CHECKSUM_ULEB128 (at->dw_attr);
7106 if (decl->die_parent != NULL)
7107 checksum_die_context (decl->die_parent, ctx);
7108 CHECKSUM_ULEB128 ('E');
7109 CHECKSUM_STRING (AT_string (name_attr));
7110 return;
7111 }
7112 }
7113
7114 /* For all other references to another DIE, we check to see if the
7115 target DIE has already been visited. If it has, we emit a
7116 backward reference; if not, we descend recursively. */
7117 if (target_die->die_mark > 0)
7118 {
7119 CHECKSUM_ULEB128 ('R');
7120 CHECKSUM_ULEB128 (at->dw_attr);
7121 CHECKSUM_ULEB128 (target_die->die_mark);
7122 }
7123 else
7124 {
7125 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7126
7127 if (decl == NULL)
7128 decl = target_die;
7129 target_die->die_mark = ++(*mark);
7130 CHECKSUM_ULEB128 ('T');
7131 CHECKSUM_ULEB128 (at->dw_attr);
7132 if (decl->die_parent != NULL)
7133 checksum_die_context (decl->die_parent, ctx);
7134 die_checksum_ordered (target_die, ctx, mark);
7135 }
7136 return;
7137 }
7138
7139 CHECKSUM_ULEB128 ('A');
7140 CHECKSUM_ULEB128 (at->dw_attr);
7141
7142 switch (AT_class (at))
7143 {
7144 case dw_val_class_const:
7145 case dw_val_class_const_implicit:
7146 CHECKSUM_ULEB128 (DW_FORM_sdata);
7147 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7148 break;
7149
7150 case dw_val_class_unsigned_const:
7151 case dw_val_class_unsigned_const_implicit:
7152 CHECKSUM_ULEB128 (DW_FORM_sdata);
7153 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7154 break;
7155
7156 case dw_val_class_const_double:
7157 CHECKSUM_ULEB128 (DW_FORM_block);
7158 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7159 CHECKSUM (at->dw_attr_val.v.val_double);
7160 break;
7161
7162 case dw_val_class_wide_int:
7163 CHECKSUM_ULEB128 (DW_FORM_block);
7164 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7165 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7166 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7167 get_full_len (*at->dw_attr_val.v.val_wide)
7168 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7169 break;
7170
7171 case dw_val_class_vec:
7172 CHECKSUM_ULEB128 (DW_FORM_block);
7173 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7174 * at->dw_attr_val.v.val_vec.elt_size);
7175 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7176 (at->dw_attr_val.v.val_vec.length
7177 * at->dw_attr_val.v.val_vec.elt_size));
7178 break;
7179
7180 case dw_val_class_flag:
7181 CHECKSUM_ULEB128 (DW_FORM_flag);
7182 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7183 break;
7184
7185 case dw_val_class_str:
7186 CHECKSUM_ULEB128 (DW_FORM_string);
7187 CHECKSUM_STRING (AT_string (at));
7188 break;
7189
7190 case dw_val_class_addr:
7191 r = AT_addr (at);
7192 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7193 CHECKSUM_ULEB128 (DW_FORM_string);
7194 CHECKSUM_STRING (XSTR (r, 0));
7195 break;
7196
7197 case dw_val_class_offset:
7198 CHECKSUM_ULEB128 (DW_FORM_sdata);
7199 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7200 break;
7201
7202 case dw_val_class_loc:
7203 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7204 loc_checksum_ordered (loc, ctx);
7205 break;
7206
7207 case dw_val_class_fde_ref:
7208 case dw_val_class_symview:
7209 case dw_val_class_lbl_id:
7210 case dw_val_class_lineptr:
7211 case dw_val_class_macptr:
7212 case dw_val_class_loclistsptr:
7213 case dw_val_class_high_pc:
7214 break;
7215
7216 case dw_val_class_file:
7217 case dw_val_class_file_implicit:
7218 CHECKSUM_ULEB128 (DW_FORM_string);
7219 CHECKSUM_STRING (AT_file (at)->filename);
7220 break;
7221
7222 case dw_val_class_data8:
7223 CHECKSUM (at->dw_attr_val.v.val_data8);
7224 break;
7225
7226 default:
7227 break;
7228 }
7229 }
7230
7231 struct checksum_attributes
7232 {
7233 dw_attr_node *at_name;
7234 dw_attr_node *at_type;
7235 dw_attr_node *at_friend;
7236 dw_attr_node *at_accessibility;
7237 dw_attr_node *at_address_class;
7238 dw_attr_node *at_alignment;
7239 dw_attr_node *at_allocated;
7240 dw_attr_node *at_artificial;
7241 dw_attr_node *at_associated;
7242 dw_attr_node *at_binary_scale;
7243 dw_attr_node *at_bit_offset;
7244 dw_attr_node *at_bit_size;
7245 dw_attr_node *at_bit_stride;
7246 dw_attr_node *at_byte_size;
7247 dw_attr_node *at_byte_stride;
7248 dw_attr_node *at_const_value;
7249 dw_attr_node *at_containing_type;
7250 dw_attr_node *at_count;
7251 dw_attr_node *at_data_location;
7252 dw_attr_node *at_data_member_location;
7253 dw_attr_node *at_decimal_scale;
7254 dw_attr_node *at_decimal_sign;
7255 dw_attr_node *at_default_value;
7256 dw_attr_node *at_digit_count;
7257 dw_attr_node *at_discr;
7258 dw_attr_node *at_discr_list;
7259 dw_attr_node *at_discr_value;
7260 dw_attr_node *at_encoding;
7261 dw_attr_node *at_endianity;
7262 dw_attr_node *at_explicit;
7263 dw_attr_node *at_is_optional;
7264 dw_attr_node *at_location;
7265 dw_attr_node *at_lower_bound;
7266 dw_attr_node *at_mutable;
7267 dw_attr_node *at_ordering;
7268 dw_attr_node *at_picture_string;
7269 dw_attr_node *at_prototyped;
7270 dw_attr_node *at_small;
7271 dw_attr_node *at_segment;
7272 dw_attr_node *at_string_length;
7273 dw_attr_node *at_string_length_bit_size;
7274 dw_attr_node *at_string_length_byte_size;
7275 dw_attr_node *at_threads_scaled;
7276 dw_attr_node *at_upper_bound;
7277 dw_attr_node *at_use_location;
7278 dw_attr_node *at_use_UTF8;
7279 dw_attr_node *at_variable_parameter;
7280 dw_attr_node *at_virtuality;
7281 dw_attr_node *at_visibility;
7282 dw_attr_node *at_vtable_elem_location;
7283 };
7284
7285 /* Collect the attributes that we will want to use for the checksum. */
7286
7287 static void
7288 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7289 {
7290 dw_attr_node *a;
7291 unsigned ix;
7292
7293 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7294 {
7295 switch (a->dw_attr)
7296 {
7297 case DW_AT_name:
7298 attrs->at_name = a;
7299 break;
7300 case DW_AT_type:
7301 attrs->at_type = a;
7302 break;
7303 case DW_AT_friend:
7304 attrs->at_friend = a;
7305 break;
7306 case DW_AT_accessibility:
7307 attrs->at_accessibility = a;
7308 break;
7309 case DW_AT_address_class:
7310 attrs->at_address_class = a;
7311 break;
7312 case DW_AT_alignment:
7313 attrs->at_alignment = a;
7314 break;
7315 case DW_AT_allocated:
7316 attrs->at_allocated = a;
7317 break;
7318 case DW_AT_artificial:
7319 attrs->at_artificial = a;
7320 break;
7321 case DW_AT_associated:
7322 attrs->at_associated = a;
7323 break;
7324 case DW_AT_binary_scale:
7325 attrs->at_binary_scale = a;
7326 break;
7327 case DW_AT_bit_offset:
7328 attrs->at_bit_offset = a;
7329 break;
7330 case DW_AT_bit_size:
7331 attrs->at_bit_size = a;
7332 break;
7333 case DW_AT_bit_stride:
7334 attrs->at_bit_stride = a;
7335 break;
7336 case DW_AT_byte_size:
7337 attrs->at_byte_size = a;
7338 break;
7339 case DW_AT_byte_stride:
7340 attrs->at_byte_stride = a;
7341 break;
7342 case DW_AT_const_value:
7343 attrs->at_const_value = a;
7344 break;
7345 case DW_AT_containing_type:
7346 attrs->at_containing_type = a;
7347 break;
7348 case DW_AT_count:
7349 attrs->at_count = a;
7350 break;
7351 case DW_AT_data_location:
7352 attrs->at_data_location = a;
7353 break;
7354 case DW_AT_data_member_location:
7355 attrs->at_data_member_location = a;
7356 break;
7357 case DW_AT_decimal_scale:
7358 attrs->at_decimal_scale = a;
7359 break;
7360 case DW_AT_decimal_sign:
7361 attrs->at_decimal_sign = a;
7362 break;
7363 case DW_AT_default_value:
7364 attrs->at_default_value = a;
7365 break;
7366 case DW_AT_digit_count:
7367 attrs->at_digit_count = a;
7368 break;
7369 case DW_AT_discr:
7370 attrs->at_discr = a;
7371 break;
7372 case DW_AT_discr_list:
7373 attrs->at_discr_list = a;
7374 break;
7375 case DW_AT_discr_value:
7376 attrs->at_discr_value = a;
7377 break;
7378 case DW_AT_encoding:
7379 attrs->at_encoding = a;
7380 break;
7381 case DW_AT_endianity:
7382 attrs->at_endianity = a;
7383 break;
7384 case DW_AT_explicit:
7385 attrs->at_explicit = a;
7386 break;
7387 case DW_AT_is_optional:
7388 attrs->at_is_optional = a;
7389 break;
7390 case DW_AT_location:
7391 attrs->at_location = a;
7392 break;
7393 case DW_AT_lower_bound:
7394 attrs->at_lower_bound = a;
7395 break;
7396 case DW_AT_mutable:
7397 attrs->at_mutable = a;
7398 break;
7399 case DW_AT_ordering:
7400 attrs->at_ordering = a;
7401 break;
7402 case DW_AT_picture_string:
7403 attrs->at_picture_string = a;
7404 break;
7405 case DW_AT_prototyped:
7406 attrs->at_prototyped = a;
7407 break;
7408 case DW_AT_small:
7409 attrs->at_small = a;
7410 break;
7411 case DW_AT_segment:
7412 attrs->at_segment = a;
7413 break;
7414 case DW_AT_string_length:
7415 attrs->at_string_length = a;
7416 break;
7417 case DW_AT_string_length_bit_size:
7418 attrs->at_string_length_bit_size = a;
7419 break;
7420 case DW_AT_string_length_byte_size:
7421 attrs->at_string_length_byte_size = a;
7422 break;
7423 case DW_AT_threads_scaled:
7424 attrs->at_threads_scaled = a;
7425 break;
7426 case DW_AT_upper_bound:
7427 attrs->at_upper_bound = a;
7428 break;
7429 case DW_AT_use_location:
7430 attrs->at_use_location = a;
7431 break;
7432 case DW_AT_use_UTF8:
7433 attrs->at_use_UTF8 = a;
7434 break;
7435 case DW_AT_variable_parameter:
7436 attrs->at_variable_parameter = a;
7437 break;
7438 case DW_AT_virtuality:
7439 attrs->at_virtuality = a;
7440 break;
7441 case DW_AT_visibility:
7442 attrs->at_visibility = a;
7443 break;
7444 case DW_AT_vtable_elem_location:
7445 attrs->at_vtable_elem_location = a;
7446 break;
7447 default:
7448 break;
7449 }
7450 }
7451 }
7452
7453 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7454
7455 static void
7456 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7457 {
7458 dw_die_ref c;
7459 dw_die_ref decl;
7460 struct checksum_attributes attrs;
7461
7462 CHECKSUM_ULEB128 ('D');
7463 CHECKSUM_ULEB128 (die->die_tag);
7464
7465 memset (&attrs, 0, sizeof (attrs));
7466
7467 decl = get_AT_ref (die, DW_AT_specification);
7468 if (decl != NULL)
7469 collect_checksum_attributes (&attrs, decl);
7470 collect_checksum_attributes (&attrs, die);
7471
7472 CHECKSUM_ATTR (attrs.at_name);
7473 CHECKSUM_ATTR (attrs.at_accessibility);
7474 CHECKSUM_ATTR (attrs.at_address_class);
7475 CHECKSUM_ATTR (attrs.at_allocated);
7476 CHECKSUM_ATTR (attrs.at_artificial);
7477 CHECKSUM_ATTR (attrs.at_associated);
7478 CHECKSUM_ATTR (attrs.at_binary_scale);
7479 CHECKSUM_ATTR (attrs.at_bit_offset);
7480 CHECKSUM_ATTR (attrs.at_bit_size);
7481 CHECKSUM_ATTR (attrs.at_bit_stride);
7482 CHECKSUM_ATTR (attrs.at_byte_size);
7483 CHECKSUM_ATTR (attrs.at_byte_stride);
7484 CHECKSUM_ATTR (attrs.at_const_value);
7485 CHECKSUM_ATTR (attrs.at_containing_type);
7486 CHECKSUM_ATTR (attrs.at_count);
7487 CHECKSUM_ATTR (attrs.at_data_location);
7488 CHECKSUM_ATTR (attrs.at_data_member_location);
7489 CHECKSUM_ATTR (attrs.at_decimal_scale);
7490 CHECKSUM_ATTR (attrs.at_decimal_sign);
7491 CHECKSUM_ATTR (attrs.at_default_value);
7492 CHECKSUM_ATTR (attrs.at_digit_count);
7493 CHECKSUM_ATTR (attrs.at_discr);
7494 CHECKSUM_ATTR (attrs.at_discr_list);
7495 CHECKSUM_ATTR (attrs.at_discr_value);
7496 CHECKSUM_ATTR (attrs.at_encoding);
7497 CHECKSUM_ATTR (attrs.at_endianity);
7498 CHECKSUM_ATTR (attrs.at_explicit);
7499 CHECKSUM_ATTR (attrs.at_is_optional);
7500 CHECKSUM_ATTR (attrs.at_location);
7501 CHECKSUM_ATTR (attrs.at_lower_bound);
7502 CHECKSUM_ATTR (attrs.at_mutable);
7503 CHECKSUM_ATTR (attrs.at_ordering);
7504 CHECKSUM_ATTR (attrs.at_picture_string);
7505 CHECKSUM_ATTR (attrs.at_prototyped);
7506 CHECKSUM_ATTR (attrs.at_small);
7507 CHECKSUM_ATTR (attrs.at_segment);
7508 CHECKSUM_ATTR (attrs.at_string_length);
7509 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7510 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7511 CHECKSUM_ATTR (attrs.at_threads_scaled);
7512 CHECKSUM_ATTR (attrs.at_upper_bound);
7513 CHECKSUM_ATTR (attrs.at_use_location);
7514 CHECKSUM_ATTR (attrs.at_use_UTF8);
7515 CHECKSUM_ATTR (attrs.at_variable_parameter);
7516 CHECKSUM_ATTR (attrs.at_virtuality);
7517 CHECKSUM_ATTR (attrs.at_visibility);
7518 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7519 CHECKSUM_ATTR (attrs.at_type);
7520 CHECKSUM_ATTR (attrs.at_friend);
7521 CHECKSUM_ATTR (attrs.at_alignment);
7522
7523 /* Checksum the child DIEs. */
7524 c = die->die_child;
7525 if (c) do {
7526 dw_attr_node *name_attr;
7527
7528 c = c->die_sib;
7529 name_attr = get_AT (c, DW_AT_name);
7530 if (is_template_instantiation (c))
7531 {
7532 /* Ignore instantiations of member type and function templates. */
7533 }
7534 else if (name_attr != NULL
7535 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7536 {
7537 /* Use a shallow checksum for named nested types and member
7538 functions. */
7539 CHECKSUM_ULEB128 ('S');
7540 CHECKSUM_ULEB128 (c->die_tag);
7541 CHECKSUM_STRING (AT_string (name_attr));
7542 }
7543 else
7544 {
7545 /* Use a deep checksum for other children. */
7546 /* Mark this DIE so it gets processed when unmarking. */
7547 if (c->die_mark == 0)
7548 c->die_mark = -1;
7549 die_checksum_ordered (c, ctx, mark);
7550 }
7551 } while (c != die->die_child);
7552
7553 CHECKSUM_ULEB128 (0);
7554 }
7555
7556 /* Add a type name and tag to a hash. */
7557 static void
7558 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7559 {
7560 CHECKSUM_ULEB128 (tag);
7561 CHECKSUM_STRING (name);
7562 }
7563
7564 #undef CHECKSUM
7565 #undef CHECKSUM_STRING
7566 #undef CHECKSUM_ATTR
7567 #undef CHECKSUM_SLEB128
7568 #undef CHECKSUM_ULEB128
7569
7570 /* Generate the type signature for DIE. This is computed by generating an
7571 MD5 checksum over the DIE's tag, its relevant attributes, and its
7572 children. Attributes that are references to other DIEs are processed
7573 by recursion, using the MARK field to prevent infinite recursion.
7574 If the DIE is nested inside a namespace or another type, we also
7575 need to include that context in the signature. The lower 64 bits
7576 of the resulting MD5 checksum comprise the signature. */
7577
7578 static void
7579 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7580 {
7581 int mark;
7582 const char *name;
7583 unsigned char checksum[16];
7584 struct md5_ctx ctx;
7585 dw_die_ref decl;
7586 dw_die_ref parent;
7587
7588 name = get_AT_string (die, DW_AT_name);
7589 decl = get_AT_ref (die, DW_AT_specification);
7590 parent = get_die_parent (die);
7591
7592 /* First, compute a signature for just the type name (and its surrounding
7593 context, if any). This is stored in the type unit DIE for link-time
7594 ODR (one-definition rule) checking. */
7595
7596 if (is_cxx () && name != NULL)
7597 {
7598 md5_init_ctx (&ctx);
7599
7600 /* Checksum the names of surrounding namespaces and structures. */
7601 if (parent != NULL)
7602 checksum_die_context (parent, &ctx);
7603
7604 /* Checksum the current DIE. */
7605 die_odr_checksum (die->die_tag, name, &ctx);
7606 md5_finish_ctx (&ctx, checksum);
7607
7608 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7609 }
7610
7611 /* Next, compute the complete type signature. */
7612
7613 md5_init_ctx (&ctx);
7614 mark = 1;
7615 die->die_mark = mark;
7616
7617 /* Checksum the names of surrounding namespaces and structures. */
7618 if (parent != NULL)
7619 checksum_die_context (parent, &ctx);
7620
7621 /* Checksum the DIE and its children. */
7622 die_checksum_ordered (die, &ctx, &mark);
7623 unmark_all_dies (die);
7624 md5_finish_ctx (&ctx, checksum);
7625
7626 /* Store the signature in the type node and link the type DIE and the
7627 type node together. */
7628 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7629 DWARF_TYPE_SIGNATURE_SIZE);
7630 die->comdat_type_p = true;
7631 die->die_id.die_type_node = type_node;
7632 type_node->type_die = die;
7633
7634 /* If the DIE is a specification, link its declaration to the type node
7635 as well. */
7636 if (decl != NULL)
7637 {
7638 decl->comdat_type_p = true;
7639 decl->die_id.die_type_node = type_node;
7640 }
7641 }
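
/* A minimal standalone sketch (not built here) of the signature extraction
   performed above: hash some bytes with the md5.h routines this file
   already uses and keep the low-order eight bytes of the digest.  It
   assumes DWARF_TYPE_SIGNATURE_SIZE is 8, as for the DWARF sig8 form;
   the function name is invented for the example.  */
#if 0
#include <string.h>
#include "md5.h"

static void
example_sig8 (const char *name, unsigned char sig[8])
{
  struct md5_ctx ctx;
  unsigned char digest[16];

  md5_init_ctx (&ctx);
  /* Include the trailing NUL, as CHECKSUM_STRING does above.  */
  md5_process_bytes (name, strlen (name) + 1, &ctx);
  md5_finish_ctx (&ctx, digest);

  /* Keep the last eight of the sixteen digest bytes, mirroring the
     memcpy from &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE] above.  */
  memcpy (sig, &digest[8], 8);
}
#endif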
7642
7643 /* Do the location expressions look the same? */
7644 static inline int
7645 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7646 {
7647 return loc1->dw_loc_opc == loc2->dw_loc_opc
7648 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7649 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7650 }
7651
7652 /* Do the values look the same? */
7653 static int
7654 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7655 {
7656 dw_loc_descr_ref loc1, loc2;
7657 rtx r1, r2;
7658
7659 if (v1->val_class != v2->val_class)
7660 return 0;
7661
7662 switch (v1->val_class)
7663 {
7664 case dw_val_class_const:
7665 case dw_val_class_const_implicit:
7666 return v1->v.val_int == v2->v.val_int;
7667 case dw_val_class_unsigned_const:
7668 case dw_val_class_unsigned_const_implicit:
7669 return v1->v.val_unsigned == v2->v.val_unsigned;
7670 case dw_val_class_const_double:
7671 return v1->v.val_double.high == v2->v.val_double.high
7672 && v1->v.val_double.low == v2->v.val_double.low;
7673 case dw_val_class_wide_int:
7674 return *v1->v.val_wide == *v2->v.val_wide;
7675 case dw_val_class_vec:
7676 if (v1->v.val_vec.length != v2->v.val_vec.length
7677 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7678 return 0;
7679 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7680 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7681 return 0;
7682 return 1;
7683 case dw_val_class_flag:
7684 return v1->v.val_flag == v2->v.val_flag;
7685 case dw_val_class_str:
7686 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7687
7688 case dw_val_class_addr:
7689 r1 = v1->v.val_addr;
7690 r2 = v2->v.val_addr;
7691 if (GET_CODE (r1) != GET_CODE (r2))
7692 return 0;
7693 return rtx_equal_p (r1, r2);
7694
7695 case dw_val_class_offset:
7696 return v1->v.val_offset == v2->v.val_offset;
7697
7698 case dw_val_class_loc:
7699 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7700 loc1 && loc2;
7701 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7702 if (!same_loc_p (loc1, loc2, mark))
7703 return 0;
7704 return !loc1 && !loc2;
7705
7706 case dw_val_class_die_ref:
7707 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7708
7709 case dw_val_class_symview:
7710 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7711
7712 case dw_val_class_fde_ref:
7713 case dw_val_class_vms_delta:
7714 case dw_val_class_lbl_id:
7715 case dw_val_class_lineptr:
7716 case dw_val_class_macptr:
7717 case dw_val_class_loclistsptr:
7718 case dw_val_class_high_pc:
7719 return 1;
7720
7721 case dw_val_class_file:
7722 case dw_val_class_file_implicit:
7723 return v1->v.val_file == v2->v.val_file;
7724
7725 case dw_val_class_data8:
7726 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7727
7728 default:
7729 return 1;
7730 }
7731 }
7732
7733 /* Do the attributes look the same? */
7734
7735 static int
7736 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7737 {
7738 if (at1->dw_attr != at2->dw_attr)
7739 return 0;
7740
7741 /* We don't care that this was compiled with a different compiler
7742 snapshot; if the output is the same, that's what matters. */
7743 if (at1->dw_attr == DW_AT_producer)
7744 return 1;
7745
7746 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7747 }
7748
7749 /* Do the dies look the same? */
7750
7751 static int
7752 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7753 {
7754 dw_die_ref c1, c2;
7755 dw_attr_node *a1;
7756 unsigned ix;
7757
7758 /* To avoid infinite recursion. */
7759 if (die1->die_mark)
7760 return die1->die_mark == die2->die_mark;
7761 die1->die_mark = die2->die_mark = ++(*mark);
7762
7763 if (die1->die_tag != die2->die_tag)
7764 return 0;
7765
7766 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7767 return 0;
7768
7769 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7770 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7771 return 0;
7772
7773 c1 = die1->die_child;
7774 c2 = die2->die_child;
7775 if (! c1)
7776 {
7777 if (c2)
7778 return 0;
7779 }
7780 else
7781 for (;;)
7782 {
7783 if (!same_die_p (c1, c2, mark))
7784 return 0;
7785 c1 = c1->die_sib;
7786 c2 = c2->die_sib;
7787 if (c1 == die1->die_child)
7788 {
7789 if (c2 == die2->die_child)
7790 break;
7791 else
7792 return 0;
7793 }
7794 }
7795
7796 return 1;
7797 }
7798
7799 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7800 children, and set die_symbol. */
7801
7802 static void
7803 compute_comp_unit_symbol (dw_die_ref unit_die)
7804 {
7805 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7806 const char *base = die_name ? lbasename (die_name) : "anonymous";
7807 char *name = XALLOCAVEC (char, strlen (base) + 64);
7808 char *p;
7809 int i, mark;
7810 unsigned char checksum[16];
7811 struct md5_ctx ctx;
7812
7813 /* Compute the checksum of the DIE, then append part of it as hex digits to
7814 the filename-derived name of the unit. */
7815
7816 md5_init_ctx (&ctx);
7817 mark = 0;
7818 die_checksum (unit_die, &ctx, &mark);
7819 unmark_all_dies (unit_die);
7820 md5_finish_ctx (&ctx, checksum);
7821
7822 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7823 not start with a letter but with anything valid for filenames, and
7824 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7825 character is not a letter. */
7826 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7827 clean_symbol_name (name);
7828
7829 p = name + strlen (name);
7830 for (i = 0; i < 4; i++)
7831 {
7832 sprintf (p, "%.2x", checksum[i]);
7833 p += 2;
7834 }
7835
7836 unit_die->die_id.die_symbol = xstrdup (name);
7837 }
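
/* Illustrative sketch, not built here, of the hex-digit append done in the
   loop above: the first four digest bytes become eight lowercase hex
   characters, so bytes de ad be ef yield the suffix "deadbeef".  The
   helper name is invented for the example.  */
#if 0
#include <stdio.h>

static void
example_append_checksum_hex (char *p, const unsigned char checksum[16])
{
  int i;

  for (i = 0; i < 4; i++)
    {
      sprintf (p, "%.2x", checksum[i]);
      p += 2;
    }
}
#endif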
7838
7839 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7840
7841 static int
7842 is_type_die (dw_die_ref die)
7843 {
7844 switch (die->die_tag)
7845 {
7846 case DW_TAG_array_type:
7847 case DW_TAG_class_type:
7848 case DW_TAG_interface_type:
7849 case DW_TAG_enumeration_type:
7850 case DW_TAG_pointer_type:
7851 case DW_TAG_reference_type:
7852 case DW_TAG_rvalue_reference_type:
7853 case DW_TAG_string_type:
7854 case DW_TAG_structure_type:
7855 case DW_TAG_subroutine_type:
7856 case DW_TAG_union_type:
7857 case DW_TAG_ptr_to_member_type:
7858 case DW_TAG_set_type:
7859 case DW_TAG_subrange_type:
7860 case DW_TAG_base_type:
7861 case DW_TAG_const_type:
7862 case DW_TAG_file_type:
7863 case DW_TAG_packed_type:
7864 case DW_TAG_volatile_type:
7865 case DW_TAG_typedef:
7866 return 1;
7867 default:
7868 return 0;
7869 }
7870 }
7871
7872 /* Returns true iff C is a compile-unit DIE. */
7873
7874 static inline bool
7875 is_cu_die (dw_die_ref c)
7876 {
7877 return c && (c->die_tag == DW_TAG_compile_unit
7878 || c->die_tag == DW_TAG_skeleton_unit);
7879 }
7880
7881 /* Returns true iff C is a unit DIE of some sort. */
7882
7883 static inline bool
7884 is_unit_die (dw_die_ref c)
7885 {
7886 return c && (c->die_tag == DW_TAG_compile_unit
7887 || c->die_tag == DW_TAG_partial_unit
7888 || c->die_tag == DW_TAG_type_unit
7889 || c->die_tag == DW_TAG_skeleton_unit);
7890 }
7891
7892 /* Returns true iff C is a namespace DIE. */
7893
7894 static inline bool
7895 is_namespace_die (dw_die_ref c)
7896 {
7897 return c && c->die_tag == DW_TAG_namespace;
7898 }
7899
7900 /* Return non-zero if this DIE is a template parameter. */
7901
7902 static inline bool
7903 is_template_parameter (dw_die_ref die)
7904 {
7905 switch (die->die_tag)
7906 {
7907 case DW_TAG_template_type_param:
7908 case DW_TAG_template_value_param:
7909 case DW_TAG_GNU_template_template_param:
7910 case DW_TAG_GNU_template_parameter_pack:
7911 return true;
7912 default:
7913 return false;
7914 }
7915 }
7916
7917 /* Return non-zero if this DIE represents a template instantiation. */
7918
7919 static inline bool
7920 is_template_instantiation (dw_die_ref die)
7921 {
7922 dw_die_ref c;
7923
7924 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7925 return false;
7926 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7927 return false;
7928 }
7929
7930 static char *
7931 gen_internal_sym (const char *prefix)
7932 {
7933 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7934
7935 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7936 return xstrdup (buf);
7937 }
7938
7939 /* Return non-zero if this DIE is a declaration. */
7940
7941 static int
7942 is_declaration_die (dw_die_ref die)
7943 {
7944 dw_attr_node *a;
7945 unsigned ix;
7946
7947 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7948 if (a->dw_attr == DW_AT_declaration)
7949 return 1;
7950
7951 return 0;
7952 }
7953
7954 /* Return non-zero if this DIE is nested inside a subprogram. */
7955
7956 static int
7957 is_nested_in_subprogram (dw_die_ref die)
7958 {
7959 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7960
7961 if (decl == NULL)
7962 decl = die;
7963 return local_scope_p (decl);
7964 }
7965
7966 /* Return non-zero if this DIE contains a defining declaration of a
7967 subprogram. */
7968
7969 static int
7970 contains_subprogram_definition (dw_die_ref die)
7971 {
7972 dw_die_ref c;
7973
7974 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
7975 return 1;
7976 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
7977 return 0;
7978 }
7979
7980 /* Return non-zero if this is a type DIE that should be moved to a
7981 COMDAT .debug_types section or .debug_info section with DW_UT_*type
7982 unit type. */
7983
7984 static int
7985 should_move_die_to_comdat (dw_die_ref die)
7986 {
7987 switch (die->die_tag)
7988 {
7989 case DW_TAG_class_type:
7990 case DW_TAG_structure_type:
7991 case DW_TAG_enumeration_type:
7992 case DW_TAG_union_type:
7993 /* Don't move declarations, inlined instances, types nested in a
7994 subprogram, or types that contain subprogram definitions. */
7995 if (is_declaration_die (die)
7996 || get_AT (die, DW_AT_abstract_origin)
7997 || is_nested_in_subprogram (die)
7998 || contains_subprogram_definition (die))
7999 return 0;
8000 return 1;
8001 case DW_TAG_array_type:
8002 case DW_TAG_interface_type:
8003 case DW_TAG_pointer_type:
8004 case DW_TAG_reference_type:
8005 case DW_TAG_rvalue_reference_type:
8006 case DW_TAG_string_type:
8007 case DW_TAG_subroutine_type:
8008 case DW_TAG_ptr_to_member_type:
8009 case DW_TAG_set_type:
8010 case DW_TAG_subrange_type:
8011 case DW_TAG_base_type:
8012 case DW_TAG_const_type:
8013 case DW_TAG_file_type:
8014 case DW_TAG_packed_type:
8015 case DW_TAG_volatile_type:
8016 case DW_TAG_typedef:
8017 default:
8018 return 0;
8019 }
8020 }
8021
8022 /* Make a clone of DIE. */
8023
8024 static dw_die_ref
8025 clone_die (dw_die_ref die)
8026 {
8027 dw_die_ref clone = new_die_raw (die->die_tag);
8028 dw_attr_node *a;
8029 unsigned ix;
8030
8031 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8032 add_dwarf_attr (clone, a);
8033
8034 return clone;
8035 }
8036
8037 /* Make a clone of the tree rooted at DIE. */
8038
8039 static dw_die_ref
8040 clone_tree (dw_die_ref die)
8041 {
8042 dw_die_ref c;
8043 dw_die_ref clone = clone_die (die);
8044
8045 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8046
8047 return clone;
8048 }
8049
8050 /* Make a clone of DIE as a declaration. */
8051
8052 static dw_die_ref
8053 clone_as_declaration (dw_die_ref die)
8054 {
8055 dw_die_ref clone;
8056 dw_die_ref decl;
8057 dw_attr_node *a;
8058 unsigned ix;
8059
8060 /* If the DIE is already a declaration, just clone it. */
8061 if (is_declaration_die (die))
8062 return clone_die (die);
8063
8064 /* If the DIE is a specification, just clone its declaration DIE. */
8065 decl = get_AT_ref (die, DW_AT_specification);
8066 if (decl != NULL)
8067 {
8068 clone = clone_die (decl);
8069 if (die->comdat_type_p)
8070 add_AT_die_ref (clone, DW_AT_signature, die);
8071 return clone;
8072 }
8073
8074 clone = new_die_raw (die->die_tag);
8075
8076 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8077 {
8078 /* We don't want to copy over all attributes.
8079 For example we don't want DW_AT_byte_size because otherwise we will no
8080 longer have a declaration and GDB will treat it as a definition. */
8081
8082 switch (a->dw_attr)
8083 {
8084 case DW_AT_abstract_origin:
8085 case DW_AT_artificial:
8086 case DW_AT_containing_type:
8087 case DW_AT_external:
8088 case DW_AT_name:
8089 case DW_AT_type:
8090 case DW_AT_virtuality:
8091 case DW_AT_linkage_name:
8092 case DW_AT_MIPS_linkage_name:
8093 add_dwarf_attr (clone, a);
8094 break;
8095 case DW_AT_byte_size:
8096 case DW_AT_alignment:
8097 default:
8098 break;
8099 }
8100 }
8101
8102 if (die->comdat_type_p)
8103 add_AT_die_ref (clone, DW_AT_signature, die);
8104
8105 add_AT_flag (clone, DW_AT_declaration, 1);
8106 return clone;
8107 }
8108
8109
8110 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8111
8112 struct decl_table_entry
8113 {
8114 dw_die_ref orig;
8115 dw_die_ref copy;
8116 };
8117
8118 /* Helpers to manipulate hash table of copied declarations. */
8119
8120 /* Hashtable helpers. */
8121
8122 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8123 {
8124 typedef die_struct *compare_type;
8125 static inline hashval_t hash (const decl_table_entry *);
8126 static inline bool equal (const decl_table_entry *, const die_struct *);
8127 };
8128
8129 inline hashval_t
8130 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8131 {
8132 return htab_hash_pointer (entry->orig);
8133 }
8134
8135 inline bool
8136 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8137 const die_struct *entry2)
8138 {
8139 return entry1->orig == entry2;
8140 }
8141
8142 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
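
/* Typical use of this table, sketched (not built here) from the lookups
   later in this file: hash the DIE by pointer, return the recorded copy
   on a hit, and install a fresh decl_table_entry on a miss.  The helper
   name is invented for the example.  */
#if 0
static dw_die_ref
example_lookup_or_add (decl_hash_type *table, dw_die_ref die)
{
  decl_table_entry **slot
    = table->find_slot_with_hash (die, htab_hash_pointer (die), INSERT);

  if (*slot != HTAB_EMPTY_ENTRY)
    return (*slot)->copy;

  struct decl_table_entry *entry = XCNEW (struct decl_table_entry);
  entry->orig = die;
  entry->copy = NULL;
  *slot = entry;
  return NULL;
}
#endif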
8143
8144 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8145 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8146 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8147 to check if the ancestor has already been copied into UNIT. */
8148
8149 static dw_die_ref
8150 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8151 decl_hash_type *decl_table)
8152 {
8153 dw_die_ref parent = die->die_parent;
8154 dw_die_ref new_parent = unit;
8155 dw_die_ref copy;
8156 decl_table_entry **slot = NULL;
8157 struct decl_table_entry *entry = NULL;
8158
8159 if (decl_table)
8160 {
8161 /* Check if the entry has already been copied to UNIT. */
8162 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8163 INSERT);
8164 if (*slot != HTAB_EMPTY_ENTRY)
8165 {
8166 entry = *slot;
8167 return entry->copy;
8168 }
8169
8170 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8171 entry = XCNEW (struct decl_table_entry);
8172 entry->orig = die;
8173 entry->copy = NULL;
8174 *slot = entry;
8175 }
8176
8177 if (parent != NULL)
8178 {
8179 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8180 if (spec != NULL)
8181 parent = spec;
8182 if (!is_unit_die (parent))
8183 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8184 }
8185
8186 copy = clone_as_declaration (die);
8187 add_child_die (new_parent, copy);
8188
8189 if (decl_table)
8190 {
8191 /* Record the pointer to the copy. */
8192 entry->copy = copy;
8193 }
8194
8195 return copy;
8196 }

8197 /* Copy the declaration context to the new type unit DIE. This includes
8198 any surrounding namespace or type declarations. If the DIE has an
8199 AT_specification attribute, it also includes attributes and children
8200 attached to the specification, and returns a pointer to the original
8201 parent of the declaration DIE. Returns NULL otherwise. */
8202
8203 static dw_die_ref
8204 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8205 {
8206 dw_die_ref decl;
8207 dw_die_ref new_decl;
8208 dw_die_ref orig_parent = NULL;
8209
8210 decl = get_AT_ref (die, DW_AT_specification);
8211 if (decl == NULL)
8212 decl = die;
8213 else
8214 {
8215 unsigned ix;
8216 dw_die_ref c;
8217 dw_attr_node *a;
8218
8219 /* The original DIE will be changed to a declaration, and must
8220 be moved to be a child of the original declaration DIE. */
8221 orig_parent = decl->die_parent;
8222
8223 /* Copy the type node pointer from the new DIE to the original
8224 declaration DIE so we can forward references later. */
8225 decl->comdat_type_p = true;
8226 decl->die_id.die_type_node = die->die_id.die_type_node;
8227
8228 remove_AT (die, DW_AT_specification);
8229
8230 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8231 {
8232 if (a->dw_attr != DW_AT_name
8233 && a->dw_attr != DW_AT_declaration
8234 && a->dw_attr != DW_AT_external)
8235 add_dwarf_attr (die, a);
8236 }
8237
8238 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8239 }
8240
8241 if (decl->die_parent != NULL
8242 && !is_unit_die (decl->die_parent))
8243 {
8244 new_decl = copy_ancestor_tree (unit, decl, NULL);
8245 if (new_decl != NULL)
8246 {
8247 remove_AT (new_decl, DW_AT_signature);
8248 add_AT_specification (die, new_decl);
8249 }
8250 }
8251
8252 return orig_parent;
8253 }
8254
8255 /* Generate the skeleton ancestor tree for the given NODE, then clone
8256 the DIE and add the clone into the tree. */
8257
8258 static void
8259 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8260 {
8261 if (node->new_die != NULL)
8262 return;
8263
8264 node->new_die = clone_as_declaration (node->old_die);
8265
8266 if (node->parent != NULL)
8267 {
8268 generate_skeleton_ancestor_tree (node->parent);
8269 add_child_die (node->parent->new_die, node->new_die);
8270 }
8271 }
8272
8273 /* Generate a skeleton tree of DIEs containing any declarations that are
8274 found in the original tree. We traverse the tree looking for declaration
8275 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8276
8277 static void
8278 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8279 {
8280 skeleton_chain_node node;
8281 dw_die_ref c;
8282 dw_die_ref first;
8283 dw_die_ref prev = NULL;
8284 dw_die_ref next = NULL;
8285
8286 node.parent = parent;
8287
8288 first = c = parent->old_die->die_child;
8289 if (c)
8290 next = c->die_sib;
8291 if (c) do {
8292 if (prev == NULL || prev->die_sib == c)
8293 prev = c;
8294 c = next;
8295 next = (c == first ? NULL : c->die_sib);
8296 node.old_die = c;
8297 node.new_die = NULL;
8298 if (is_declaration_die (c))
8299 {
8300 if (is_template_instantiation (c))
8301 {
8302 /* Instantiated templates do not need to be cloned into the
8303 type unit. Just move the DIE and its children back to
8304 the skeleton tree (in the main CU). */
8305 remove_child_with_prev (c, prev);
8306 add_child_die (parent->new_die, c);
8307 c = prev;
8308 }
8309 else if (c->comdat_type_p)
8310 {
8311 /* This is the skeleton of a type broken out by an earlier
8312 break_out_comdat_types pass. Clone the existing DIE, but keep
8313 the children under the original (which is in the main CU). */
8314 dw_die_ref clone = clone_die (c);
8315
8316 replace_child (c, clone, prev);
8317 generate_skeleton_ancestor_tree (parent);
8318 add_child_die (parent->new_die, c);
8319 c = clone;
8320 continue;
8321 }
8322 else
8323 {
8324 /* Clone the existing DIE, move the original to the skeleton
8325 tree (which is in the main CU), and put the clone, with
8326 all the original's children, where the original came from
8327 (which is about to be moved to the type unit). */
8328 dw_die_ref clone = clone_die (c);
8329 move_all_children (c, clone);
8330
8331 /* If the original has a DW_AT_object_pointer attribute,
8332 it would now point to a child DIE just moved to the
8333 cloned tree, so we need to remove that attribute from
8334 the original. */
8335 remove_AT (c, DW_AT_object_pointer);
8336
8337 replace_child (c, clone, prev);
8338 generate_skeleton_ancestor_tree (parent);
8339 add_child_die (parent->new_die, c);
8340 node.old_die = clone;
8341 node.new_die = c;
8342 c = clone;
8343 }
8344 }
8345 generate_skeleton_bottom_up (&node);
8346 } while (next != NULL);
8347 }
8348
8349 /* Wrapper function for generate_skeleton_bottom_up. */
8350
8351 static dw_die_ref
8352 generate_skeleton (dw_die_ref die)
8353 {
8354 skeleton_chain_node node;
8355
8356 node.old_die = die;
8357 node.new_die = NULL;
8358 node.parent = NULL;
8359
8360 /* If this type definition is nested inside another type,
8361 and is not an instantiation of a template, always leave
8362 at least a declaration in its place. */
8363 if (die->die_parent != NULL
8364 && is_type_die (die->die_parent)
8365 && !is_template_instantiation (die))
8366 node.new_die = clone_as_declaration (die);
8367
8368 generate_skeleton_bottom_up (&node);
8369 return node.new_die;
8370 }
8371
8372 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8373 declaration. The original DIE is moved to a new compile unit so that
8374 existing references to it follow it to the new location. If any of the
8375 original DIE's descendants is a declaration, we need to replace the
8376 original DIE with a skeleton tree and move the declarations back into the
8377 skeleton tree. */
8378
8379 static dw_die_ref
8380 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8381 dw_die_ref prev)
8382 {
8383 dw_die_ref skeleton, orig_parent;
8384
8385 /* Copy the declaration context to the type unit DIE. If the returned
8386 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8387 that DIE. */
8388 orig_parent = copy_declaration_context (unit, child);
8389
8390 skeleton = generate_skeleton (child);
8391 if (skeleton == NULL)
8392 remove_child_with_prev (child, prev);
8393 else
8394 {
8395 skeleton->comdat_type_p = true;
8396 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8397
8398 /* If the original DIE was a specification, we need to put
8399 the skeleton under the parent DIE of the declaration.
8400 This leaves the original declaration in the tree, but
8401 it will be pruned later since there are no longer any
8402 references to it. */
8403 if (orig_parent != NULL)
8404 {
8405 remove_child_with_prev (child, prev);
8406 add_child_die (orig_parent, skeleton);
8407 }
8408 else
8409 replace_child (child, skeleton, prev);
8410 }
8411
8412 return skeleton;
8413 }
8414
8415 static void
8416 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8417 comdat_type_node *type_node,
8418 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8419
8420 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DWARF
8421 procedure DIE, put it under TYPE_NODE and return the copy. Continue looking
8422 for DWARF procedure references in the DW_AT_location attribute. */
8423
8424 static dw_die_ref
8425 copy_dwarf_procedure (dw_die_ref die,
8426 comdat_type_node *type_node,
8427 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8428 {
8429 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8430
8431 /* DWARF procedures are not supposed to have children... */
8432 gcc_assert (die->die_child == NULL);
8433
8434 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8435 gcc_assert (vec_safe_length (die->die_attr) == 1
8436 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8437
8438 /* Do not copy DWARF procedures more than once. */
8439 bool existed;
8440 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8441 if (existed)
8442 return die_copy;
8443
8444 die_copy = clone_die (die);
8445 add_child_die (type_node->root_die, die_copy);
8446 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8447 return die_copy;
8448 }
8449
8450 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8451 procedures in DIE's attributes. */
8452
8453 static void
8454 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8455 comdat_type_node *type_node,
8456 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8457 {
8458 dw_attr_node *a;
8459 unsigned i;
8460
8461 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8462 {
8463 dw_loc_descr_ref loc;
8464
8465 if (a->dw_attr_val.val_class != dw_val_class_loc)
8466 continue;
8467
8468 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8469 {
8470 switch (loc->dw_loc_opc)
8471 {
8472 case DW_OP_call2:
8473 case DW_OP_call4:
8474 case DW_OP_call_ref:
8475 gcc_assert (loc->dw_loc_oprnd1.val_class
8476 == dw_val_class_die_ref);
8477 loc->dw_loc_oprnd1.v.val_die_ref.die
8478 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8479 type_node,
8480 copied_dwarf_procs);
8481 break;

8482 default:
8483 break;
8484 }
8485 }
8486 }
8487 }
8488
8489 /* Copy DWARF procedures that are referenced by the DIE tree to TREE_NODE and
8490 rewrite references to point to the copies.
8491
8492 References are looked for in DIE's attributes and, recursively, in the
8493 attributes of all its children, considering only location description
8494 attributes. COPIED_DWARF_PROCS maps old DWARF procedures to their copies;
8495 it is used to avoid copying the same DWARF procedure twice under TYPE_NODE. */
8496
8497 static void
8498 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8499 comdat_type_node *type_node,
8500 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8501 {
8502 dw_die_ref c;
8503
8504 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8505 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8506 type_node,
8507 copied_dwarf_procs));
8508 }
8509
8510 /* Traverse the DIE and set up additional .debug_types or .debug_info
8511 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8512 section. */
8513
8514 static void
8515 break_out_comdat_types (dw_die_ref die)
8516 {
8517 dw_die_ref c;
8518 dw_die_ref first;
8519 dw_die_ref prev = NULL;
8520 dw_die_ref next = NULL;
8521 dw_die_ref unit = NULL;
8522
8523 first = c = die->die_child;
8524 if (c)
8525 next = c->die_sib;
8526 if (c) do {
8527 if (prev == NULL || prev->die_sib == c)
8528 prev = c;
8529 c = next;
8530 next = (c == first ? NULL : c->die_sib);
8531 if (should_move_die_to_comdat (c))
8532 {
8533 dw_die_ref replacement;
8534 comdat_type_node *type_node;
8535
8536 /* Break out nested types into their own type units. */
8537 break_out_comdat_types (c);
8538
8539 /* Create a new type unit DIE as the root for the new tree, and
8540 add it to the list of comdat types. */
8541 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8542 add_AT_unsigned (unit, DW_AT_language,
8543 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8544 type_node = ggc_cleared_alloc<comdat_type_node> ();
8545 type_node->root_die = unit;
8546 type_node->next = comdat_type_list;
8547 comdat_type_list = type_node;
8548
8549 /* Generate the type signature. */
8550 generate_type_signature (c, type_node);
8551
8552 /* Copy the declaration context, attributes, and children of the
8553 declaration into the new type unit DIE, then remove this DIE
8554 from the main CU (or replace it with a skeleton if necessary). */
8555 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8556 type_node->skeleton_die = replacement;
8557
8558 /* Add the DIE to the new type unit. */
8559 add_child_die (unit, c);
8560
8561 /* Types can reference DWARF procedures for type size or data location
8562 expressions. Calls in DWARF expressions cannot target procedures
8563 that are not in the same section. So we must copy DWARF procedures
8564 along with this type and then rewrite references to them. */
8565 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8566 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8567
8568 if (replacement != NULL)
8569 c = replacement;
8570 }
8571 else if (c->die_tag == DW_TAG_namespace
8572 || c->die_tag == DW_TAG_class_type
8573 || c->die_tag == DW_TAG_structure_type
8574 || c->die_tag == DW_TAG_union_type)
8575 {
8576 /* Look for nested types that can be broken out. */
8577 break_out_comdat_types (c);
8578 }
8579 } while (next != NULL);
8580 }
8581
8582 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8583 Enter all the cloned children into the hash table decl_table. */
8584
8585 static dw_die_ref
8586 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8587 {
8588 dw_die_ref c;
8589 dw_die_ref clone;
8590 struct decl_table_entry *entry;
8591 decl_table_entry **slot;
8592
8593 if (die->die_tag == DW_TAG_subprogram)
8594 clone = clone_as_declaration (die);
8595 else
8596 clone = clone_die (die);
8597
8598 slot = decl_table->find_slot_with_hash (die,
8599 htab_hash_pointer (die), INSERT);
8600
8601 /* Assert that DIE isn't in the hash table yet. If it were already
8602 there, its ancestors would necessarily be there as well, and
8603 clone_tree_partial wouldn't have been called. */
8604 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8605
8606 entry = XCNEW (struct decl_table_entry);
8607 entry->orig = die;
8608 entry->copy = clone;
8609 *slot = entry;
8610
8611 if (die->die_tag != DW_TAG_subprogram)
8612 FOR_EACH_CHILD (die, c,
8613 add_child_die (clone, clone_tree_partial (c, decl_table)));
8614
8615 return clone;
8616 }
8617
8618 /* Walk the DIE and its children, looking for references to incomplete
8619 or trivial types that are unmarked (i.e., that are not in the current
8620 type_unit). */
8621
8622 static void
8623 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8624 {
8625 dw_die_ref c;
8626 dw_attr_node *a;
8627 unsigned ix;
8628
8629 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8630 {
8631 if (AT_class (a) == dw_val_class_die_ref)
8632 {
8633 dw_die_ref targ = AT_ref (a);
8634 decl_table_entry **slot;
8635 struct decl_table_entry *entry;
8636
8637 if (targ->die_mark != 0 || targ->comdat_type_p)
8638 continue;
8639
8640 slot = decl_table->find_slot_with_hash (targ,
8641 htab_hash_pointer (targ),
8642 INSERT);
8643
8644 if (*slot != HTAB_EMPTY_ENTRY)
8645 {
8646 /* TARG has already been copied, so we just need to
8647 modify the reference to point to the copy. */
8648 entry = *slot;
8649 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8650 }
8651 else
8652 {
8653 dw_die_ref parent = unit;
8654 dw_die_ref copy = clone_die (targ);
8655
8656 /* Record in DECL_TABLE that TARG has been copied.
8657 Need to do this now, before the recursive call,
8658 because DECL_TABLE may be expanded and SLOT
8659 would no longer be a valid pointer. */
8660 entry = XCNEW (struct decl_table_entry);
8661 entry->orig = targ;
8662 entry->copy = copy;
8663 *slot = entry;
8664
8665 /* If TARG is not a declaration DIE, we need to copy its
8666 children. */
8667 if (!is_declaration_die (targ))
8668 {
8669 FOR_EACH_CHILD (
8670 targ, c,
8671 add_child_die (copy,
8672 clone_tree_partial (c, decl_table)));
8673 }
8674
8675 /* Make sure the cloned tree is marked as part of the
8676 type unit. */
8677 mark_dies (copy);
8678
8679 /* If TARG has surrounding context, copy its ancestor tree
8680 into the new type unit. */
8681 if (targ->die_parent != NULL
8682 && !is_unit_die (targ->die_parent))
8683 parent = copy_ancestor_tree (unit, targ->die_parent,
8684 decl_table);
8685
8686 add_child_die (parent, copy);
8687 a->dw_attr_val.v.val_die_ref.die = copy;
8688
8689 /* Make sure the newly-copied DIE is walked. If it was
8690 installed in a previously-added context, it won't
8691 get visited otherwise. */
8692 if (parent != unit)
8693 {
8694 /* Find the highest point of the newly-added tree,
8695 mark each node along the way, and walk from there. */
8696 parent->die_mark = 1;
8697 while (parent->die_parent
8698 && parent->die_parent->die_mark == 0)
8699 {
8700 parent = parent->die_parent;
8701 parent->die_mark = 1;
8702 }
8703 copy_decls_walk (unit, parent, decl_table);
8704 }
8705 }
8706 }
8707 }
8708
8709 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8710 }
8711
8712 /* Copy declarations for "unworthy" types into the new comdat section.
8713 Incomplete types, modified types, and certain other types aren't broken
8714 out into comdat sections of their own, so they don't have a signature,
8715 and we need to copy the declaration into the same section so that we
8716 don't have an external reference. */
8717
8718 static void
8719 copy_decls_for_unworthy_types (dw_die_ref unit)
8720 {
8721 mark_dies (unit);
8722 decl_hash_type decl_table (10);
8723 copy_decls_walk (unit, unit, &decl_table);
8724 unmark_dies (unit);
8725 }
8726
8727 /* Traverse the DIE and add a sibling attribute if it may have the
8728 effect of speeding up access to siblings. To save some space,
8729 avoid generating sibling attributes for DIEs without children. */
8730
8731 static void
8732 add_sibling_attributes (dw_die_ref die)
8733 {
8734 dw_die_ref c;
8735
8736 if (! die->die_child)
8737 return;
8738
8739 if (die->die_parent && die != die->die_parent->die_child)
8740 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8741
8742 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8743 }
8744
8745 /* Output all location lists for the DIE and its children. */
8746
8747 static void
8748 output_location_lists (dw_die_ref die)
8749 {
8750 dw_die_ref c;
8751 dw_attr_node *a;
8752 unsigned ix;
8753
8754 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8755 if (AT_class (a) == dw_val_class_loc_list)
8756 output_loc_list (AT_loc_list (a));
8757
8758 FOR_EACH_CHILD (die, c, output_location_lists (c));
8759 }
8760
8761 /* During assign_location_list_indexes and output_loclists_offsets this
8762 holds the current index; afterwards it holds the number of assigned
8763 indexes (i.e. how large the .debug_loclists* offset table should be). */
8764 static unsigned int loc_list_idx;
8765
8766 /* Output all location list offsets for the DIE and its children. */
8767
8768 static void
8769 output_loclists_offsets (dw_die_ref die)
8770 {
8771 dw_die_ref c;
8772 dw_attr_node *a;
8773 unsigned ix;
8774
8775 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8776 if (AT_class (a) == dw_val_class_loc_list)
8777 {
8778 dw_loc_list_ref l = AT_loc_list (a);
8779 if (l->offset_emitted)
8780 continue;
8781 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8782 loc_section_label, NULL);
8783 gcc_assert (l->hash == loc_list_idx);
8784 loc_list_idx++;
8785 l->offset_emitted = true;
8786 }
8787
8788 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8789 }
8790
8791 /* Recursively set indexes of location lists. */
8792
8793 static void
8794 assign_location_list_indexes (dw_die_ref die)
8795 {
8796 dw_die_ref c;
8797 dw_attr_node *a;
8798 unsigned ix;
8799
8800 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8801 if (AT_class (a) == dw_val_class_loc_list)
8802 {
8803 dw_loc_list_ref list = AT_loc_list (a);
8804 if (!list->num_assigned)
8805 {
8806 list->num_assigned = true;
8807 list->hash = loc_list_idx++;
8808 }
8809 }
8810
8811 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8812 }
8813
8814 /* We want to limit the number of external references, because they are
8815 larger than local references: a relocation takes multiple words, and
8816 even a sig8 reference is always eight bytes, whereas a local reference
8817 can be as small as one byte (though GCC usually emits the 4-byte DW_FORM_ref4).
8818 So if we encounter multiple external references to the same type DIE, we
8819 make a local typedef stub for it and redirect all references there.
8820
8821 This is the element of the hash table for keeping track of these
8822 references. */
8823
8824 struct external_ref
8825 {
8826 dw_die_ref type;
8827 dw_die_ref stub;
8828 unsigned n_refs;
8829 };
8830
8831 /* Hashtable helpers. */
8832
8833 struct external_ref_hasher : free_ptr_hash <external_ref>
8834 {
8835 static inline hashval_t hash (const external_ref *);
8836 static inline bool equal (const external_ref *, const external_ref *);
8837 };
8838
8839 inline hashval_t
8840 external_ref_hasher::hash (const external_ref *r)
8841 {
8842 dw_die_ref die = r->type;
8843 hashval_t h = 0;
8844
8845 /* We can't use the address of the DIE for hashing, because
8846 that will make the order of the stub DIEs non-deterministic. */
8847 if (! die->comdat_type_p)
8848 /* We have a symbol; use it to compute a hash. */
8849 h = htab_hash_string (die->die_id.die_symbol);
8850 else
8851 {
8852 /* We have a type signature; use a subset of the bits as the hash.
8853 The 8-byte signature is at least as large as hashval_t. */
8854 comdat_type_node *type_node = die->die_id.die_type_node;
8855 memcpy (&h, type_node->signature, sizeof (h));
8856 }
8857 return h;
8858 }
8859
8860 inline bool
8861 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8862 {
8863 return r1->type == r2->type;
8864 }
8865
8866 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8867
8868 /* Return a pointer to the external_ref for references to DIE. */
8869
8870 static struct external_ref *
8871 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8872 {
8873 struct external_ref ref, *ref_p;
8874 external_ref **slot;
8875
8876 ref.type = die;
8877 slot = map->find_slot (&ref, INSERT);
8878 if (*slot != HTAB_EMPTY_ENTRY)
8879 return *slot;
8880
8881 ref_p = XCNEW (struct external_ref);
8882 ref_p->type = die;
8883 *slot = ref_p;
8884 return ref_p;
8885 }
8886
8887 /* Subroutine of optimize_external_refs, below.
8888
8889 If we see a type skeleton, record it as our stub. If we see external
8890 references, remember how many we've seen. */
8891
8892 static void
8893 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8894 {
8895 dw_die_ref c;
8896 dw_attr_node *a;
8897 unsigned ix;
8898 struct external_ref *ref_p;
8899
8900 if (is_type_die (die)
8901 && (c = get_AT_ref (die, DW_AT_signature)))
8902 {
8903 /* This is a local skeleton; use it for local references. */
8904 ref_p = lookup_external_ref (map, c);
8905 ref_p->stub = die;
8906 }
8907
8908 /* Scan the DIE references, and remember any that refer to DIEs from
8909 other CUs (i.e. those which are not marked). */
8910 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8911 if (AT_class (a) == dw_val_class_die_ref
8912 && (c = AT_ref (a))->die_mark == 0
8913 && is_type_die (c))
8914 {
8915 ref_p = lookup_external_ref (map, c);
8916 ref_p->n_refs++;
8917 }
8918
8919 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8920 }
8921
8922 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8923 points to an external_ref, DATA is the CU we're processing. If we don't
8924 already have a local stub, and we have multiple refs, build a stub. */
8925
8926 int
8927 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8928 {
8929 struct external_ref *ref_p = *slot;
8930
8931 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8932 {
8933 /* We have multiple references to this type, so build a small stub.
8934 Both of these forms are a bit dodgy from the perspective of the
8935 DWARF standard, since technically they should have names. */
8936 dw_die_ref cu = data;
8937 dw_die_ref type = ref_p->type;
8938 dw_die_ref stub = NULL;
8939
8940 if (type->comdat_type_p)
8941 {
8942 /* If we refer to this type via sig8, use AT_signature. */
8943 stub = new_die (type->die_tag, cu, NULL_TREE);
8944 add_AT_die_ref (stub, DW_AT_signature, type);
8945 }
8946 else
8947 {
8948 /* Otherwise, use a typedef with no name. */
8949 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8950 add_AT_die_ref (stub, DW_AT_type, type);
8951 }
8952
8953 stub->die_mark++;
8954 ref_p->stub = stub;
8955 }
8956 return 1;
8957 }
8958
8959 /* DIE is a unit; look through all the DIE references to see if there are
8960 any external references to types, and if so, create local stubs for
8961 them which will be applied in build_abbrev_table. This is useful because
8962 references to local DIEs are smaller. */
8963
8964 static external_ref_hash_type *
8965 optimize_external_refs (dw_die_ref die)
8966 {
8967 external_ref_hash_type *map = new external_ref_hash_type (10);
8968 optimize_external_refs_1 (die, map);
8969 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
8970 return map;
8971 }
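/* Sketch of the intended call pattern, inferred from the comments above
   rather than copied verbatim from the callers:

     mark_dies (unit);             /- mark the DIEs that are local to UNIT -/
     external_ref_hash_type *map = optimize_external_refs (unit);
     build_abbrev_table (unit, map);  /- redirect refs to the stubs in MAP -/
     delete map;

   build_abbrev_table consumes the stubs recorded in MAP when it rewrites
   die_ref attribute values; see below.  */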
8972
8973 /* The following 3 variables are temporaries that are computed only during the
8974 build_abbrev_table call and used and released during the following
8975 optimize_abbrev_table call. */
8976
8977 /* First abbrev_id that can be optimized based on usage. */
8978 static unsigned int abbrev_opt_start;
8979
8980 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
8981 abbrev_id smaller than this, because they must be already sized
8982 during build_abbrev_table). */
8983 static unsigned int abbrev_opt_base_type_end;
8984
8985 /* Vector of usage counts during build_abbrev_table. Indexed by
8986 abbrev_id - abbrev_opt_start. */
8987 static vec<unsigned int> abbrev_usage_count;
8988
8989 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
8990 static vec<dw_die_ref> sorted_abbrev_dies;
8991
8992 /* The format of each DIE (and its attribute value pairs) is encoded in an
8993 abbreviation table. This routine builds the abbreviation table and assigns
8994 a unique abbreviation id for each abbreviation entry. The children of each
8995 die are visited recursively. */
8996
8997 static void
8998 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
8999 {
9000 unsigned int abbrev_id = 0;
9001 dw_die_ref c;
9002 dw_attr_node *a;
9003 unsigned ix;
9004 dw_die_ref abbrev;
9005
9006 /* Scan the DIE references, and replace any that refer to
9007 DIEs from other CUs (i.e. those which are not marked) with
9008 the local stubs we built in optimize_external_refs. */
9009 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9010 if (AT_class (a) == dw_val_class_die_ref
9011 && (c = AT_ref (a))->die_mark == 0)
9012 {
9013 struct external_ref *ref_p;
9014 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9015
9016 ref_p = lookup_external_ref (extern_map, c);
9017 if (ref_p->stub && ref_p->stub != die)
9018 change_AT_die_ref (a, ref_p->stub);
9019 else
9020 /* We aren't changing this reference, so mark it external. */
9021 set_AT_ref_external (a, 1);
9022 }
9023
9024 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9025 {
9026 dw_attr_node *die_a, *abbrev_a;
9027 unsigned ix;
9028 bool ok = true;
9029
9030 if (abbrev_id == 0)
9031 continue;
9032 if (abbrev->die_tag != die->die_tag)
9033 continue;
9034 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9035 continue;
9036
9037 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9038 continue;
9039
9040 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9041 {
9042 abbrev_a = &(*abbrev->die_attr)[ix];
9043 if ((abbrev_a->dw_attr != die_a->dw_attr)
9044 || (value_format (abbrev_a) != value_format (die_a)))
9045 {
9046 ok = false;
9047 break;
9048 }
9049 }
9050 if (ok)
9051 break;
9052 }
9053
9054 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9055 {
9056 vec_safe_push (abbrev_die_table, die);
9057 if (abbrev_opt_start)
9058 abbrev_usage_count.safe_push (0);
9059 }
9060 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9061 {
9062 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9063 sorted_abbrev_dies.safe_push (die);
9064 }
9065
9066 die->die_abbrev = abbrev_id;
9067 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9068 }
9069
9070 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9071 by die_abbrev's usage count, from the most commonly used
9072 abbreviation to the least. */
9073
9074 static int
9075 die_abbrev_cmp (const void *p1, const void *p2)
9076 {
9077 dw_die_ref die1 = *(const dw_die_ref *) p1;
9078 dw_die_ref die2 = *(const dw_die_ref *) p2;
9079
9080 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9081 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9082
9083 if (die1->die_abbrev >= abbrev_opt_base_type_end
9084 && die2->die_abbrev >= abbrev_opt_base_type_end)
9085 {
9086 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9087 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9088 return -1;
9089 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9090 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9091 return 1;
9092 }
9093
9094 /* Stabilize the sort. */
9095 if (die1->die_abbrev < die2->die_abbrev)
9096 return -1;
9097 if (die1->die_abbrev > die2->die_abbrev)
9098 return 1;
9099
9100 return 0;
9101 }
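/* Note, illustrative rather than from the original comments: abbreviation
   codes are emitted as a uleb128 at the start of every DIE (see
   size_of_die), so codes 1..127 occupy one byte and larger codes occupy
   two or more.  Sorting so that the most frequently used abbreviations
   receive the smallest codes therefore shrinks .debug_info once there are
   more than 127 abbreviations, or, for DWARF 5, whenever
   DW_FORM_implicit_const opportunities exist -- which is exactly the guard
   used in optimize_abbrev_table below.  */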
9102
9103 /* Convert dw_val_class_const, dw_val_class_unsigned_const and dw_val_class_file
9104 class attributes of DIEs in between sorted_abbrev_dies[first_id] and
9105 sorted_abbrev_dies[end - 1] into dw_val_class_const_implicit,
9106 dw_val_class_unsigned_const_implicit or dw_val_class_file_implicit. */
9107
9108 static void
9109 optimize_implicit_const (unsigned int first_id, unsigned int end,
9110 vec<bool> &implicit_consts)
9111 {
9112 /* It never makes sense if there is just one DIE using the abbreviation. */
9113 if (end < first_id + 2)
9114 return;
9115
9116 dw_attr_node *a;
9117 unsigned ix, i;
9118 dw_die_ref die = sorted_abbrev_dies[first_id];
9119 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9120 if (implicit_consts[ix])
9121 {
9122 enum dw_val_class new_class = dw_val_class_none;
9123 switch (AT_class (a))
9124 {
9125 case dw_val_class_unsigned_const:
9126 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9127 continue;
9128
9129 /* The .debug_abbrev section will grow by
9130 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9131 in all the DIEs using that abbreviation. */
9132 if (constant_size (AT_unsigned (a)) * (end - first_id)
9133 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9134 continue;
9135
9136 new_class = dw_val_class_unsigned_const_implicit;
9137 break;
9138
9139 case dw_val_class_const:
9140 new_class = dw_val_class_const_implicit;
9141 break;
9142
9143 case dw_val_class_file:
9144 new_class = dw_val_class_file_implicit;
9145 break;
9146
9147 default:
9148 continue;
9149 }
9150 for (i = first_id; i < end; i++)
9151 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9152 = new_class;
9153 }
9154 }
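/* Worked example, illustrative only: suppose 200 DIEs share one
   abbreviation and each carries DW_AT_decl_file with the same value 3.
   Converting the attribute to dw_val_class_file_implicit moves the value
   into .debug_abbrev as a single sleb128 (one byte here) and the 200
   one-byte per-DIE copies disappear, saving roughly 199 bytes.  For
   dw_val_class_unsigned_const the check above additionally skips the
   conversion when the sleb128 added to .debug_abbrev would cost more than
   the per-DIE bytes it removes.  */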
9155
9156 /* Attempt to optimize the abbreviation table from the abbrev_opt_start
9157 abbreviation onwards. */
9158
9159 static void
9160 optimize_abbrev_table (void)
9161 {
9162 if (abbrev_opt_start
9163 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9164 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9165 {
9166 auto_vec<bool, 32> implicit_consts;
9167 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9168
9169 unsigned int abbrev_id = abbrev_opt_start - 1;
9170 unsigned int first_id = ~0U;
9171 unsigned int last_abbrev_id = 0;
9172 unsigned int i;
9173 dw_die_ref die;
9174 if (abbrev_opt_base_type_end > abbrev_opt_start)
9175 abbrev_id = abbrev_opt_base_type_end - 1;
9176 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9177 most commonly used abbreviations come first. */
9178 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9179 {
9180 dw_attr_node *a;
9181 unsigned ix;
9182
9183 /* If calc_base_type_die_sizes has been called, the CU and
9184 base types after it can't be optimized, because we've already
9185 calculated their DIE offsets. We've sorted them first. */
9186 if (die->die_abbrev < abbrev_opt_base_type_end)
9187 continue;
9188 if (die->die_abbrev != last_abbrev_id)
9189 {
9190 last_abbrev_id = die->die_abbrev;
9191 if (dwarf_version >= 5 && first_id != ~0U)
9192 optimize_implicit_const (first_id, i, implicit_consts);
9193 abbrev_id++;
9194 (*abbrev_die_table)[abbrev_id] = die;
9195 if (dwarf_version >= 5)
9196 {
9197 first_id = i;
9198 implicit_consts.truncate (0);
9199
9200 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9201 switch (AT_class (a))
9202 {
9203 case dw_val_class_const:
9204 case dw_val_class_unsigned_const:
9205 case dw_val_class_file:
9206 implicit_consts.safe_push (true);
9207 break;
9208 default:
9209 implicit_consts.safe_push (false);
9210 break;
9211 }
9212 }
9213 }
9214 else if (dwarf_version >= 5)
9215 {
9216 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9217 if (!implicit_consts[ix])
9218 continue;
9219 else
9220 {
9221 dw_attr_node *other_a
9222 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9223 if (!dw_val_equal_p (&a->dw_attr_val,
9224 &other_a->dw_attr_val))
9225 implicit_consts[ix] = false;
9226 }
9227 }
9228 die->die_abbrev = abbrev_id;
9229 }
9230 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9231 if (dwarf_version >= 5 && first_id != ~0U)
9232 optimize_implicit_const (first_id, i, implicit_consts);
9233 }
9234
9235 abbrev_opt_start = 0;
9236 abbrev_opt_base_type_end = 0;
9237 abbrev_usage_count.release ();
9238 sorted_abbrev_dies.release ();
9239 }
9240 \f
9241 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9242
9243 static int
9244 constant_size (unsigned HOST_WIDE_INT value)
9245 {
9246 int log;
9247
9248 if (value == 0)
9249 log = 0;
9250 else
9251 log = floor_log2 (value);
9252
9253 log = log / 8;
9254 log = 1 << (floor_log2 (log) + 1);
9255
9256 return log;
9257 }
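/* For reference, derived from the code above rather than stated in the
   original comments, constant_size maps values to sizes as follows:

     0x0         .. 0xff         -> 1
     0x100       .. 0xffff       -> 2
     0x10000     .. 0xffffffff   -> 4
     0x100000000 and above       -> 8

   i.e. the smallest power-of-two byte count whose range can hold VALUE.  */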
9258
9259 /* Return the size of a DIE as it is represented in the
9260 .debug_info section. */
9261
9262 static unsigned long
9263 size_of_die (dw_die_ref die)
9264 {
9265 unsigned long size = 0;
9266 dw_attr_node *a;
9267 unsigned ix;
9268 enum dwarf_form form;
9269
9270 size += size_of_uleb128 (die->die_abbrev);
9271 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9272 {
9273 switch (AT_class (a))
9274 {
9275 case dw_val_class_addr:
9276 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9277 {
9278 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9279 size += size_of_uleb128 (AT_index (a));
9280 }
9281 else
9282 size += DWARF2_ADDR_SIZE;
9283 break;
9284 case dw_val_class_offset:
9285 size += DWARF_OFFSET_SIZE;
9286 break;
9287 case dw_val_class_loc:
9288 {
9289 unsigned long lsize = size_of_locs (AT_loc (a));
9290
9291 /* Block length. */
9292 if (dwarf_version >= 4)
9293 size += size_of_uleb128 (lsize);
9294 else
9295 size += constant_size (lsize);
9296 size += lsize;
9297 }
9298 break;
9299 case dw_val_class_loc_list:
9300 case dw_val_class_view_list:
9301 if (dwarf_split_debug_info && dwarf_version >= 5)
9302 {
9303 gcc_assert (AT_loc_list (a)->num_assigned);
9304 size += size_of_uleb128 (AT_loc_list (a)->hash);
9305 }
9306 else
9307 size += DWARF_OFFSET_SIZE;
9308 break;
9309 case dw_val_class_range_list:
9310 if (value_format (a) == DW_FORM_rnglistx)
9311 {
9312 gcc_assert (rnglist_idx);
9313 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9314 size += size_of_uleb128 (r->idx);
9315 }
9316 else
9317 size += DWARF_OFFSET_SIZE;
9318 break;
9319 case dw_val_class_const:
9320 size += size_of_sleb128 (AT_int (a));
9321 break;
9322 case dw_val_class_unsigned_const:
9323 {
9324 int csize = constant_size (AT_unsigned (a));
9325 if (dwarf_version == 3
9326 && a->dw_attr == DW_AT_data_member_location
9327 && csize >= 4)
9328 size += size_of_uleb128 (AT_unsigned (a));
9329 else
9330 size += csize;
9331 }
9332 break;
9333 case dw_val_class_symview:
9334 if (symview_upper_bound <= 0xff)
9335 size += 1;
9336 else if (symview_upper_bound <= 0xffff)
9337 size += 2;
9338 else if (symview_upper_bound <= 0xffffffff)
9339 size += 4;
9340 else
9341 size += 8;
9342 break;
9343 case dw_val_class_const_implicit:
9344 case dw_val_class_unsigned_const_implicit:
9345 case dw_val_class_file_implicit:
9346 /* These occupy no size in the DIE, just an extra sleb128 in
9347 .debug_abbrev. */
9348 break;
9349 case dw_val_class_const_double:
9350 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9351 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9352 size++; /* block */
9353 break;
9354 case dw_val_class_wide_int:
9355 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9356 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9357 if (get_full_len (*a->dw_attr_val.v.val_wide)
9358 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9359 size++; /* block */
9360 break;
9361 case dw_val_class_vec:
9362 size += constant_size (a->dw_attr_val.v.val_vec.length
9363 * a->dw_attr_val.v.val_vec.elt_size)
9364 + a->dw_attr_val.v.val_vec.length
9365 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9366 break;
9367 case dw_val_class_flag:
9368 if (dwarf_version >= 4)
9369 /* Currently all add_AT_flag calls pass in 1 as last argument,
9370 so DW_FORM_flag_present can be used. If that ever changes,
9371 we'll need to use DW_FORM_flag and have some optimization
9372 in build_abbrev_table that will change those to
9373 DW_FORM_flag_present if it is set to 1 in all DIEs using
9374 the same abbrev entry. */
9375 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9376 else
9377 size += 1;
9378 break;
9379 case dw_val_class_die_ref:
9380 if (AT_ref_external (a))
9381 {
9382 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9383 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9384 is sized by target address length, whereas in DWARF3
9385 it's always sized as an offset. */
9386 if (use_debug_types)
9387 size += DWARF_TYPE_SIGNATURE_SIZE;
9388 else if (dwarf_version == 2)
9389 size += DWARF2_ADDR_SIZE;
9390 else
9391 size += DWARF_OFFSET_SIZE;
9392 }
9393 else
9394 size += DWARF_OFFSET_SIZE;
9395 break;
9396 case dw_val_class_fde_ref:
9397 size += DWARF_OFFSET_SIZE;
9398 break;
9399 case dw_val_class_lbl_id:
9400 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9401 {
9402 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9403 size += size_of_uleb128 (AT_index (a));
9404 }
9405 else
9406 size += DWARF2_ADDR_SIZE;
9407 break;
9408 case dw_val_class_lineptr:
9409 case dw_val_class_macptr:
9410 case dw_val_class_loclistsptr:
9411 size += DWARF_OFFSET_SIZE;
9412 break;
9413 case dw_val_class_str:
9414 form = AT_string_form (a);
9415 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9416 size += DWARF_OFFSET_SIZE;
9417 else if (form == dwarf_FORM (DW_FORM_strx))
9418 size += size_of_uleb128 (AT_index (a));
9419 else
9420 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9421 break;
9422 case dw_val_class_file:
9423 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9424 break;
9425 case dw_val_class_data8:
9426 size += 8;
9427 break;
9428 case dw_val_class_vms_delta:
9429 size += DWARF_OFFSET_SIZE;
9430 break;
9431 case dw_val_class_high_pc:
9432 size += DWARF2_ADDR_SIZE;
9433 break;
9434 case dw_val_class_discr_value:
9435 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9436 break;
9437 case dw_val_class_discr_list:
9438 {
9439 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9440
9441 /* This is a block, so we have the block length and then its
9442 data. */
9443 size += constant_size (block_size) + block_size;
9444 }
9445 break;
9446 default:
9447 gcc_unreachable ();
9448 }
9449 }
9450
9451 return size;
9452 }
9453
9454 /* Size the debugging information associated with a given DIE. Visits the
9455 DIE's children recursively. Updates the global variable next_die_offset on
9456 each visit. Uses the current value of next_die_offset to update the
9457 die_offset field in each DIE. */
9458
9459 static void
9460 calc_die_sizes (dw_die_ref die)
9461 {
9462 dw_die_ref c;
9463
9464 gcc_assert (die->die_offset == 0
9465 || (unsigned long int) die->die_offset == next_die_offset);
9466 die->die_offset = next_die_offset;
9467 next_die_offset += size_of_die (die);
9468
9469 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9470
9471 if (die->die_child != NULL)
9472 /* Count the null byte used to terminate sibling lists. */
9473 next_die_offset += 1;
9474 }
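/* Worked example, with made-up sizes: assuming next_die_offset starts at
   the 11-byte 32-bit DWARF CU header size (as the callers arrange), a
   30-byte compile unit DIE with two children of 12 and 8 bytes is laid out
   at offsets 11 (CU DIE), 41 (first child) and 53 (second child);
   next_die_offset then reaches 61, plus 1 for the null byte terminating
   the CU's sibling list, i.e. 62.  Only the accumulation scheme matters,
   not the particular numbers.  */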
9475
9476 /* Size just the base type children at the start of the CU.
9477 This is needed because build_abbrev_table needs to size locs,
9478 and sizing of type-based stack ops needs to know die_offset
9479 values for the base types. */
9480
9481 static void
9482 calc_base_type_die_sizes (void)
9483 {
9484 unsigned long die_offset = (dwarf_split_debug_info
9485 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9486 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9487 unsigned int i;
9488 dw_die_ref base_type;
9489 #if ENABLE_ASSERT_CHECKING
9490 dw_die_ref prev = comp_unit_die ()->die_child;
9491 #endif
9492
9493 die_offset += size_of_die (comp_unit_die ());
9494 for (i = 0; base_types.iterate (i, &base_type); i++)
9495 {
9496 #if ENABLE_ASSERT_CHECKING
9497 gcc_assert (base_type->die_offset == 0
9498 && prev->die_sib == base_type
9499 && base_type->die_child == NULL
9500 && base_type->die_abbrev);
9501 prev = base_type;
9502 #endif
9503 if (abbrev_opt_start
9504 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9505 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9506 base_type->die_offset = die_offset;
9507 die_offset += size_of_die (base_type);
9508 }
9509 }
9510
9511 /* Set the marks for a die and its children. We do this so
9512 that we know whether or not a reference needs to use FORM_ref_addr; only
9513 DIEs in the same CU will be marked. We used to clear out the offset
9514 and use that as the flag, but ran into ordering problems. */
9515
9516 static void
9517 mark_dies (dw_die_ref die)
9518 {
9519 dw_die_ref c;
9520
9521 gcc_assert (!die->die_mark);
9522
9523 die->die_mark = 1;
9524 FOR_EACH_CHILD (die, c, mark_dies (c));
9525 }
9526
9527 /* Clear the marks for a die and its children. */
9528
9529 static void
9530 unmark_dies (dw_die_ref die)
9531 {
9532 dw_die_ref c;
9533
9534 if (! use_debug_types)
9535 gcc_assert (die->die_mark);
9536
9537 die->die_mark = 0;
9538 FOR_EACH_CHILD (die, c, unmark_dies (c));
9539 }
9540
9541 /* Clear the marks for a die, its children and referred dies. */
9542
9543 static void
9544 unmark_all_dies (dw_die_ref die)
9545 {
9546 dw_die_ref c;
9547 dw_attr_node *a;
9548 unsigned ix;
9549
9550 if (!die->die_mark)
9551 return;
9552 die->die_mark = 0;
9553
9554 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9555
9556 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9557 if (AT_class (a) == dw_val_class_die_ref)
9558 unmark_all_dies (AT_ref (a));
9559 }
9560
9561 /* Calculate if the entry should appear in the final output file. It may be
9562 from a pruned type. */
9563
9564 static bool
9565 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9566 {
9567 /* By limiting gnu pubnames to definitions only, gold can generate a
9568 gdb index without entries for declarations, which don't include
9569 enough information to be useful. */
9570 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9571 return false;
9572
9573 if (table == pubname_table)
9574 {
9575 /* Enumerator names are part of the pubname table, but the
9576 parent DW_TAG_enumeration_type die may have been pruned.
9577 Don't output them if that is the case. */
9578 if (p->die->die_tag == DW_TAG_enumerator &&
9579 (p->die->die_parent == NULL
9580 || !p->die->die_parent->die_perennial_p))
9581 return false;
9582
9583 /* Everything else in the pubname table is included. */
9584 return true;
9585 }
9586
9587 /* The pubtypes table shouldn't include types that have been
9588 pruned. */
9589 return (p->die->die_offset != 0
9590 || !flag_eliminate_unused_debug_types);
9591 }
9592
9593 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9594 generated for the compilation unit. */
9595
9596 static unsigned long
9597 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9598 {
9599 unsigned long size;
9600 unsigned i;
9601 pubname_entry *p;
9602 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9603
9604 size = DWARF_PUBNAMES_HEADER_SIZE;
9605 FOR_EACH_VEC_ELT (*names, i, p)
9606 if (include_pubname_in_output (names, p))
9607 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9608
9609 size += DWARF_OFFSET_SIZE;
9610 return size;
9611 }
9612
9613 /* Return the size of the information in the .debug_aranges section. */
9614
9615 static unsigned long
9616 size_of_aranges (void)
9617 {
9618 unsigned long size;
9619
9620 size = DWARF_ARANGES_HEADER_SIZE;
9621
9622 /* Count the address/length pair for this compilation unit. */
9623 if (text_section_used)
9624 size += 2 * DWARF2_ADDR_SIZE;
9625 if (cold_text_section_used)
9626 size += 2 * DWARF2_ADDR_SIZE;
9627 if (have_multiple_function_sections)
9628 {
9629 unsigned fde_idx;
9630 dw_fde_ref fde;
9631
9632 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9633 {
9634 if (DECL_IGNORED_P (fde->decl))
9635 continue;
9636 if (!fde->in_std_section)
9637 size += 2 * DWARF2_ADDR_SIZE;
9638 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9639 size += 2 * DWARF2_ADDR_SIZE;
9640 }
9641 }
9642
9643 /* Count the two zero words used to terminate the address range table. */
9644 size += 2 * DWARF2_ADDR_SIZE;
9645 return size;
9646 }
9647 \f
9648 /* Select the encoding of an attribute value. */
9649
9650 static enum dwarf_form
9651 value_format (dw_attr_node *a)
9652 {
9653 switch (AT_class (a))
9654 {
9655 case dw_val_class_addr:
9656 /* Only very few attributes allow DW_FORM_addr. */
9657 switch (a->dw_attr)
9658 {
9659 case DW_AT_low_pc:
9660 case DW_AT_high_pc:
9661 case DW_AT_entry_pc:
9662 case DW_AT_trampoline:
9663 return (AT_index (a) == NOT_INDEXED
9664 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9665 default:
9666 break;
9667 }
9668 switch (DWARF2_ADDR_SIZE)
9669 {
9670 case 1:
9671 return DW_FORM_data1;
9672 case 2:
9673 return DW_FORM_data2;
9674 case 4:
9675 return DW_FORM_data4;
9676 case 8:
9677 return DW_FORM_data8;
9678 default:
9679 gcc_unreachable ();
9680 }
9681 case dw_val_class_loc_list:
9682 case dw_val_class_view_list:
9683 if (dwarf_split_debug_info
9684 && dwarf_version >= 5
9685 && AT_loc_list (a)->num_assigned)
9686 return DW_FORM_loclistx;
9687 /* FALLTHRU */
9688 case dw_val_class_range_list:
9689 /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo
9690 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9691 care about sizes of .debug* sections in shared libraries and
9692 executables and don't take into account relocations that affect just
9693 relocatable objects - for DW_FORM_rnglistx we'd have to emit offset
9694 table in the .debug_rnglists section. */
9695 if (dwarf_split_debug_info
9696 && dwarf_version >= 5
9697 && AT_class (a) == dw_val_class_range_list
9698 && rnglist_idx
9699 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9700 return DW_FORM_rnglistx;
9701 if (dwarf_version >= 4)
9702 return DW_FORM_sec_offset;
9703 /* FALLTHRU */
9704 case dw_val_class_vms_delta:
9705 case dw_val_class_offset:
9706 switch (DWARF_OFFSET_SIZE)
9707 {
9708 case 4:
9709 return DW_FORM_data4;
9710 case 8:
9711 return DW_FORM_data8;
9712 default:
9713 gcc_unreachable ();
9714 }
9715 case dw_val_class_loc:
9716 if (dwarf_version >= 4)
9717 return DW_FORM_exprloc;
9718 switch (constant_size (size_of_locs (AT_loc (a))))
9719 {
9720 case 1:
9721 return DW_FORM_block1;
9722 case 2:
9723 return DW_FORM_block2;
9724 case 4:
9725 return DW_FORM_block4;
9726 default:
9727 gcc_unreachable ();
9728 }
9729 case dw_val_class_const:
9730 return DW_FORM_sdata;
9731 case dw_val_class_unsigned_const:
9732 switch (constant_size (AT_unsigned (a)))
9733 {
9734 case 1:
9735 return DW_FORM_data1;
9736 case 2:
9737 return DW_FORM_data2;
9738 case 4:
9739 /* In DWARF3 DW_AT_data_member_location with
9740 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9741 constant, so we need to use DW_FORM_udata if we need
9742 a large constant. */
9743 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9744 return DW_FORM_udata;
9745 return DW_FORM_data4;
9746 case 8:
9747 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9748 return DW_FORM_udata;
9749 return DW_FORM_data8;
9750 default:
9751 gcc_unreachable ();
9752 }
9753 case dw_val_class_const_implicit:
9754 case dw_val_class_unsigned_const_implicit:
9755 case dw_val_class_file_implicit:
9756 return DW_FORM_implicit_const;
9757 case dw_val_class_const_double:
9758 switch (HOST_BITS_PER_WIDE_INT)
9759 {
9760 case 8:
9761 return DW_FORM_data2;
9762 case 16:
9763 return DW_FORM_data4;
9764 case 32:
9765 return DW_FORM_data8;
9766 case 64:
9767 if (dwarf_version >= 5)
9768 return DW_FORM_data16;
9769 /* FALLTHRU */
9770 default:
9771 return DW_FORM_block1;
9772 }
9773 case dw_val_class_wide_int:
9774 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9775 {
9776 case 8:
9777 return DW_FORM_data1;
9778 case 16:
9779 return DW_FORM_data2;
9780 case 32:
9781 return DW_FORM_data4;
9782 case 64:
9783 return DW_FORM_data8;
9784 case 128:
9785 if (dwarf_version >= 5)
9786 return DW_FORM_data16;
9787 /* FALLTHRU */
9788 default:
9789 return DW_FORM_block1;
9790 }
9791 case dw_val_class_symview:
9792 /* ??? We might use uleb128, but then we'd have to compute
9793 .debug_info offsets in the assembler. */
9794 if (symview_upper_bound <= 0xff)
9795 return DW_FORM_data1;
9796 else if (symview_upper_bound <= 0xffff)
9797 return DW_FORM_data2;
9798 else if (symview_upper_bound <= 0xffffffff)
9799 return DW_FORM_data4;
9800 else
9801 return DW_FORM_data8;
9802 case dw_val_class_vec:
9803 switch (constant_size (a->dw_attr_val.v.val_vec.length
9804 * a->dw_attr_val.v.val_vec.elt_size))
9805 {
9806 case 1:
9807 return DW_FORM_block1;
9808 case 2:
9809 return DW_FORM_block2;
9810 case 4:
9811 return DW_FORM_block4;
9812 default:
9813 gcc_unreachable ();
9814 }
9815 case dw_val_class_flag:
9816 if (dwarf_version >= 4)
9817 {
9818 /* Currently all add_AT_flag calls pass in 1 as last argument,
9819 so DW_FORM_flag_present can be used. If that ever changes,
9820 we'll need to use DW_FORM_flag and have some optimization
9821 in build_abbrev_table that will change those to
9822 DW_FORM_flag_present if it is set to 1 in all DIEs using
9823 the same abbrev entry. */
9824 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9825 return DW_FORM_flag_present;
9826 }
9827 return DW_FORM_flag;
9828 case dw_val_class_die_ref:
9829 if (AT_ref_external (a))
9830 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9831 else
9832 return DW_FORM_ref4;
9833 case dw_val_class_fde_ref:
9834 return DW_FORM_data4;
9835 case dw_val_class_lbl_id:
9836 return (AT_index (a) == NOT_INDEXED
9837 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9838 case dw_val_class_lineptr:
9839 case dw_val_class_macptr:
9840 case dw_val_class_loclistsptr:
9841 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data4;
9842 case dw_val_class_str:
9843 return AT_string_form (a);
9844 case dw_val_class_file:
9845 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9846 {
9847 case 1:
9848 return DW_FORM_data1;
9849 case 2:
9850 return DW_FORM_data2;
9851 case 4:
9852 return DW_FORM_data4;
9853 default:
9854 gcc_unreachable ();
9855 }
9856
9857 case dw_val_class_data8:
9858 return DW_FORM_data8;
9859
9860 case dw_val_class_high_pc:
9861 switch (DWARF2_ADDR_SIZE)
9862 {
9863 case 1:
9864 return DW_FORM_data1;
9865 case 2:
9866 return DW_FORM_data2;
9867 case 4:
9868 return DW_FORM_data4;
9869 case 8:
9870 return DW_FORM_data8;
9871 default:
9872 gcc_unreachable ();
9873 }
9874
9875 case dw_val_class_discr_value:
9876 return (a->dw_attr_val.v.val_discr_value.pos
9877 ? DW_FORM_udata
9878 : DW_FORM_sdata);
9879 case dw_val_class_discr_list:
9880 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9881 {
9882 case 1:
9883 return DW_FORM_block1;
9884 case 2:
9885 return DW_FORM_block2;
9886 case 4:
9887 return DW_FORM_block4;
9888 default:
9889 gcc_unreachable ();
9890 }
9891
9892 default:
9893 gcc_unreachable ();
9894 }
9895 }
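/* Example, illustrative: a DW_AT_byte_size attribute stored as
   dw_val_class_unsigned_const with value 4 has constant_size (4) == 1 and
   is emitted as DW_FORM_data1; with value 300 it would use DW_FORM_data2.
   The one wrinkle handled above is DW_AT_data_member_location in DWARF 3,
   where data4/data8 would be interpreted as a loclistptr, so DW_FORM_udata
   is used instead for large values.  */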
9896
9897 /* Output the encoding of an attribute value. */
9898
9899 static void
9900 output_value_format (dw_attr_node *a)
9901 {
9902 enum dwarf_form form = value_format (a);
9903
9904 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9905 }
9906
9907 /* Given a die and id, produce the appropriate abbreviations. */
9908
9909 static void
9910 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9911 {
9912 unsigned ix;
9913 dw_attr_node *a_attr;
9914
9915 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9916 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9917 dwarf_tag_name (abbrev->die_tag));
9918
9919 if (abbrev->die_child != NULL)
9920 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9921 else
9922 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9923
9924 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9925 {
9926 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9927 dwarf_attr_name (a_attr->dw_attr));
9928 output_value_format (a_attr);
9929 if (value_format (a_attr) == DW_FORM_implicit_const)
9930 {
9931 if (AT_class (a_attr) == dw_val_class_file_implicit)
9932 {
9933 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9934 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9935 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9936 }
9937 else
9938 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9939 }
9940 }
9941
9942 dw2_asm_output_data (1, 0, NULL);
9943 dw2_asm_output_data (1, 0, NULL);
9944 }
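/* What one entry emitted by output_die_abbrevs looks like, schematically
   (the byte values are illustrative, not taken from a real dump):

     .uleb128 0x2     ; abbrev code
     .uleb128 0x24    ; TAG: DW_TAG_base_type
     .byte    0       ; DW_children_no
     .uleb128 0x3     ; DW_AT_name
     .uleb128 0xe     ; DW_FORM_strp
     .uleb128 0xb     ; DW_AT_byte_size
     .uleb128 0xb     ; DW_FORM_data1
     .byte    0       ; terminate the attribute list
     .byte    0

   DW_FORM_implicit_const attributes additionally carry their sleb128 value
   right after the form, as handled above.  */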
9945
9946
9947 /* Output the .debug_abbrev section which defines the DIE abbreviation
9948 table. */
9949
9950 static void
9951 output_abbrev_section (void)
9952 {
9953 unsigned int abbrev_id;
9954 dw_die_ref abbrev;
9955
9956 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9957 if (abbrev_id != 0)
9958 output_die_abbrevs (abbrev_id, abbrev);
9959
9960 /* Terminate the table. */
9961 dw2_asm_output_data (1, 0, NULL);
9962 }
9963
9964 /* Return a new location list, given the begin and end range, and the
9965 expression. */
9966
9967 static inline dw_loc_list_ref
9968 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
9969 const char *end, var_loc_view vend,
9970 const char *section)
9971 {
9972 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
9973
9974 retlist->begin = begin;
9975 retlist->begin_entry = NULL;
9976 retlist->end = end;
9977 retlist->expr = expr;
9978 retlist->section = section;
9979 retlist->vbegin = vbegin;
9980 retlist->vend = vend;
9981
9982 return retlist;
9983 }
9984
9985 /* Return true iff there's any nonzero view number in the loc list.
9986
9987 ??? When views are not enabled, we'll often extend a single range
9988 to the entire function, so that we emit a single location
9989 expression rather than a location list. With views, even with a
9990 single range, we'll output a list if start or end have a nonzero
9991 view. If we change this, we may want to stop splitting a single
9992 range in dw_loc_list just because of a nonzero view, even if it
9993 straddles across hot/cold partitions. */
9994
9995 static bool
9996 loc_list_has_views (dw_loc_list_ref list)
9997 {
9998 if (!debug_variable_location_views)
9999 return false;
10000
10001 for (dw_loc_list_ref loc = list;
10002 loc != NULL; loc = loc->dw_loc_next)
10003 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10004 return true;
10005
10006 return false;
10007 }
10008
10009 /* Generate a new internal symbol for this location list node, if it
10010 hasn't got one yet. */
10011
10012 static inline void
10013 gen_llsym (dw_loc_list_ref list)
10014 {
10015 gcc_assert (!list->ll_symbol);
10016 list->ll_symbol = gen_internal_sym ("LLST");
10017
10018 if (!loc_list_has_views (list))
10019 return;
10020
10021 if (dwarf2out_locviews_in_attribute ())
10022 {
10023 /* Use the same label_num for the view list. */
10024 label_num--;
10025 list->vl_symbol = gen_internal_sym ("LVUS");
10026 }
10027 else
10028 list->vl_symbol = list->ll_symbol;
10029 }
10030
10031 /* Generate a symbol for the list, but only if we really want to emit
10032 it as a list. */
10033
10034 static inline void
10035 maybe_gen_llsym (dw_loc_list_ref list)
10036 {
10037 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10038 return;
10039
10040 gen_llsym (list);
10041 }
10042
10043 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10044 NULL, don't consider size of the location expression. If we're not
10045 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10046 representation in *SIZEP. */
10047
10048 static bool
10049 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10050 {
10051 /* Don't output an entry that starts and ends at the same address. */
10052 if (strcmp (curr->begin, curr->end) == 0
10053 && curr->vbegin == curr->vend && !curr->force)
10054 return true;
10055
10056 if (!sizep)
10057 return false;
10058
10059 unsigned long size = size_of_locs (curr->expr);
10060
10061 /* If the expression is too large, drop it on the floor. We could
10062 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10063 in the expression, but >= 64KB expressions for a single value
10064 in a single range are unlikely to be very useful. */
10065 if (dwarf_version < 5 && size > 0xffff)
10066 return true;
10067
10068 *sizep = size;
10069
10070 return false;
10071 }
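/* Note, illustrative: the 0xffff limit above comes from the pre-DWARF-5
   location list format, where the expression length is emitted as a fixed
   two-byte field (see output_loc_list); DWARF 5 uses a uleb128 length, so
   the limit is only applied when dwarf_version < 5.  */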
10072
10073 /* Output a view pair loclist entry for CURR, if it requires one. */
10074
10075 static void
10076 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10077 {
10078 if (!dwarf2out_locviews_in_loclist ())
10079 return;
10080
10081 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10082 return;
10083
10084 #ifdef DW_LLE_view_pair
10085 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10086
10087 if (dwarf2out_as_locview_support)
10088 {
10089 if (ZERO_VIEW_P (curr->vbegin))
10090 dw2_asm_output_data_uleb128 (0, "Location view begin");
10091 else
10092 {
10093 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10094 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10095 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10096 }
10097
10098 if (ZERO_VIEW_P (curr->vend))
10099 dw2_asm_output_data_uleb128 (0, "Location view end");
10100 else
10101 {
10102 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10103 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10104 dw2_asm_output_symname_uleb128 (label, "Location view end");
10105 }
10106 }
10107 else
10108 {
10109 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10110 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10111 }
10112 #endif /* DW_LLE_view_pair */
10113
10114 return;
10115 }
10116
10117 /* Output the location list given to us. */
10118
10119 static void
10120 output_loc_list (dw_loc_list_ref list_head)
10121 {
10122 int vcount = 0, lcount = 0;
10123
10124 if (list_head->emitted)
10125 return;
10126 list_head->emitted = true;
10127
10128 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10129 {
10130 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10131
10132 for (dw_loc_list_ref curr = list_head; curr != NULL;
10133 curr = curr->dw_loc_next)
10134 {
10135 unsigned long size;
10136
10137 if (skip_loc_list_entry (curr, &size))
10138 continue;
10139
10140 vcount++;
10141
10142 /* ?? dwarf_split_debug_info? */
10143 if (dwarf2out_as_locview_support)
10144 {
10145 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10146
10147 if (!ZERO_VIEW_P (curr->vbegin))
10148 {
10149 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10150 dw2_asm_output_symname_uleb128 (label,
10151 "View list begin (%s)",
10152 list_head->vl_symbol);
10153 }
10154 else
10155 dw2_asm_output_data_uleb128 (0,
10156 "View list begin (%s)",
10157 list_head->vl_symbol);
10158
10159 if (!ZERO_VIEW_P (curr->vend))
10160 {
10161 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10162 dw2_asm_output_symname_uleb128 (label,
10163 "View list end (%s)",
10164 list_head->vl_symbol);
10165 }
10166 else
10167 dw2_asm_output_data_uleb128 (0,
10168 "View list end (%s)",
10169 list_head->vl_symbol);
10170 }
10171 else
10172 {
10173 dw2_asm_output_data_uleb128 (curr->vbegin,
10174 "View list begin (%s)",
10175 list_head->vl_symbol);
10176 dw2_asm_output_data_uleb128 (curr->vend,
10177 "View list end (%s)",
10178 list_head->vl_symbol);
10179 }
10180 }
10181 }
10182
10183 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10184
10185 const char *last_section = NULL;
10186 const char *base_label = NULL;
10187
10188 /* Walk the location list, and output each range + expression. */
10189 for (dw_loc_list_ref curr = list_head; curr != NULL;
10190 curr = curr->dw_loc_next)
10191 {
10192 unsigned long size;
10193
10194 /* Skip this entry? If we skip it here, we must skip it in the
10195 view list above as well. */
10196 if (skip_loc_list_entry (curr, &size))
10197 continue;
10198
10199 lcount++;
10200
10201 if (dwarf_version >= 5)
10202 {
10203 if (dwarf_split_debug_info)
10204 {
10205 dwarf2out_maybe_output_loclist_view_pair (curr);
10206 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
10207 uleb128 index into .debug_addr and uleb128 length. */
10208 dw2_asm_output_data (1, DW_LLE_startx_length,
10209 "DW_LLE_startx_length (%s)",
10210 list_head->ll_symbol);
10211 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10212 "Location list range start index "
10213 "(%s)", curr->begin);
10214 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10215 For that case we probably need to emit DW_LLE_startx_endx,
10216 but we'd need 2 .debug_addr entries rather than just one. */
10217 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10218 "Location list length (%s)",
10219 list_head->ll_symbol);
10220 }
10221 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10222 {
10223 dwarf2out_maybe_output_loclist_view_pair (curr);
10224 /* If all code is in .text section, the base address is
10225 already provided by the CU attributes. Use
10226 DW_LLE_offset_pair where both addresses are uleb128 encoded
10227 offsets against that base. */
10228 dw2_asm_output_data (1, DW_LLE_offset_pair,
10229 "DW_LLE_offset_pair (%s)",
10230 list_head->ll_symbol);
10231 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10232 "Location list begin address (%s)",
10233 list_head->ll_symbol);
10234 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10235 "Location list end address (%s)",
10236 list_head->ll_symbol);
10237 }
10238 else if (HAVE_AS_LEB128)
10239 {
10240 /* Otherwise, find out how many consecutive entries could share
10241 the same base entry. If just one, emit DW_LLE_start_length,
10242 otherwise emit DW_LLE_base_address for the base address
10243 followed by a series of DW_LLE_offset_pair. */
10244 if (last_section == NULL || curr->section != last_section)
10245 {
10246 dw_loc_list_ref curr2;
10247 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10248 curr2 = curr2->dw_loc_next)
10249 {
10250 if (strcmp (curr2->begin, curr2->end) == 0
10251 && !curr2->force)
10252 continue;
10253 break;
10254 }
10255 if (curr2 == NULL || curr->section != curr2->section)
10256 last_section = NULL;
10257 else
10258 {
10259 last_section = curr->section;
10260 base_label = curr->begin;
10261 dw2_asm_output_data (1, DW_LLE_base_address,
10262 "DW_LLE_base_address (%s)",
10263 list_head->ll_symbol);
10264 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10265 "Base address (%s)",
10266 list_head->ll_symbol);
10267 }
10268 }
10269 /* Only one entry with the same base address. Use
10270 DW_LLE_start_length with absolute address and uleb128
10271 length. */
10272 if (last_section == NULL)
10273 {
10274 dwarf2out_maybe_output_loclist_view_pair (curr);
10275 dw2_asm_output_data (1, DW_LLE_start_length,
10276 "DW_LLE_start_length (%s)",
10277 list_head->ll_symbol);
10278 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10279 "Location list begin address (%s)",
10280 list_head->ll_symbol);
10281 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10282 "Location list length "
10283 "(%s)", list_head->ll_symbol);
10284 }
10285 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10286 DW_LLE_base_address. */
10287 else
10288 {
10289 dwarf2out_maybe_output_loclist_view_pair (curr);
10290 dw2_asm_output_data (1, DW_LLE_offset_pair,
10291 "DW_LLE_offset_pair (%s)",
10292 list_head->ll_symbol);
10293 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10294 "Location list begin address "
10295 "(%s)", list_head->ll_symbol);
10296 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10297 "Location list end address "
10298 "(%s)", list_head->ll_symbol);
10299 }
10300 }
10301 /* The assembler does not support the .uleb128 directive. Emit
10302 DW_LLE_start_end with a pair of absolute addresses. */
10303 else
10304 {
10305 dwarf2out_maybe_output_loclist_view_pair (curr);
10306 dw2_asm_output_data (1, DW_LLE_start_end,
10307 "DW_LLE_start_end (%s)",
10308 list_head->ll_symbol);
10309 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10310 "Location list begin address (%s)",
10311 list_head->ll_symbol);
10312 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10313 "Location list end address (%s)",
10314 list_head->ll_symbol);
10315 }
10316 }
10317 else if (dwarf_split_debug_info)
10318 {
10319 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
10320 and 4 byte length. */
10321 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10322 "Location list start/length entry (%s)",
10323 list_head->ll_symbol);
10324 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10325 "Location list range start index (%s)",
10326 curr->begin);
10327 /* The length field is 4 bytes. If we ever need to support
10328 an 8-byte length, we can add a new DW_LLE code or fall back
10329 to DW_LLE_GNU_start_end_entry. */
10330 dw2_asm_output_delta (4, curr->end, curr->begin,
10331 "Location list range length (%s)",
10332 list_head->ll_symbol);
10333 }
10334 else if (!have_multiple_function_sections)
10335 {
10336 /* Pair of relative addresses against start of text section. */
10337 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10338 "Location list begin address (%s)",
10339 list_head->ll_symbol);
10340 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10341 "Location list end address (%s)",
10342 list_head->ll_symbol);
10343 }
10344 else
10345 {
10346 /* Pair of absolute addresses. */
10347 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10348 "Location list begin address (%s)",
10349 list_head->ll_symbol);
10350 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10351 "Location list end address (%s)",
10352 list_head->ll_symbol);
10353 }
10354
10355 /* Output the block length for this list of location operations. */
10356 if (dwarf_version >= 5)
10357 dw2_asm_output_data_uleb128 (size, "Location expression size");
10358 else
10359 {
10360 gcc_assert (size <= 0xffff);
10361 dw2_asm_output_data (2, size, "Location expression size");
10362 }
10363
10364 output_loc_sequence (curr->expr, -1);
10365 }
10366
10367 /* And finally list termination. */
10368 if (dwarf_version >= 5)
10369 dw2_asm_output_data (1, DW_LLE_end_of_list,
10370 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10371 else if (dwarf_split_debug_info)
10372 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10373 "Location list terminator (%s)",
10374 list_head->ll_symbol);
10375 else
10376 {
10377 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10378 "Location list terminator begin (%s)",
10379 list_head->ll_symbol);
10380 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10381 "Location list terminator end (%s)",
10382 list_head->ll_symbol);
10383 }
10384
10385 gcc_assert (!list_head->vl_symbol
10386 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10387 }
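/* Schematic of one DWARF 5 location list as emitted above for the common
   single-text-section, HAVE_AS_LEB128 case (labels and values are made up;
   the real code emits the expression size directly rather than via local
   labels):

     .byte    0x4                       ; DW_LLE_offset_pair
     .uleb128 .LVL1 - .Ltext0           ; begin, relative to the CU base
     .uleb128 .LVL2 - .Ltext0           ; end
     .uleb128 <expression size>
     ...  DWARF expression bytes  ...
     .byte    0                         ; DW_LLE_end_of_list  */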
10388
10389 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10390 section. Emit a relocated reference if val_entry is NULL, otherwise,
10391 emit an indirect reference. */
10392
10393 static void
10394 output_range_list_offset (dw_attr_node *a)
10395 {
10396 const char *name = dwarf_attr_name (a->dw_attr);
10397
10398 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10399 {
10400 if (dwarf_version >= 5)
10401 {
10402 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10403 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10404 debug_ranges_section, "%s", name);
10405 }
10406 else
10407 {
10408 char *p = strchr (ranges_section_label, '\0');
10409 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10410 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10411 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10412 debug_ranges_section, "%s", name);
10413 *p = '\0';
10414 }
10415 }
10416 else if (dwarf_version >= 5)
10417 {
10418 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10419 gcc_assert (rnglist_idx);
10420 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10421 }
10422 else
10423 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10424 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10425 "%s (offset from %s)", name, ranges_section_label);
10426 }
10427
10428 /* Output the offset into the debug_loc section. */
10429
10430 static void
10431 output_loc_list_offset (dw_attr_node *a)
10432 {
10433 char *sym = AT_loc_list (a)->ll_symbol;
10434
10435 gcc_assert (sym);
10436 if (!dwarf_split_debug_info)
10437 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10438 "%s", dwarf_attr_name (a->dw_attr));
10439 else if (dwarf_version >= 5)
10440 {
10441 gcc_assert (AT_loc_list (a)->num_assigned);
10442 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10443 dwarf_attr_name (a->dw_attr),
10444 sym);
10445 }
10446 else
10447 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10448 "%s", dwarf_attr_name (a->dw_attr));
10449 }
10450
10451 /* Output the offset into the debug_loc section. */
10452
10453 static void
10454 output_view_list_offset (dw_attr_node *a)
10455 {
10456 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10457
10458 gcc_assert (sym);
10459 if (dwarf_split_debug_info)
10460 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10461 "%s", dwarf_attr_name (a->dw_attr));
10462 else
10463 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10464 "%s", dwarf_attr_name (a->dw_attr));
10465 }
10466
10467 /* Output an attribute's index or value appropriately. */
10468
10469 static void
10470 output_attr_index_or_value (dw_attr_node *a)
10471 {
10472 const char *name = dwarf_attr_name (a->dw_attr);
10473
10474 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10475 {
10476 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10477 return;
10478 }
10479 switch (AT_class (a))
10480 {
10481 case dw_val_class_addr:
10482 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10483 break;
10484 case dw_val_class_high_pc:
10485 case dw_val_class_lbl_id:
10486 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10487 break;
10488 default:
10489 gcc_unreachable ();
10490 }
10491 }
10492
10493 /* Output a type signature. */
10494
10495 static inline void
10496 output_signature (const char *sig, const char *name)
10497 {
10498 int i;
10499
10500 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10501 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10502 }
10503
10504 /* Output a discriminant value. */
10505
10506 static inline void
10507 output_discr_value (dw_discr_value *discr_value, const char *name)
10508 {
10509 if (discr_value->pos)
10510 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10511 else
10512 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10513 }
10514
10515 /* Output the DIE and its attributes. Called recursively to generate
10516 the definitions of each child DIE. */
10517
10518 static void
10519 output_die (dw_die_ref die)
10520 {
10521 dw_attr_node *a;
10522 dw_die_ref c;
10523 unsigned long size;
10524 unsigned ix;
10525
10526 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10527 (unsigned long)die->die_offset,
10528 dwarf_tag_name (die->die_tag));
10529
10530 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10531 {
10532 const char *name = dwarf_attr_name (a->dw_attr);
10533
10534 switch (AT_class (a))
10535 {
10536 case dw_val_class_addr:
10537 output_attr_index_or_value (a);
10538 break;
10539
10540 case dw_val_class_offset:
10541 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10542 "%s", name);
10543 break;
10544
10545 case dw_val_class_range_list:
10546 output_range_list_offset (a);
10547 break;
10548
10549 case dw_val_class_loc:
10550 size = size_of_locs (AT_loc (a));
10551
10552 /* Output the block length for this list of location operations. */
10553 if (dwarf_version >= 4)
10554 dw2_asm_output_data_uleb128 (size, "%s", name);
10555 else
10556 dw2_asm_output_data (constant_size (size), size, "%s", name);
10557
10558 output_loc_sequence (AT_loc (a), -1);
10559 break;
10560
10561 case dw_val_class_const:
10562 /* ??? It would be slightly more efficient to use a scheme like the one
10563 used for unsigned constants below, but gdb 4.x does not sign
10564 extend. Gdb 5.x does sign extend. */
10565 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10566 break;
10567
10568 case dw_val_class_unsigned_const:
10569 {
10570 int csize = constant_size (AT_unsigned (a));
10571 if (dwarf_version == 3
10572 && a->dw_attr == DW_AT_data_member_location
10573 && csize >= 4)
10574 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10575 else
10576 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10577 }
10578 break;
10579
10580 case dw_val_class_symview:
10581 {
10582 int vsize;
10583 if (symview_upper_bound <= 0xff)
10584 vsize = 1;
10585 else if (symview_upper_bound <= 0xffff)
10586 vsize = 2;
10587 else if (symview_upper_bound <= 0xffffffff)
10588 vsize = 4;
10589 else
10590 vsize = 8;
10591 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10592 "%s", name);
10593 }
10594 break;
10595
10596 case dw_val_class_const_implicit:
10597 if (flag_debug_asm)
10598 fprintf (asm_out_file, "\t\t\t%s %s ("
10599 HOST_WIDE_INT_PRINT_DEC ")\n",
10600 ASM_COMMENT_START, name, AT_int (a));
10601 break;
10602
10603 case dw_val_class_unsigned_const_implicit:
10604 if (flag_debug_asm)
10605 fprintf (asm_out_file, "\t\t\t%s %s ("
10606 HOST_WIDE_INT_PRINT_HEX ")\n",
10607 ASM_COMMENT_START, name, AT_unsigned (a));
10608 break;
10609
10610 case dw_val_class_const_double:
10611 {
10612 unsigned HOST_WIDE_INT first, second;
10613
10614 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10615 dw2_asm_output_data (1,
10616 HOST_BITS_PER_DOUBLE_INT
10617 / HOST_BITS_PER_CHAR,
10618 NULL);
10619
10620 if (WORDS_BIG_ENDIAN)
10621 {
10622 first = a->dw_attr_val.v.val_double.high;
10623 second = a->dw_attr_val.v.val_double.low;
10624 }
10625 else
10626 {
10627 first = a->dw_attr_val.v.val_double.low;
10628 second = a->dw_attr_val.v.val_double.high;
10629 }
10630
10631 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10632 first, "%s", name);
10633 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10634 second, NULL);
10635 }
10636 break;
10637
10638 case dw_val_class_wide_int:
10639 {
10640 int i;
10641 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10642 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10643 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10644 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10645 * l, NULL);
10646
10647 if (WORDS_BIG_ENDIAN)
10648 for (i = len - 1; i >= 0; --i)
10649 {
10650 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10651 "%s", name);
10652 name = "";
10653 }
10654 else
10655 for (i = 0; i < len; ++i)
10656 {
10657 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10658 "%s", name);
10659 name = "";
10660 }
10661 }
10662 break;
10663
10664 case dw_val_class_vec:
10665 {
10666 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10667 unsigned int len = a->dw_attr_val.v.val_vec.length;
10668 unsigned int i;
10669 unsigned char *p;
10670
10671 dw2_asm_output_data (constant_size (len * elt_size),
10672 len * elt_size, "%s", name);
10673 if (elt_size > sizeof (HOST_WIDE_INT))
10674 {
10675 elt_size /= 2;
10676 len *= 2;
10677 }
10678 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10679 i < len;
10680 i++, p += elt_size)
10681 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10682 "fp or vector constant word %u", i);
10683 break;
10684 }
10685
10686 case dw_val_class_flag:
10687 if (dwarf_version >= 4)
10688 {
10689 /* Currently all add_AT_flag calls pass in 1 as last argument,
10690 so DW_FORM_flag_present can be used. If that ever changes,
10691 we'll need to use DW_FORM_flag and have some optimization
10692 in build_abbrev_table that will change those to
10693 DW_FORM_flag_present if it is set to 1 in all DIEs using
10694 the same abbrev entry. */
10695 gcc_assert (AT_flag (a) == 1);
10696 if (flag_debug_asm)
10697 fprintf (asm_out_file, "\t\t\t%s %s\n",
10698 ASM_COMMENT_START, name);
10699 break;
10700 }
10701 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10702 break;
10703
10704 case dw_val_class_loc_list:
10705 output_loc_list_offset (a);
10706 break;
10707
10708 case dw_val_class_view_list:
10709 output_view_list_offset (a);
10710 break;
10711
10712 case dw_val_class_die_ref:
10713 if (AT_ref_external (a))
10714 {
10715 if (AT_ref (a)->comdat_type_p)
10716 {
10717 comdat_type_node *type_node
10718 = AT_ref (a)->die_id.die_type_node;
10719
10720 gcc_assert (type_node);
10721 output_signature (type_node->signature, name);
10722 }
10723 else
10724 {
10725 const char *sym = AT_ref (a)->die_id.die_symbol;
10726 int size;
10727
10728 gcc_assert (sym);
10729 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10730 length, whereas in DWARF3 it's always sized as an
10731 offset. */
10732 if (dwarf_version == 2)
10733 size = DWARF2_ADDR_SIZE;
10734 else
10735 size = DWARF_OFFSET_SIZE;
10736 /* ??? We cannot unconditionally output die_offset if
10737 non-zero - others might create references to those
10738 DIEs via symbols.
10739 And we do not clear its DIE offset after outputting it
10740 (and the label refers to the actual DIEs, not the
10741 DWARF CU unit header which is when using label + offset
10742 would be the correct thing to do).
10743 ??? This is the reason for the with_offset flag. */
10744 if (AT_ref (a)->with_offset)
10745 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10746 debug_info_section, "%s", name);
10747 else
10748 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10749 name);
10750 }
10751 }
10752 else
10753 {
10754 gcc_assert (AT_ref (a)->die_offset);
10755 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10756 "%s", name);
10757 }
10758 break;
10759
10760 case dw_val_class_fde_ref:
10761 {
10762 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10763
10764 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10765 a->dw_attr_val.v.val_fde_index * 2);
10766 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10767 "%s", name);
10768 }
10769 break;
10770
10771 case dw_val_class_vms_delta:
10772 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10773 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10774 AT_vms_delta2 (a), AT_vms_delta1 (a),
10775 "%s", name);
10776 #else
10777 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10778 AT_vms_delta2 (a), AT_vms_delta1 (a),
10779 "%s", name);
10780 #endif
10781 break;
10782
10783 case dw_val_class_lbl_id:
10784 output_attr_index_or_value (a);
10785 break;
10786
10787 case dw_val_class_lineptr:
10788 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10789 debug_line_section, "%s", name);
10790 break;
10791
10792 case dw_val_class_macptr:
10793 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10794 debug_macinfo_section, "%s", name);
10795 break;
10796
10797 case dw_val_class_loclistsptr:
10798 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10799 debug_loc_section, "%s", name);
10800 break;
10801
10802 case dw_val_class_str:
10803 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10804 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10805 a->dw_attr_val.v.val_str->label,
10806 debug_str_section,
10807 "%s: \"%s\"", name, AT_string (a));
10808 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10809 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10810 a->dw_attr_val.v.val_str->label,
10811 debug_line_str_section,
10812 "%s: \"%s\"", name, AT_string (a));
10813 else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (DW_FORM_strx))
10814 dw2_asm_output_data_uleb128 (AT_index (a),
10815 "%s: \"%s\"", name, AT_string (a));
10816 else
10817 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10818 break;
10819
10820 case dw_val_class_file:
10821 {
10822 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10823
10824 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10825 a->dw_attr_val.v.val_file->filename);
10826 break;
10827 }
10828
10829 case dw_val_class_file_implicit:
10830 if (flag_debug_asm)
10831 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10832 ASM_COMMENT_START, name,
10833 maybe_emit_file (a->dw_attr_val.v.val_file),
10834 a->dw_attr_val.v.val_file->filename);
10835 break;
10836
10837 case dw_val_class_data8:
10838 {
10839 int i;
10840
10841 for (i = 0; i < 8; i++)
10842 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10843 i == 0 ? "%s" : NULL, name);
10844 break;
10845 }
10846
10847 case dw_val_class_high_pc:
10848 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10849 get_AT_low_pc (die), "DW_AT_high_pc");
10850 break;
10851
10852 case dw_val_class_discr_value:
10853 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10854 break;
10855
10856 case dw_val_class_discr_list:
10857 {
10858 dw_discr_list_ref list = AT_discr_list (a);
10859 const int size = size_of_discr_list (list);
10860
10861 /* This is a block, so output its length first. */
10862 dw2_asm_output_data (constant_size (size), size,
10863 "%s: block size", name);
10864
10865 for (; list != NULL; list = list->dw_discr_next)
10866 {
10867 /* One byte for the discriminant value descriptor, and then as
10868 many LEB128 numbers as required. */
10869 if (list->dw_discr_range)
10870 dw2_asm_output_data (1, DW_DSC_range,
10871 "%s: DW_DSC_range", name);
10872 else
10873 dw2_asm_output_data (1, DW_DSC_label,
10874 "%s: DW_DSC_label", name);
10875
10876 output_discr_value (&list->dw_discr_lower_bound, name);
10877 if (list->dw_discr_range)
10878 output_discr_value (&list->dw_discr_upper_bound, name);
10879 }
10880 break;
10881 }
10882
10883 default:
10884 gcc_unreachable ();
10885 }
10886 }
10887
10888 FOR_EACH_CHILD (die, c, output_die (c));
10889
10890 /* Add null byte to terminate sibling list. */
10891 if (die->die_child != NULL)
10892 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10893 (unsigned long) die->die_offset);
10894 }
10895
10896 /* Output the dwarf version number. */
10897
10898 static void
10899 output_dwarf_version ()
10900 {
10901 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10902 views in loclist. That will change eventually. */
10903 if (dwarf_version == 6)
10904 {
10905 static bool once;
10906 if (!once)
10907 {
10908 warning (0,
10909 "-gdwarf-6 is output as version 5 with incompatibilities");
10910 once = true;
10911 }
10912 dw2_asm_output_data (2, 5, "DWARF version number");
10913 }
10914 else
10915 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10916 }
10917
10918 /* Output the compilation unit that appears at the beginning of the
10919 .debug_info section, and precedes the DIE descriptions. */
10920
10921 static void
10922 output_compilation_unit_header (enum dwarf_unit_type ut)
10923 {
10924 if (!XCOFF_DEBUGGING_INFO)
10925 {
10926 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10927 dw2_asm_output_data (4, 0xffffffff,
10928 "Initial length escape value indicating 64-bit DWARF extension");
10929 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10930 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10931 "Length of Compilation Unit Info");
10932 }
10933
10934 output_dwarf_version ();
10935 if (dwarf_version >= 5)
10936 {
10937 const char *name;
10938 switch (ut)
10939 {
10940 case DW_UT_compile: name = "DW_UT_compile"; break;
10941 case DW_UT_type: name = "DW_UT_type"; break;
10942 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10943 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10944 default: gcc_unreachable ();
10945 }
10946 dw2_asm_output_data (1, ut, "%s", name);
10947 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10948 }
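  /* Note: DWARF 5 reorders the unit header - unit_type and address_size
     come before the .debug_abbrev offset, whereas DWARF 2-4 place
     address_size after it; hence the two emission points above and below.  */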
10949 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
10950 debug_abbrev_section,
10951 "Offset Into Abbrev. Section");
10952 if (dwarf_version < 5)
10953 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10954 }
10955
10956 /* Output the compilation unit DIE and its children. */
10957
10958 static void
10959 output_comp_unit (dw_die_ref die, int output_if_empty,
10960 const unsigned char *dwo_id)
10961 {
10962 const char *secname, *oldsym;
10963 char *tmp;
10964
10965   /* Unless we are outputting the main CU, we may throw away empty ones.  */
10966 if (!output_if_empty && die->die_child == NULL)
10967 return;
10968
10969 /* Even if there are no children of this DIE, we must output the information
10970 about the compilation unit. Otherwise, on an empty translation unit, we
10971 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
10972 will then complain when examining the file. First mark all the DIEs in
10973 this CU so we know which get local refs. */
10974 mark_dies (die);
10975
10976 external_ref_hash_type *extern_map = optimize_external_refs (die);
10977
10978 /* For now, optimize only the main CU, in order to optimize the rest
10979 we'd need to see all of them earlier. Leave the rest for post-linking
10980 tools like DWZ. */
10981 if (die == comp_unit_die ())
10982 abbrev_opt_start = vec_safe_length (abbrev_die_table);
10983
10984 build_abbrev_table (die, extern_map);
10985
10986 optimize_abbrev_table ();
10987
10988 delete extern_map;
10989
10990 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
10991 next_die_offset = (dwo_id
10992 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
10993 : DWARF_COMPILE_UNIT_HEADER_SIZE);
10994 calc_die_sizes (die);
10995
10996 oldsym = die->die_id.die_symbol;
10997 if (oldsym && die->comdat_type_p)
10998 {
10999 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11000
11001 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11002 secname = tmp;
11003 die->die_id.die_symbol = NULL;
11004 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11005 }
11006 else
11007 {
11008 switch_to_section (debug_info_section);
11009 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11010 info_section_emitted = true;
11011 }
11012
11013   /* For LTO cross unit DIE refs we want a symbol at the start of the
11014      debuginfo section, not on the CU DIE.  */
11015 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11016 {
11017 /* ??? No way to get visibility assembled without a decl. */
11018 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11019 get_identifier (oldsym), char_type_node);
11020 TREE_PUBLIC (decl) = true;
11021 TREE_STATIC (decl) = true;
11022 DECL_ARTIFICIAL (decl) = true;
11023 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11024 DECL_VISIBILITY_SPECIFIED (decl) = true;
11025 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11026 #ifdef ASM_WEAKEN_LABEL
11027 /* We prefer a .weak because that handles duplicates from duplicate
11028 archive members in a graceful way. */
11029 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11030 #else
11031 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11032 #endif
11033 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11034 }
11035
11036 /* Output debugging information. */
11037 output_compilation_unit_header (dwo_id
11038 ? DW_UT_split_compile : DW_UT_compile);
11039 if (dwarf_version >= 5)
11040 {
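      /* For split units (DW_UT_split_compile), the DWARF 5 unit header ends
	 with an 8-byte DWO id, emitted byte by byte here, which ties the
	 skeleton unit to the matching .dwo file.  */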
11041 if (dwo_id != NULL)
11042 for (int i = 0; i < 8; i++)
11043 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11044 }
11045 output_die (die);
11046
11047 /* Leave the marks on the main CU, so we can check them in
11048 output_pubnames. */
11049 if (oldsym)
11050 {
11051 unmark_dies (die);
11052 die->die_id.die_symbol = oldsym;
11053 }
11054 }
11055
11056 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11057 and .debug_pubtypes. This is configured per-target, but can be
11058 overridden by the -gpubnames or -gno-pubnames options. */
11059
11060 static inline bool
11061 want_pubnames (void)
11062 {
11063 if (debug_info_level <= DINFO_LEVEL_TERSE)
11064 return false;
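  /* A setting of -1 means no explicit -gpubnames/-gno-pubnames style option
     was given, so fall through to the target default below.  */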
11065 if (debug_generate_pub_sections != -1)
11066 return debug_generate_pub_sections;
11067 return targetm.want_debug_pub_sections;
11068 }
11069
11070 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11071
11072 static void
11073 add_AT_pubnames (dw_die_ref die)
11074 {
11075 if (want_pubnames ())
11076 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11077 }
11078
11079 /* Add a string attribute value to a skeleton DIE. */
11080
11081 static inline void
11082 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11083 const char *str)
11084 {
11085 dw_attr_node attr;
11086 struct indirect_string_node *node;
11087
11088 if (! skeleton_debug_str_hash)
11089 skeleton_debug_str_hash
11090 = hash_table<indirect_string_hasher>::create_ggc (10);
11091
11092 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11093 find_string_form (node);
11094 if (node->form == dwarf_FORM (DW_FORM_strx))
11095 node->form = DW_FORM_strp;
11096
11097 attr.dw_attr = attr_kind;
11098 attr.dw_attr_val.val_class = dw_val_class_str;
11099 attr.dw_attr_val.val_entry = NULL;
11100 attr.dw_attr_val.v.val_str = node;
11101 add_dwarf_attr (die, &attr);
11102 }
11103
11104 /* Helper function to generate top-level dies for skeleton debug_info and
11105 debug_types. */
11106
11107 static void
11108 add_top_level_skeleton_die_attrs (dw_die_ref die)
11109 {
11110 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11111 const char *comp_dir = comp_dir_string ();
11112
11113 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11114 if (comp_dir != NULL)
11115 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11116 add_AT_pubnames (die);
11117 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
11118 }
11119
11120 /* Output skeleton debug sections that point to the dwo file. */
11121
11122 static void
11123 output_skeleton_debug_sections (dw_die_ref comp_unit,
11124 const unsigned char *dwo_id)
11125 {
11126 /* These attributes will be found in the full debug_info section. */
11127 remove_AT (comp_unit, DW_AT_producer);
11128 remove_AT (comp_unit, DW_AT_language);
11129
11130 switch_to_section (debug_skeleton_info_section);
11131 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11132
11133   /* Produce the skeleton compilation-unit header.  This one differs enough
11134      from a normal CU header that it's better not to call
11135      output_compilation_unit_header.  */
11136 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11137 dw2_asm_output_data (4, 0xffffffff,
11138 "Initial length escape value indicating 64-bit "
11139 "DWARF extension");
11140
11141 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11142 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11143 - DWARF_INITIAL_LENGTH_SIZE
11144 + size_of_die (comp_unit),
11145 "Length of Compilation Unit Info");
11146 output_dwarf_version ();
11147 if (dwarf_version >= 5)
11148 {
11149 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11150 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11151 }
11152 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11153 debug_skeleton_abbrev_section,
11154 "Offset Into Abbrev. Section");
11155 if (dwarf_version < 5)
11156 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11157 else
11158 for (int i = 0; i < 8; i++)
11159 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11160
11161 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11162 output_die (comp_unit);
11163
11164 /* Build the skeleton debug_abbrev section. */
11165 switch_to_section (debug_skeleton_abbrev_section);
11166 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11167
11168 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11169
11170 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11171 }
11172
11173 /* Output a comdat type unit DIE and its children. */
11174
11175 static void
11176 output_comdat_type_unit (comdat_type_node *node)
11177 {
11178 const char *secname;
11179 char *tmp;
11180 int i;
11181 #if defined (OBJECT_FORMAT_ELF)
11182 tree comdat_key;
11183 #endif
11184
11185 /* First mark all the DIEs in this CU so we know which get local refs. */
11186 mark_dies (node->root_die);
11187
11188 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11189
11190 build_abbrev_table (node->root_die, extern_map);
11191
11192 delete extern_map;
11193 extern_map = NULL;
11194
11195 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11196 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11197 calc_die_sizes (node->root_die);
11198
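  /* Name the section after the type signature ("wi."/"wt." plus the hex
     signature, or the .gnu.linkonce equivalent) so that identical type
     units coming from different object files can be merged by the linker.  */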
11199 #if defined (OBJECT_FORMAT_ELF)
11200 if (dwarf_version >= 5)
11201 {
11202 if (!dwarf_split_debug_info)
11203 secname = ".debug_info";
11204 else
11205 secname = ".debug_info.dwo";
11206 }
11207 else if (!dwarf_split_debug_info)
11208 secname = ".debug_types";
11209 else
11210 secname = ".debug_types.dwo";
11211
11212 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11213 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11214 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11215 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11216 comdat_key = get_identifier (tmp);
11217 targetm.asm_out.named_section (secname,
11218 SECTION_DEBUG | SECTION_LINKONCE,
11219 comdat_key);
11220 #else
11221 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11222 sprintf (tmp, (dwarf_version >= 5
11223 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11224 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11225 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11226 secname = tmp;
11227 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11228 #endif
11229
11230 /* Output debugging information. */
11231 output_compilation_unit_header (dwarf_split_debug_info
11232 ? DW_UT_split_type : DW_UT_type);
11233 output_signature (node->signature, "Type Signature");
11234 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11235 "Offset to Type DIE");
11236 output_die (node->root_die);
11237
11238 unmark_dies (node->root_die);
11239 }
11240
11241 /* Return the DWARF2/3 pubname associated with a decl. */
11242
11243 static const char *
11244 dwarf2_name (tree decl, int scope)
11245 {
11246 if (DECL_NAMELESS (decl))
11247 return NULL;
11248 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11249 }
11250
11251 /* Add a new entry to .debug_pubnames if appropriate. */
11252
11253 static void
11254 add_pubname_string (const char *str, dw_die_ref die)
11255 {
11256 pubname_entry e;
11257
11258 e.die = die;
11259 e.name = xstrdup (str);
11260 vec_safe_push (pubname_table, e);
11261 }
11262
11263 static void
11264 add_pubname (tree decl, dw_die_ref die)
11265 {
11266 if (!want_pubnames ())
11267 return;
11268
11269 /* Don't add items to the table when we expect that the consumer will have
11270 just read the enclosing die. For example, if the consumer is looking at a
11271 class_member, it will either be inside the class already, or will have just
11272 looked up the class to find the member. Either way, searching the class is
11273 faster than searching the index. */
11274 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11275 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11276 {
11277 const char *name = dwarf2_name (decl, 1);
11278
11279 if (name)
11280 add_pubname_string (name, die);
11281 }
11282 }
11283
11284 /* Add an enumerator to the pubnames section. */
11285
11286 static void
11287 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11288 {
11289 pubname_entry e;
11290
11291 gcc_assert (scope_name);
11292 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11293 e.die = die;
11294 vec_safe_push (pubname_table, e);
11295 }
11296
11297 /* Add a new entry to .debug_pubtypes if appropriate. */
11298
11299 static void
11300 add_pubtype (tree decl, dw_die_ref die)
11301 {
11302 pubname_entry e;
11303
11304 if (!want_pubnames ())
11305 return;
11306
11307 if ((TREE_PUBLIC (decl)
11308 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11309 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11310 {
11311 tree scope = NULL;
11312 const char *scope_name = "";
11313 const char *sep = is_cxx () ? "::" : ".";
11314 const char *name;
11315
11316 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11317 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11318 {
11319 scope_name = lang_hooks.dwarf_name (scope, 1);
11320 if (scope_name != NULL && scope_name[0] != '\0')
11321 scope_name = concat (scope_name, sep, NULL);
11322 else
11323 scope_name = "";
11324 }
11325
11326 if (TYPE_P (decl))
11327 name = type_tag (decl);
11328 else
11329 name = lang_hooks.dwarf_name (decl, 1);
11330
11331 /* If we don't have a name for the type, there's no point in adding
11332 it to the table. */
11333 if (name != NULL && name[0] != '\0')
11334 {
11335 e.die = die;
11336 e.name = concat (scope_name, name, NULL);
11337 vec_safe_push (pubtype_table, e);
11338 }
11339
11340 /* Although it might be more consistent to add the pubinfo for the
11341 enumerators as their dies are created, they should only be added if the
11342 enum type meets the criteria above. So rather than re-check the parent
11343 enum type whenever an enumerator die is created, just output them all
11344 here. This isn't protected by the name conditional because anonymous
11345 enums don't have names. */
11346 if (die->die_tag == DW_TAG_enumeration_type)
11347 {
11348 dw_die_ref c;
11349
11350 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11351 }
11352 }
11353 }
11354
11355 /* Output a single entry in the pubnames table. */
11356
11357 static void
11358 output_pubname (dw_offset die_offset, pubname_entry *entry)
11359 {
11360 dw_die_ref die = entry->die;
11361 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11362
11363 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11364
11365 if (debug_generate_pub_sections == 2)
11366 {
11367 /* This logic follows gdb's method for determining the value of the flag
11368 byte. */
11369 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11370 switch (die->die_tag)
11371 {
11372 case DW_TAG_typedef:
11373 case DW_TAG_base_type:
11374 case DW_TAG_subrange_type:
11375 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11376 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11377 break;
11378 case DW_TAG_enumerator:
11379 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11380 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11381 if (!is_cxx ())
11382 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11383 break;
11384 case DW_TAG_subprogram:
11385 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11386 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11387 if (!is_ada ())
11388 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11389 break;
11390 case DW_TAG_constant:
11391 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11392 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11393 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11394 break;
11395 case DW_TAG_variable:
11396 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11397 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11398 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11399 break;
11400 case DW_TAG_namespace:
11401 case DW_TAG_imported_declaration:
11402 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11403 break;
11404 case DW_TAG_class_type:
11405 case DW_TAG_interface_type:
11406 case DW_TAG_structure_type:
11407 case DW_TAG_union_type:
11408 case DW_TAG_enumeration_type:
11409 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11410 if (!is_cxx ())
11411 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11412 break;
11413 default:
11414 /* An unusual tag. Leave the flag-byte empty. */
11415 break;
11416 }
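      /* The kind and static bits live in the high bits of the 32-bit GDB
	 index value (the low GDB_INDEX_CU_BITSIZE bits hold the CU index),
	 so shifting right by GDB_INDEX_CU_BITSIZE below leaves just the
	 one-byte flag field.  */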
11417 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11418 "GDB-index flags");
11419 }
11420
11421 dw2_asm_output_nstring (entry->name, -1, "external name");
11422 }
11423
11424
11425 /* Output the public names table used to speed up access to externally
11426 visible names; or the public types table used to find type definitions. */
11427
11428 static void
11429 output_pubnames (vec<pubname_entry, va_gc> *names)
11430 {
11431 unsigned i;
11432 unsigned long pubnames_length = size_of_pubnames (names);
11433 pubname_entry *pub;
11434
11435 if (!XCOFF_DEBUGGING_INFO)
11436 {
11437 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11438 dw2_asm_output_data (4, 0xffffffff,
11439 "Initial length escape value indicating 64-bit DWARF extension");
11440 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11441 "Pub Info Length");
11442 }
11443
11444 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11445 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11446
11447 if (dwarf_split_debug_info)
11448 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11449 debug_skeleton_info_section,
11450 "Offset of Compilation Unit Info");
11451 else
11452 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11453 debug_info_section,
11454 "Offset of Compilation Unit Info");
11455 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11456 "Compilation Unit Length");
11457
11458 FOR_EACH_VEC_ELT (*names, i, pub)
11459 {
11460 if (include_pubname_in_output (names, pub))
11461 {
11462 dw_offset die_offset = pub->die->die_offset;
11463
11464 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11465 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11466 gcc_assert (pub->die->die_mark);
11467
11468 /* If we're putting types in their own .debug_types sections,
11469 the .debug_pubtypes table will still point to the compile
11470 unit (not the type unit), so we want to use the offset of
11471 the skeleton DIE (if there is one). */
11472 if (pub->die->comdat_type_p && names == pubtype_table)
11473 {
11474 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11475
11476 if (type_node != NULL)
11477 die_offset = (type_node->skeleton_die != NULL
11478 ? type_node->skeleton_die->die_offset
11479 : comp_unit_die ()->die_offset);
11480 }
11481
11482 output_pubname (die_offset, pub);
11483 }
11484 }
11485
11486 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11487 }
11488
11489 /* Output public names and types tables if necessary. */
11490
11491 static void
11492 output_pubtables (void)
11493 {
11494 if (!want_pubnames () || !info_section_emitted)
11495 return;
11496
11497 switch_to_section (debug_pubnames_section);
11498 output_pubnames (pubname_table);
11499 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11500 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11501 simply won't look for the section. */
11502 switch_to_section (debug_pubtypes_section);
11503 output_pubnames (pubtype_table);
11504 }
11505
11506
11507 /* Output the information that goes into the .debug_aranges table.
11508 Namely, define the beginning and ending address range of the
11509 text section generated for this compilation unit. */
11510
11511 static void
11512 output_aranges (void)
11513 {
11514 unsigned i;
11515 unsigned long aranges_length = size_of_aranges ();
11516
11517 if (!XCOFF_DEBUGGING_INFO)
11518 {
11519 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11520 dw2_asm_output_data (4, 0xffffffff,
11521 "Initial length escape value indicating 64-bit DWARF extension");
11522 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11523 "Length of Address Ranges Info");
11524 }
11525
11526 /* Version number for aranges is still 2, even up to DWARF5. */
11527 dw2_asm_output_data (2, 2, "DWARF aranges version");
11528 if (dwarf_split_debug_info)
11529 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11530 debug_skeleton_info_section,
11531 "Offset of Compilation Unit Info");
11532 else
11533 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11534 debug_info_section,
11535 "Offset of Compilation Unit Info");
11536 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11537 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11538
11539 /* We need to align to twice the pointer size here. */
11540 if (DWARF_ARANGES_PAD_SIZE)
11541 {
11542       /* Pad using 2 byte words so that the padding is correct for any
11543 	 pointer size.  */
11544 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11545 2 * DWARF2_ADDR_SIZE);
11546 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11547 dw2_asm_output_data (2, 0, NULL);
11548 }
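  /* For example (assuming 32-bit DWARF): the header so far is
     4 (length) + 2 (version) + 4 (CU offset) + 1 + 1 = 12 bytes, so with
     8-byte addresses the address/length pairs start at offset 16, after
     the 4 bytes of zero padding emitted above.  */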
11549
11550   /* We must not output these entries if the sections were not used;
11551      otherwise the length will be 0 and the address may end up as 0
11552      if the section is discarded by ld --gc-sections, leaving an
11553      invalid (0, 0) entry that can be confused with the
11554      terminator.  */
11555 if (text_section_used)
11556 {
11557 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11558 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11559 text_section_label, "Length");
11560 }
11561 if (cold_text_section_used)
11562 {
11563 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11564 "Address");
11565 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11566 cold_text_section_label, "Length");
11567 }
11568
11569 if (have_multiple_function_sections)
11570 {
11571 unsigned fde_idx;
11572 dw_fde_ref fde;
11573
11574 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11575 {
11576 if (DECL_IGNORED_P (fde->decl))
11577 continue;
11578 if (!fde->in_std_section)
11579 {
11580 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11581 "Address");
11582 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11583 fde->dw_fde_begin, "Length");
11584 }
11585 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11586 {
11587 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11588 "Address");
11589 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11590 fde->dw_fde_second_begin, "Length");
11591 }
11592 }
11593 }
11594
11595 /* Output the terminator words. */
11596 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11597 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11598 }
11599
11600 /* Add a new entry to .debug_ranges. Return its index into
11601 ranges_table vector. */
11602
11603 static unsigned int
11604 add_ranges_num (int num, bool maybe_new_sec)
11605 {
11606 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11607 vec_safe_push (ranges_table, r);
11608 return vec_safe_length (ranges_table) - 1;
11609 }
11610
11611 /* Add a new entry to .debug_ranges corresponding to a block, or a
11612 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11613 this entry might be in a different section from previous range. */
11614
11615 static unsigned int
11616 add_ranges (const_tree block, bool maybe_new_sec)
11617 {
11618 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11619 }
11620
11621 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11622    chain, or a middle entry of a chain that will be directly referred to.  */
11623
11624 static void
11625 note_rnglist_head (unsigned int offset)
11626 {
11627 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11628 return;
11629 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11630 }
11631
11632 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11633 When using dwarf_split_debug_info, address attributes in dies destined
11634 for the final executable should be direct references--setting the
11635 parameter force_direct ensures this behavior. */
11636
11637 static void
11638 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11639 bool *added, bool force_direct)
11640 {
11641 unsigned int in_use = vec_safe_length (ranges_by_label);
11642 unsigned int offset;
11643 dw_ranges_by_label rbl = { begin, end };
11644 vec_safe_push (ranges_by_label, rbl);
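  /* Encode the ranges_by_label index as a negative num (-idx - 1) so that
     index 0 is distinguishable from the 0 used as a range terminator;
     output_ranges/output_rnglists undo this with -num - 1.  */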
11645 offset = add_ranges_num (-(int)in_use - 1, true);
11646 if (!*added)
11647 {
11648 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11649 *added = true;
11650 note_rnglist_head (offset);
11651 }
11652 }
11653
11654 /* Emit .debug_ranges section. */
11655
11656 static void
11657 output_ranges (void)
11658 {
11659 unsigned i;
11660 static const char *const start_fmt = "Offset %#x";
11661 const char *fmt = start_fmt;
11662 dw_ranges *r;
11663
11664 switch_to_section (debug_ranges_section);
11665 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11666 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11667 {
11668 int block_num = r->num;
11669
11670 if (block_num > 0)
11671 {
11672 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11673 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11674
11675 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11676 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11677
11678 /* If all code is in the text section, then the compilation
11679 unit base address defaults to DW_AT_low_pc, which is the
11680 base of the text section. */
11681 if (!have_multiple_function_sections)
11682 {
11683 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11684 text_section_label,
11685 fmt, i * 2 * DWARF2_ADDR_SIZE);
11686 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11687 text_section_label, NULL);
11688 }
11689
11690 /* Otherwise, the compilation unit base address is zero,
11691 which allows us to use absolute addresses, and not worry
11692 about whether the target supports cross-section
11693 arithmetic. */
11694 else
11695 {
11696 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11697 fmt, i * 2 * DWARF2_ADDR_SIZE);
11698 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11699 }
11700
11701 fmt = NULL;
11702 }
11703
11704 /* Negative block_num stands for an index into ranges_by_label. */
11705 else if (block_num < 0)
11706 {
11707 int lab_idx = - block_num - 1;
11708
11709 if (!have_multiple_function_sections)
11710 {
11711 gcc_unreachable ();
11712 #if 0
11713 /* If we ever use add_ranges_by_labels () for a single
11714 function section, all we have to do is to take out
11715 the #if 0 above. */
11716 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11717 (*ranges_by_label)[lab_idx].begin,
11718 text_section_label,
11719 fmt, i * 2 * DWARF2_ADDR_SIZE);
11720 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11721 (*ranges_by_label)[lab_idx].end,
11722 text_section_label, NULL);
11723 #endif
11724 }
11725 else
11726 {
11727 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11728 (*ranges_by_label)[lab_idx].begin,
11729 fmt, i * 2 * DWARF2_ADDR_SIZE);
11730 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11731 (*ranges_by_label)[lab_idx].end,
11732 NULL);
11733 }
11734 }
11735 else
11736 {
11737 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11738 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11739 fmt = start_fmt;
11740 }
11741 }
11742 }
11743
11744 /* Non-zero if .debug_line_str should be used for .debug_line section
11745 strings or strings that are likely shareable with those. */
11746 #define DWARF5_USE_DEBUG_LINE_STR \
11747 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11748 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11749 /* FIXME: there is no .debug_line_str.dwo section, \
11750 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11751 && !dwarf_split_debug_info)
11752
11753 /* Assign .debug_rnglists indexes. */
11754
11755 static void
11756 index_rnglists (void)
11757 {
11758 unsigned i;
11759 dw_ranges *r;
11760
11761 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11762 if (r->label)
11763 r->idx = rnglist_idx++;
11764 }
11765
11766 /* Emit .debug_rnglists section. */
11767
11768 static void
11769 output_rnglists (unsigned generation)
11770 {
11771 unsigned i;
11772 dw_ranges *r;
11773 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11774 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11775 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11776
11777 switch_to_section (debug_ranges_section);
11778 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11779 /* There are up to 4 unique ranges labels per generation.
11780 See also init_sections_and_labels. */
11781 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11782 2 + generation * 4);
11783 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11784 3 + generation * 4);
11785 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11786 dw2_asm_output_data (4, 0xffffffff,
11787 "Initial length escape value indicating "
11788 "64-bit DWARF extension");
11789 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11790 "Length of Range Lists");
11791 ASM_OUTPUT_LABEL (asm_out_file, l1);
11792 output_dwarf_version ();
11793 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11794 dw2_asm_output_data (1, 0, "Segment Size");
11795 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11796 about relocation sizes and primarily care about the size of .debug*
11797 sections in linked shared libraries and executables, then
11798 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11799 into it are usually larger than just DW_FORM_sec_offset offsets
11800 into the .debug_rnglists section. */
11801 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11802 "Offset Entry Count");
11803 if (dwarf_split_debug_info)
11804 {
11805 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11806 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11807 if (r->label)
11808 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11809 ranges_base_label, NULL);
11810 }
11811
11812 const char *lab = "";
11813 unsigned int len = vec_safe_length (ranges_table);
11814 const char *base = NULL;
11815 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11816 {
11817 int block_num = r->num;
11818
11819 if (r->label)
11820 {
11821 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11822 lab = r->label;
11823 }
11824 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11825 base = NULL;
11826 if (block_num > 0)
11827 {
11828 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11829 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11830
11831 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11832 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11833
11834 if (HAVE_AS_LEB128)
11835 {
11836 /* If all code is in the text section, then the compilation
11837 unit base address defaults to DW_AT_low_pc, which is the
11838 base of the text section. */
11839 if (!have_multiple_function_sections)
11840 {
11841 dw2_asm_output_data (1, DW_RLE_offset_pair,
11842 "DW_RLE_offset_pair (%s)", lab);
11843 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11844 "Range begin address (%s)", lab);
11845 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11846 "Range end address (%s)", lab);
11847 continue;
11848 }
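	      /* When several consecutive ranges live in the same code
		 section, emitting one DW_RLE_base_address followed by
		 compact DW_RLE_offset_pair ULEB128 deltas is smaller than
		 repeating full addresses with DW_RLE_start_length; that is
		 what the base-address selection below tries to achieve.  */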
11849 if (base == NULL)
11850 {
11851 dw_ranges *r2 = NULL;
11852 if (i < len - 1)
11853 r2 = &(*ranges_table)[i + 1];
11854 if (r2
11855 && r2->num != 0
11856 && r2->label == NULL
11857 && !r2->maybe_new_sec)
11858 {
11859 dw2_asm_output_data (1, DW_RLE_base_address,
11860 "DW_RLE_base_address (%s)", lab);
11861 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11862 "Base address (%s)", lab);
11863 strcpy (basebuf, blabel);
11864 base = basebuf;
11865 }
11866 }
11867 if (base)
11868 {
11869 dw2_asm_output_data (1, DW_RLE_offset_pair,
11870 "DW_RLE_offset_pair (%s)", lab);
11871 dw2_asm_output_delta_uleb128 (blabel, base,
11872 "Range begin address (%s)", lab);
11873 dw2_asm_output_delta_uleb128 (elabel, base,
11874 "Range end address (%s)", lab);
11875 continue;
11876 }
11877 dw2_asm_output_data (1, DW_RLE_start_length,
11878 "DW_RLE_start_length (%s)", lab);
11879 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11880 "Range begin address (%s)", lab);
11881 dw2_asm_output_delta_uleb128 (elabel, blabel,
11882 "Range length (%s)", lab);
11883 }
11884 else
11885 {
11886 dw2_asm_output_data (1, DW_RLE_start_end,
11887 "DW_RLE_start_end (%s)", lab);
11888 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11889 "Range begin address (%s)", lab);
11890 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11891 "Range end address (%s)", lab);
11892 }
11893 }
11894
11895 /* Negative block_num stands for an index into ranges_by_label. */
11896 else if (block_num < 0)
11897 {
11898 int lab_idx = - block_num - 1;
11899 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11900 const char *elabel = (*ranges_by_label)[lab_idx].end;
11901
11902 if (!have_multiple_function_sections)
11903 gcc_unreachable ();
11904 if (HAVE_AS_LEB128)
11905 {
11906 dw2_asm_output_data (1, DW_RLE_start_length,
11907 "DW_RLE_start_length (%s)", lab);
11908 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11909 "Range begin address (%s)", lab);
11910 dw2_asm_output_delta_uleb128 (elabel, blabel,
11911 "Range length (%s)", lab);
11912 }
11913 else
11914 {
11915 dw2_asm_output_data (1, DW_RLE_start_end,
11916 "DW_RLE_start_end (%s)", lab);
11917 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11918 "Range begin address (%s)", lab);
11919 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11920 "Range end address (%s)", lab);
11921 }
11922 }
11923 else
11924 dw2_asm_output_data (1, DW_RLE_end_of_list,
11925 "DW_RLE_end_of_list (%s)", lab);
11926 }
11927 ASM_OUTPUT_LABEL (asm_out_file, l2);
11928 }
11929
11930 /* Data structure containing information about input files. */
11931 struct file_info
11932 {
11933 const char *path; /* Complete file name. */
11934 const char *fname; /* File name part. */
11935 int length; /* Length of entire string. */
11936 struct dwarf_file_data * file_idx; /* Index in input file table. */
11937 int dir_idx; /* Index in directory table. */
11938 };
11939
11940 /* Data structure containing information about directories with source
11941 files. */
11942 struct dir_info
11943 {
11944 const char *path; /* Path including directory name. */
11945 int length; /* Path length. */
11946 int prefix; /* Index of directory entry which is a prefix. */
11947 int count; /* Number of files in this directory. */
11948 int dir_idx; /* Index of directory used as base. */
11949 };
11950
11951 /* Callback function for file_info comparison. We sort by looking at
11952 the directories in the path. */
11953
11954 static int
11955 file_info_cmp (const void *p1, const void *p2)
11956 {
11957 const struct file_info *const s1 = (const struct file_info *) p1;
11958 const struct file_info *const s2 = (const struct file_info *) p2;
11959 const unsigned char *cp1;
11960 const unsigned char *cp2;
11961
11962   /* Take care of file names without directories.  We need to return
11963      consistent values to qsort, since some implementations get confused
11964      if comparing the same operands in opposite orders does not yield
11965      opposite results.  So if neither has a directory, return 0; otherwise
11966      return 1 or -1 depending on which one has the directory.  We want the
11967      one with the directory to sort after the one without, so all files
11968      without a directory are at the start (normally only the compilation unit file).  */
11969 if ((s1->path == s1->fname || s2->path == s2->fname))
11970 return (s2->path == s2->fname) - (s1->path == s1->fname);
11971
11972 cp1 = (const unsigned char *) s1->path;
11973 cp2 = (const unsigned char *) s2->path;
11974
11975 while (1)
11976 {
11977 ++cp1;
11978 ++cp2;
11979 /* Reached the end of the first path? If so, handle like above,
11980 but now we want longer directory prefixes before shorter ones. */
11981 if ((cp1 == (const unsigned char *) s1->fname)
11982 || (cp2 == (const unsigned char *) s2->fname))
11983 return ((cp1 == (const unsigned char *) s1->fname)
11984 - (cp2 == (const unsigned char *) s2->fname));
11985
11986       /* Do the characters of the current path component differ?  */
11987 else if (*cp1 != *cp2)
11988 return *cp1 - *cp2;
11989 }
11990 }
11991
11992 struct file_name_acquire_data
11993 {
11994 struct file_info *files;
11995 int used_files;
11996 int max_files;
11997 };
11998
11999 /* Traversal function for the hash table. */
12000
12001 int
12002 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12003 {
12004 struct dwarf_file_data *d = *slot;
12005 struct file_info *fi;
12006 const char *f;
12007
12008 gcc_assert (fnad->max_files >= d->emitted_number);
12009
12010 if (! d->emitted_number)
12011 return 1;
12012
12013 gcc_assert (fnad->max_files != fnad->used_files);
12014
12015 fi = fnad->files + fnad->used_files++;
12016
12017 /* Skip all leading "./". */
12018 f = d->filename;
12019 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12020 f += 2;
12021
12022 /* Create a new array entry. */
12023 fi->path = f;
12024 fi->length = strlen (f);
12025 fi->file_idx = d;
12026
12027 /* Search for the file name part. */
12028 f = strrchr (f, DIR_SEPARATOR);
12029 #if defined (DIR_SEPARATOR_2)
12030 {
12031 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12032
12033 if (g != NULL)
12034 {
12035 if (f == NULL || f < g)
12036 f = g;
12037 }
12038 }
12039 #endif
12040
12041 fi->fname = f == NULL ? fi->path : f + 1;
12042 return 1;
12043 }
12044
12045 /* Helper function for output_file_names.  Emit a FORM encoded
12046    string STR, with an assembly comment giving ENTRY_KIND and
12047    index IDX.  */
12048
12049 static void
12050 output_line_string (enum dwarf_form form, const char *str,
12051 const char *entry_kind, unsigned int idx)
12052 {
12053 switch (form)
12054 {
12055 case DW_FORM_string:
12056 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12057 break;
12058 case DW_FORM_line_strp:
12059 if (!debug_line_str_hash)
12060 debug_line_str_hash
12061 = hash_table<indirect_string_hasher>::create_ggc (10);
12062
12063 struct indirect_string_node *node;
12064 node = find_AT_string_in_table (str, debug_line_str_hash);
12065 set_indirect_string (node);
12066 node->form = form;
12067 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12068 debug_line_str_section, "%s: %#x: \"%s\"",
12069 entry_kind, 0, node->str);
12070 break;
12071 default:
12072 gcc_unreachable ();
12073 }
12074 }
12075
12076 /* Output the directory table and the file name table. We try to minimize
12077 the total amount of memory needed. A heuristic is used to avoid large
12078 slowdowns with many input files. */
12079
12080 static void
12081 output_file_names (void)
12082 {
12083 struct file_name_acquire_data fnad;
12084 int numfiles;
12085 struct file_info *files;
12086 struct dir_info *dirs;
12087 int *saved;
12088 int *savehere;
12089 int *backmap;
12090 int ndirs;
12091 int idx_offset;
12092 int i;
12093
12094 if (!last_emitted_file)
12095 {
12096 if (dwarf_version >= 5)
12097 {
12098 dw2_asm_output_data (1, 0, "Directory entry format count");
12099 dw2_asm_output_data_uleb128 (0, "Directories count");
12100 dw2_asm_output_data (1, 0, "File name entry format count");
12101 dw2_asm_output_data_uleb128 (0, "File names count");
12102 }
12103 else
12104 {
12105 dw2_asm_output_data (1, 0, "End directory table");
12106 dw2_asm_output_data (1, 0, "End file name table");
12107 }
12108 return;
12109 }
12110
12111 numfiles = last_emitted_file->emitted_number;
12112
12113 /* Allocate the various arrays we need. */
12114 files = XALLOCAVEC (struct file_info, numfiles);
12115 dirs = XALLOCAVEC (struct dir_info, numfiles);
12116
12117 fnad.files = files;
12118 fnad.used_files = 0;
12119 fnad.max_files = numfiles;
12120 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12121 gcc_assert (fnad.used_files == fnad.max_files);
12122
12123 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12124
12125 /* Find all the different directories used. */
12126 dirs[0].path = files[0].path;
12127 dirs[0].length = files[0].fname - files[0].path;
12128 dirs[0].prefix = -1;
12129 dirs[0].count = 1;
12130 dirs[0].dir_idx = 0;
12131 files[0].dir_idx = 0;
12132 ndirs = 1;
12133
12134 for (i = 1; i < numfiles; i++)
12135 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12136 && memcmp (dirs[ndirs - 1].path, files[i].path,
12137 dirs[ndirs - 1].length) == 0)
12138 {
12139 /* Same directory as last entry. */
12140 files[i].dir_idx = ndirs - 1;
12141 ++dirs[ndirs - 1].count;
12142 }
12143 else
12144 {
12145 int j;
12146
12147 /* This is a new directory. */
12148 dirs[ndirs].path = files[i].path;
12149 dirs[ndirs].length = files[i].fname - files[i].path;
12150 dirs[ndirs].count = 1;
12151 dirs[ndirs].dir_idx = ndirs;
12152 files[i].dir_idx = ndirs;
12153
12154 /* Search for a prefix. */
12155 dirs[ndirs].prefix = -1;
12156 for (j = 0; j < ndirs; j++)
12157 if (dirs[j].length < dirs[ndirs].length
12158 && dirs[j].length > 1
12159 && (dirs[ndirs].prefix == -1
12160 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12161 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12162 dirs[ndirs].prefix = j;
12163
12164 ++ndirs;
12165 }
12166
12167   /* Now to the actual work.  We have to find a subset of the directories which
12168      allow expressing the file names using references to the directory table
12169      with the fewest characters.  We do not do an exhaustive search
12170      where we would have to check every combination of every single
12171      possible prefix.  Instead we use a heuristic which provides nearly optimal
12172      results in most cases and is never far off.  */
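  /* Illustrative example (not from the sources): with directories
     "/usr/include/" and "/usr/include/sys/", making the former a prefix of
     the latter lets every file under sys/ drop the shared 13 leading
     characters, which is what the saved/savehere bookkeeping measures.  */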
12173 saved = XALLOCAVEC (int, ndirs);
12174 savehere = XALLOCAVEC (int, ndirs);
12175
12176 memset (saved, '\0', ndirs * sizeof (saved[0]));
12177 for (i = 0; i < ndirs; i++)
12178 {
12179 int j;
12180 int total;
12181
12182 /* We can always save some space for the current directory. But this
12183 does not mean it will be enough to justify adding the directory. */
12184 savehere[i] = dirs[i].length;
12185 total = (savehere[i] - saved[i]) * dirs[i].count;
12186
12187 for (j = i + 1; j < ndirs; j++)
12188 {
12189 savehere[j] = 0;
12190 if (saved[j] < dirs[i].length)
12191 {
12192 /* Determine whether the dirs[i] path is a prefix of the
12193 dirs[j] path. */
12194 int k;
12195
12196 k = dirs[j].prefix;
12197 while (k != -1 && k != (int) i)
12198 k = dirs[k].prefix;
12199
12200 if (k == (int) i)
12201 {
12202 /* Yes it is. We can possibly save some memory by
12203 writing the filenames in dirs[j] relative to
12204 dirs[i]. */
12205 savehere[j] = dirs[i].length;
12206 total += (savehere[j] - saved[j]) * dirs[j].count;
12207 }
12208 }
12209 }
12210
12211 /* Check whether we can save enough to justify adding the dirs[i]
12212 directory. */
12213 if (total > dirs[i].length + 1)
12214 {
12215 /* It's worthwhile adding. */
12216 for (j = i; j < ndirs; j++)
12217 if (savehere[j] > 0)
12218 {
12219 /* Remember how much we saved for this directory so far. */
12220 saved[j] = savehere[j];
12221
12222 /* Remember the prefix directory. */
12223 dirs[j].dir_idx = i;
12224 }
12225 }
12226 }
12227
12228 /* Emit the directory name table. */
12229 idx_offset = dirs[0].length > 0 ? 1 : 0;
12230 enum dwarf_form str_form = DW_FORM_string;
12231 enum dwarf_form idx_form = DW_FORM_udata;
12232 if (dwarf_version >= 5)
12233 {
12234 const char *comp_dir = comp_dir_string ();
12235 if (comp_dir == NULL)
12236 comp_dir = "";
12237 dw2_asm_output_data (1, 1, "Directory entry format count");
12238 if (DWARF5_USE_DEBUG_LINE_STR)
12239 str_form = DW_FORM_line_strp;
12240 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12241 dw2_asm_output_data_uleb128 (str_form, "%s",
12242 get_DW_FORM_name (str_form));
12243 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12244 if (str_form == DW_FORM_string)
12245 {
12246 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12247 for (i = 1 - idx_offset; i < ndirs; i++)
12248 dw2_asm_output_nstring (dirs[i].path,
12249 dirs[i].length
12250 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12251 "Directory Entry: %#x", i + idx_offset);
12252 }
12253 else
12254 {
12255 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12256 for (i = 1 - idx_offset; i < ndirs; i++)
12257 {
12258 const char *str
12259 = ggc_alloc_string (dirs[i].path,
12260 dirs[i].length
12261 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12262 output_line_string (str_form, str, "Directory Entry",
12263 (unsigned) i + idx_offset);
12264 }
12265 }
12266 }
12267 else
12268 {
12269 for (i = 1 - idx_offset; i < ndirs; i++)
12270 dw2_asm_output_nstring (dirs[i].path,
12271 dirs[i].length
12272 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12273 "Directory Entry: %#x", i + idx_offset);
12274
12275 dw2_asm_output_data (1, 0, "End directory table");
12276 }
12277
12278 /* We have to emit them in the order of emitted_number since that's
12279 used in the debug info generation. To do this efficiently we
12280 generate a back-mapping of the indices first. */
12281 backmap = XALLOCAVEC (int, numfiles);
12282 for (i = 0; i < numfiles; i++)
12283 backmap[files[i].file_idx->emitted_number - 1] = i;
12284
12285 if (dwarf_version >= 5)
12286 {
12287 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12288 if (filename0 == NULL)
12289 filename0 = "";
12290       /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12291 	 DW_FORM_data2.  Choose one based on the number of directories
12292 	 and how much space they would occupy in each encoding.
12293 	 If we have at most 256 directories, all indexes fit into
12294 	 a single byte, so DW_FORM_data1 is most compact (with at most
12295 	 128 directories, DW_FORM_udata would be just as compact, but
12296 	 no shorter and slower to decode).  */
12297 if (ndirs + idx_offset <= 256)
12298 idx_form = DW_FORM_data1;
12299       /* If there are more than 65536 directories, we have to use
12300 	 DW_FORM_udata, as DW_FORM_data2 can't refer to them.
12301 	 Otherwise, compute how much space all the indexes would occupy
12302 	 if they used DW_FORM_udata - sum - compare that to the size of
12303 	 the DW_FORM_data2 encoding, and pick the more efficient one.  */
12304 else if (ndirs + idx_offset <= 65536)
12305 {
12306 unsigned HOST_WIDE_INT sum = 1;
12307 for (i = 0; i < numfiles; i++)
12308 {
12309 int file_idx = backmap[i];
12310 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12311 sum += size_of_uleb128 (dir_idx);
12312 }
12313 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12314 idx_form = DW_FORM_data2;
12315 }
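      /* Worked example (illustrative only): a ULEB128 directory index takes
	 1 byte for values 0-127 and 2 bytes for 128-16383, while
	 DW_FORM_data2 always takes 2 bytes.  So with many directories above
	 index 127 the fixed 2-byte form wins, whereas with mostly small
	 indexes DW_FORM_udata stays smaller and is kept.  */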
12316 #ifdef VMS_DEBUGGING_INFO
12317 dw2_asm_output_data (1, 4, "File name entry format count");
12318 #else
12319 dw2_asm_output_data (1, 2, "File name entry format count");
12320 #endif
12321 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12322 dw2_asm_output_data_uleb128 (str_form, "%s",
12323 get_DW_FORM_name (str_form));
12324 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12325 "DW_LNCT_directory_index");
12326 dw2_asm_output_data_uleb128 (idx_form, "%s",
12327 get_DW_FORM_name (idx_form));
12328 #ifdef VMS_DEBUGGING_INFO
12329 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12330 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12331 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12332 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12333 #endif
12334 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12335
12336 output_line_string (str_form, filename0, "File Entry", 0);
12337
12338 /* Include directory index. */
12339 if (idx_form != DW_FORM_udata)
12340 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12341 0, NULL);
12342 else
12343 dw2_asm_output_data_uleb128 (0, NULL);
12344
12345 #ifdef VMS_DEBUGGING_INFO
12346 dw2_asm_output_data_uleb128 (0, NULL);
12347 dw2_asm_output_data_uleb128 (0, NULL);
12348 #endif
12349 }
12350
12351 /* Now write all the file names. */
12352 for (i = 0; i < numfiles; i++)
12353 {
12354 int file_idx = backmap[i];
12355 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12356
12357 #ifdef VMS_DEBUGGING_INFO
12358 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12359
12360 /* Setting these fields can lead to debugger miscomparisons,
12361 but VMS Debug requires them to be set correctly. */
12362
12363 int ver;
12364 long long cdt;
12365 long siz;
12366 int maxfilelen = (strlen (files[file_idx].path)
12367 + dirs[dir_idx].length
12368 + MAX_VMS_VERSION_LEN + 1);
12369 char *filebuf = XALLOCAVEC (char, maxfilelen);
12370
12371 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12372 snprintf (filebuf, maxfilelen, "%s;%d",
12373 files[file_idx].path + dirs[dir_idx].length, ver);
12374
12375 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12376
12377 /* Include directory index. */
12378 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12379 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12380 dir_idx + idx_offset, NULL);
12381 else
12382 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12383
12384 /* Modification time. */
12385 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12386 &cdt, 0, 0, 0) == 0)
12387 ? cdt : 0, NULL);
12388
12389 /* File length in bytes. */
12390 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12391 0, &siz, 0, 0) == 0)
12392 ? siz : 0, NULL);
12393 #else
12394 output_line_string (str_form,
12395 files[file_idx].path + dirs[dir_idx].length,
12396 "File Entry", (unsigned) i + 1);
12397
12398 /* Include directory index. */
12399 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12400 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12401 dir_idx + idx_offset, NULL);
12402 else
12403 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12404
12405 if (dwarf_version >= 5)
12406 continue;
12407
12408 /* Modification time. */
12409 dw2_asm_output_data_uleb128 (0, NULL);
12410
12411 /* File length in bytes. */
12412 dw2_asm_output_data_uleb128 (0, NULL);
12413 #endif /* VMS_DEBUGGING_INFO */
12414 }
12415
12416 if (dwarf_version < 5)
12417 dw2_asm_output_data (1, 0, "End file name table");
12418 }
12419
12420
12421 /* Output one line number table into the .debug_line section. */
12422
12423 static void
12424 output_one_line_info_table (dw_line_info_table *table)
12425 {
12426 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12427 unsigned int current_line = 1;
12428 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12429 dw_line_info_entry *ent, *prev_addr;
12430 size_t i;
12431 unsigned int view;
12432
12433 view = 0;
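  /* The "view" number disambiguates multiple line-table rows that share the
     same address (used with -gvariable-location-views); it restarts at 0 on
     every DW_LNE_set_address and is bumped on each fixed advance below.  */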
12434
12435 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12436 {
12437 switch (ent->opcode)
12438 {
12439 case LI_set_address:
12440 /* ??? Unfortunately, we have little choice here currently, and
12441 must always use the most general form. GCC does not know the
12442 address delta itself, so we can't use DW_LNS_advance_pc. Many
12443 ports do have length attributes which will give an upper bound
12444 on the address range. We could perhaps use length attributes
12445 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12446 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12447
12448 view = 0;
12449
12450 /* This can handle any delta. This takes
12451 4+DWARF2_ADDR_SIZE bytes. */
12452 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12453 debug_variable_location_views
12454 ? ", reset view to 0" : "");
12455 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12456 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12457 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12458
12459 prev_addr = ent;
12460 break;
12461
12462 case LI_adv_address:
12463 {
12464 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12465 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12466 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12467
12468 view++;
12469
12470 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12471 dw2_asm_output_delta (2, line_label, prev_label,
12472 "from %s to %s", prev_label, line_label);
12473
12474 prev_addr = ent;
12475 break;
12476 }
12477
12478 case LI_set_line:
12479 if (ent->val == current_line)
12480 {
12481 /* We still need to start a new row, so output a copy insn. */
12482 dw2_asm_output_data (1, DW_LNS_copy,
12483 "copy line %u", current_line);
12484 }
12485 else
12486 {
12487 int line_offset = ent->val - current_line;
12488 int line_delta = line_offset - DWARF_LINE_BASE;
12489
12490 current_line = ent->val;
12491 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12492 {
12493 /* This can handle deltas from -10 to 234, using the current
12494 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12495 This takes 1 byte. */
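		  /* For instance, with typical definitions of
		     DWARF_LINE_OPCODE_BASE (say 13) and DWARF_LINE_BASE
		     (say -10), advancing from line 7 to line 9 gives
		     line_offset = 2, line_delta = 12, and the single special
		     opcode byte 13 + 12 = 25, which bumps the line register
		     by 2 and appends a row to the line table. */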
12496 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12497 "line %u", current_line);
12498 }
12499 else
12500 {
12501 /* This can handle any delta. This takes at least 4 bytes,
12502 depending on the value being encoded. */
12503 dw2_asm_output_data (1, DW_LNS_advance_line,
12504 "advance to line %u", current_line);
12505 dw2_asm_output_data_sleb128 (line_offset, NULL);
12506 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12507 }
12508 }
12509 break;
12510
12511 case LI_set_file:
12512 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12513 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12514 break;
12515
12516 case LI_set_column:
12517 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12518 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12519 break;
12520
12521 case LI_negate_stmt:
12522 current_is_stmt = !current_is_stmt;
12523 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12524 "is_stmt %d", current_is_stmt);
12525 break;
12526
12527 case LI_set_prologue_end:
12528 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12529 "set prologue end");
12530 break;
12531
12532 case LI_set_epilogue_begin:
12533 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12534 "set epilogue begin");
12535 break;
12536
12537 case LI_set_discriminator:
12538 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12539 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12540 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12541 dw2_asm_output_data_uleb128 (ent->val, NULL);
12542 break;
12543 }
12544 }
12545
12546 /* Emit debug info for the address of the end of the table. */
12547 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12548 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12549 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12550 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12551
12552 dw2_asm_output_data (1, 0, "end sequence");
12553 dw2_asm_output_data_uleb128 (1, NULL);
12554 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12555 }
12556
12557 /* Output the source line number correspondence information. This
12558 information goes into the .debug_line section. */
12559
12560 static void
12561 output_line_info (bool prologue_only)
12562 {
12563 static unsigned int generation;
12564 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12565 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12566 bool saw_one = false;
12567 int opc;
12568
12569 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12570 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12571 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12572 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12573
12574 if (!XCOFF_DEBUGGING_INFO)
12575 {
12576 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12577 dw2_asm_output_data (4, 0xffffffff,
12578 "Initial length escape value indicating 64-bit DWARF extension");
12579 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12580 "Length of Source Line Info");
12581 }
12582
12583 ASM_OUTPUT_LABEL (asm_out_file, l1);
12584
12585 output_dwarf_version ();
12586 if (dwarf_version >= 5)
12587 {
12588 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12589 dw2_asm_output_data (1, 0, "Segment Size");
12590 }
12591 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12592 ASM_OUTPUT_LABEL (asm_out_file, p1);
12593
12594 /* Define the architecture-dependent minimum instruction length (in bytes).
12595 In this implementation of DWARF, this field is used for information
12596 purposes only. Since GCC generates assembly language, we have no
12597 a priori knowledge of how many instruction bytes are generated for each
12598 source line, and therefore can use only the DW_LNE_set_address and
12599 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12600 this as '1', which is "correct enough" for all architectures,
12601 and don't let the target override. */
12602 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12603
12604 if (dwarf_version >= 4)
12605 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12606 "Maximum Operations Per Instruction");
12607 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12608 "Default is_stmt_start flag");
12609 dw2_asm_output_data (1, DWARF_LINE_BASE,
12610 "Line Base Value (Special Opcodes)");
12611 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12612 "Line Range Value (Special Opcodes)");
12613 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12614 "Special Opcode Base");
12615
12616 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12617 {
12618 int n_op_args;
12619 switch (opc)
12620 {
12621 case DW_LNS_advance_pc:
12622 case DW_LNS_advance_line:
12623 case DW_LNS_set_file:
12624 case DW_LNS_set_column:
12625 case DW_LNS_fixed_advance_pc:
12626 case DW_LNS_set_isa:
12627 n_op_args = 1;
12628 break;
12629 default:
12630 n_op_args = 0;
12631 break;
12632 }
12633
12634 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12635 opc, n_op_args);
12636 }
12637
12638 /* Write out the information about the files we use. */
12639 output_file_names ();
12640 ASM_OUTPUT_LABEL (asm_out_file, p2);
12641 if (prologue_only)
12642 {
12643 /* Output the marker for the end of the line number info. */
12644 ASM_OUTPUT_LABEL (asm_out_file, l2);
12645 return;
12646 }
12647
12648 if (separate_line_info)
12649 {
12650 dw_line_info_table *table;
12651 size_t i;
12652
12653 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12654 if (table->in_use)
12655 {
12656 output_one_line_info_table (table);
12657 saw_one = true;
12658 }
12659 }
12660 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12661 {
12662 output_one_line_info_table (cold_text_section_line_info);
12663 saw_one = true;
12664 }
12665
12666 /* ??? Some Darwin linkers crash on a .debug_line section with no
12667 sequences. Further, merely a DW_LNE_end_sequence entry is not
12668 sufficient -- the address column must also be initialized.
12669 Make sure to output at least one set_address/end_sequence pair,
12670 choosing .text since that section is always present. */
12671 if (text_section_line_info->in_use || !saw_one)
12672 output_one_line_info_table (text_section_line_info);
12673
12674 /* Output the marker for the end of the line number info. */
12675 ASM_OUTPUT_LABEL (asm_out_file, l2);
12676 }
12677 \f
12678 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12679
12680 static inline bool
12681 need_endianity_attribute_p (bool reverse)
12682 {
12683 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12684 }
12685
12686 /* Given a pointer to a tree node for some base type, return a pointer to
12687 a DIE that describes the given type. REVERSE is true if the type is
12688 to be interpreted in the reverse storage order wrt the target order.
12689
12690 This routine must only be called for GCC type nodes that correspond to
12691 Dwarf base (fundamental) types. */
12692
12693 static dw_die_ref
12694 base_type_die (tree type, bool reverse)
12695 {
12696 dw_die_ref base_type_result;
12697 enum dwarf_type encoding;
12698 bool fpt_used = false;
12699 struct fixed_point_type_info fpt_info;
12700 tree type_bias = NULL_TREE;
12701
12702 /* If this is a subtype that should not be emitted as a subrange type,
12703 use the base type. See subrange_type_for_debug_p. */
12704 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12705 type = TREE_TYPE (type);
12706
12707 switch (TREE_CODE (type))
12708 {
12709 case INTEGER_TYPE:
12710 if ((dwarf_version >= 4 || !dwarf_strict)
12711 && TYPE_NAME (type)
12712 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12713 && DECL_IS_BUILTIN (TYPE_NAME (type))
12714 && DECL_NAME (TYPE_NAME (type)))
12715 {
12716 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12717 if (strcmp (name, "char16_t") == 0
12718 || strcmp (name, "char32_t") == 0)
12719 {
12720 encoding = DW_ATE_UTF;
12721 break;
12722 }
12723 }
12724 if ((dwarf_version >= 3 || !dwarf_strict)
12725 && lang_hooks.types.get_fixed_point_type_info)
12726 {
12727 memset (&fpt_info, 0, sizeof (fpt_info));
12728 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12729 {
12730 fpt_used = true;
12731 encoding = ((TYPE_UNSIGNED (type))
12732 ? DW_ATE_unsigned_fixed
12733 : DW_ATE_signed_fixed);
12734 break;
12735 }
12736 }
12737 if (TYPE_STRING_FLAG (type))
12738 {
12739 if (TYPE_UNSIGNED (type))
12740 encoding = DW_ATE_unsigned_char;
12741 else
12742 encoding = DW_ATE_signed_char;
12743 }
12744 else if (TYPE_UNSIGNED (type))
12745 encoding = DW_ATE_unsigned;
12746 else
12747 encoding = DW_ATE_signed;
12748
12749 if (!dwarf_strict
12750 && lang_hooks.types.get_type_bias)
12751 type_bias = lang_hooks.types.get_type_bias (type);
12752 break;
12753
12754 case REAL_TYPE:
12755 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12756 {
12757 if (dwarf_version >= 3 || !dwarf_strict)
12758 encoding = DW_ATE_decimal_float;
12759 else
12760 encoding = DW_ATE_lo_user;
12761 }
12762 else
12763 encoding = DW_ATE_float;
12764 break;
12765
12766 case FIXED_POINT_TYPE:
12767 if (!(dwarf_version >= 3 || !dwarf_strict))
12768 encoding = DW_ATE_lo_user;
12769 else if (TYPE_UNSIGNED (type))
12770 encoding = DW_ATE_unsigned_fixed;
12771 else
12772 encoding = DW_ATE_signed_fixed;
12773 break;
12774
12775     /* Dwarf2 doesn't know anything about complex ints, so use
12776        a user-defined type for them. */
12777 case COMPLEX_TYPE:
12778 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12779 encoding = DW_ATE_complex_float;
12780 else
12781 encoding = DW_ATE_lo_user;
12782 break;
12783
12784 case BOOLEAN_TYPE:
12785 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12786 encoding = DW_ATE_boolean;
12787 break;
12788
12789 default:
12790 /* No other TREE_CODEs are Dwarf fundamental types. */
12791 gcc_unreachable ();
12792 }
12793
12794 base_type_result = new_die_raw (DW_TAG_base_type);
12795
12796 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12797 int_size_in_bytes (type));
12798 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12799
12800 if (need_endianity_attribute_p (reverse))
12801 add_AT_unsigned (base_type_result, DW_AT_endianity,
12802 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12803
12804 add_alignment_attribute (base_type_result, type);
12805
12806 if (fpt_used)
12807 {
12808 switch (fpt_info.scale_factor_kind)
12809 {
12810 case fixed_point_scale_factor_binary:
12811 add_AT_int (base_type_result, DW_AT_binary_scale,
12812 fpt_info.scale_factor.binary);
12813 break;
12814
12815 case fixed_point_scale_factor_decimal:
12816 add_AT_int (base_type_result, DW_AT_decimal_scale,
12817 fpt_info.scale_factor.decimal);
12818 break;
12819
12820 case fixed_point_scale_factor_arbitrary:
12821 /* Arbitrary scale factors cannot be described in standard DWARF,
12822 yet. */
12823 if (!dwarf_strict)
12824 {
12825 /* Describe the scale factor as a rational constant. */
12826 const dw_die_ref scale_factor
12827 = new_die (DW_TAG_constant, comp_unit_die (), type);
12828
12829 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12830 fpt_info.scale_factor.arbitrary.numerator);
12831 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12832 fpt_info.scale_factor.arbitrary.denominator);
12833
12834 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12835 }
12836 break;
12837
12838 default:
12839 gcc_unreachable ();
12840 }
12841 }
12842
12843 if (type_bias)
12844 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12845 dw_scalar_form_constant
12846 | dw_scalar_form_exprloc
12847 | dw_scalar_form_reference,
12848 NULL);
12849
12850 return base_type_result;
12851 }
12852
12853 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12854 named 'auto' in its type: return true for it, false otherwise. */
12855
12856 static inline bool
12857 is_cxx_auto (tree type)
12858 {
12859 if (is_cxx ())
12860 {
12861 tree name = TYPE_IDENTIFIER (type);
12862 if (name == get_identifier ("auto")
12863 || name == get_identifier ("decltype(auto)"))
12864 return true;
12865 }
12866 return false;
12867 }
12868
12869 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12870 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12871    given input type is a Dwarf "fundamental" (base) type.  Otherwise return zero. */
12872 static inline int
12873 is_base_type (tree type)
12874 {
12875 switch (TREE_CODE (type))
12876 {
12877 case INTEGER_TYPE:
12878 case REAL_TYPE:
12879 case FIXED_POINT_TYPE:
12880 case COMPLEX_TYPE:
12881 case BOOLEAN_TYPE:
12882 return 1;
12883
12884 case VOID_TYPE:
12885 case ARRAY_TYPE:
12886 case RECORD_TYPE:
12887 case UNION_TYPE:
12888 case QUAL_UNION_TYPE:
12889 case ENUMERAL_TYPE:
12890 case FUNCTION_TYPE:
12891 case METHOD_TYPE:
12892 case POINTER_TYPE:
12893 case REFERENCE_TYPE:
12894 case NULLPTR_TYPE:
12895 case OFFSET_TYPE:
12896 case LANG_TYPE:
12897 case VECTOR_TYPE:
12898 return 0;
12899
12900 default:
12901 if (is_cxx_auto (type))
12902 return 0;
12903 gcc_unreachable ();
12904 }
12905
12906 return 0;
12907 }
12908
12909 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12910 node, return the size in bits for the type if it is a constant, or else
12911 return the alignment for the type if the type's size is not constant, or
12912 else return BITS_PER_WORD if the type actually turns out to be an
12913 ERROR_MARK node. */
12914
12915 static inline unsigned HOST_WIDE_INT
12916 simple_type_size_in_bits (const_tree type)
12917 {
12918 if (TREE_CODE (type) == ERROR_MARK)
12919 return BITS_PER_WORD;
12920 else if (TYPE_SIZE (type) == NULL_TREE)
12921 return 0;
12922 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12923 return tree_to_uhwi (TYPE_SIZE (type));
12924 else
12925 return TYPE_ALIGN (type);
12926 }
12927
12928 /* Similarly, but return an offset_int instead of UHWI. */
12929
12930 static inline offset_int
12931 offset_int_type_size_in_bits (const_tree type)
12932 {
12933 if (TREE_CODE (type) == ERROR_MARK)
12934 return BITS_PER_WORD;
12935 else if (TYPE_SIZE (type) == NULL_TREE)
12936 return 0;
12937 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12938 return wi::to_offset (TYPE_SIZE (type));
12939 else
12940 return TYPE_ALIGN (type);
12941 }
12942
12943 /* Given a pointer to a tree node for a subrange type, return a pointer
12944 to a DIE that describes the given type. */
12945
12946 static dw_die_ref
12947 subrange_type_die (tree type, tree low, tree high, tree bias,
12948 dw_die_ref context_die)
12949 {
12950 dw_die_ref subrange_die;
12951 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
12952
12953 if (context_die == NULL)
12954 context_die = comp_unit_die ();
12955
12956 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
12957
12958 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
12959 {
12960 /* The size of the subrange type and its base type do not match,
12961 so we need to generate a size attribute for the subrange type. */
12962 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
12963 }
12964
12965 add_alignment_attribute (subrange_die, type);
12966
12967 if (low)
12968 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
12969 if (high)
12970 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
12971 if (bias && !dwarf_strict)
12972 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
12973 dw_scalar_form_constant
12974 | dw_scalar_form_exprloc
12975 | dw_scalar_form_reference,
12976 NULL);
12977
12978 return subrange_die;
12979 }
12980
12981 /* Returns the (const and/or volatile) cv_qualifiers associated with
12982 the decl node. This will normally be augmented with the
12983 cv_qualifiers of the underlying type in add_type_attribute. */
12984
12985 static int
12986 decl_quals (const_tree decl)
12987 {
12988 return ((TREE_READONLY (decl)
12989 /* The C++ front-end correctly marks reference-typed
12990 variables as readonly, but from a language (and debug
12991 info) standpoint they are not const-qualified. */
12992 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
12993 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
12994 | (TREE_THIS_VOLATILE (decl)
12995 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
12996 }
12997
12998 /* Determine the TYPE whose qualifiers match the largest strict subset
12999 of the given TYPE_QUALS, and return its qualifiers. Ignore all
13000 qualifiers outside QUAL_MASK. */
13001
13002 static int
13003 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13004 {
13005 tree t;
13006 int best_rank = 0, best_qual = 0, max_rank;
13007
13008 type_quals &= qual_mask;
13009 max_rank = popcount_hwi (type_quals) - 1;
13010
13011 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13012 t = TYPE_NEXT_VARIANT (t))
13013 {
13014 int q = TYPE_QUALS (t) & qual_mask;
13015
13016 if ((q & type_quals) == q && q != type_quals
13017 && check_base_type (t, type))
13018 {
13019 int rank = popcount_hwi (q);
13020
13021 if (rank > best_rank)
13022 {
13023 best_rank = rank;
13024 best_qual = q;
13025 }
13026 }
13027 }
13028
13029 return best_qual;
13030 }
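/* For example (a hypothetical scenario): if TYPE_QUALS is
   const|volatile|restrict and the variant list contains a const variant and
   a const|volatile variant, the const|volatile variant is the largest strict
   subset (rank 2), so TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE is returned and
   the caller only has to add a DW_TAG_restrict_type on top of it. */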
13031
13032 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13033 static const dwarf_qual_info_t dwarf_qual_info[] =
13034 {
13035 { TYPE_QUAL_CONST, DW_TAG_const_type },
13036 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13037 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13038 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13039 };
13040 static const unsigned int dwarf_qual_info_size
13041 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13042
13043 /* If DIE is a qualified DIE of some base DIE with the same parent,
13044 return the base DIE, otherwise return NULL. Set MASK to the
13045 qualifiers added compared to the returned DIE. */
13046
13047 static dw_die_ref
13048 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13049 {
13050 unsigned int i;
13051 for (i = 0; i < dwarf_qual_info_size; i++)
13052 if (die->die_tag == dwarf_qual_info[i].t)
13053 break;
13054 if (i == dwarf_qual_info_size)
13055 return NULL;
13056 if (vec_safe_length (die->die_attr) != 1)
13057 return NULL;
13058 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13059 if (type == NULL || type->die_parent != die->die_parent)
13060 return NULL;
13061 *mask |= dwarf_qual_info[i].q;
13062 if (depth)
13063 {
13064 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13065 if (ret)
13066 return ret;
13067 }
13068 return type;
13069 }
13070
13071 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13072 entry that chains the modifiers specified by CV_QUALS in front of the
13073 given type. REVERSE is true if the type is to be interpreted in the
13074 reverse storage order wrt the target order. */
13075
13076 static dw_die_ref
13077 modified_type_die (tree type, int cv_quals, bool reverse,
13078 dw_die_ref context_die)
13079 {
13080 enum tree_code code = TREE_CODE (type);
13081 dw_die_ref mod_type_die;
13082 dw_die_ref sub_die = NULL;
13083 tree item_type = NULL;
13084 tree qualified_type;
13085 tree name, low, high;
13086 dw_die_ref mod_scope;
13087 /* Only these cv-qualifiers are currently handled. */
13088 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13089 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13090 ENCODE_QUAL_ADDR_SPACE(~0U));
13091 const bool reverse_base_type
13092 = need_endianity_attribute_p (reverse) && is_base_type (type);
13093
13094 if (code == ERROR_MARK)
13095 return NULL;
13096
13097 if (lang_hooks.types.get_debug_type)
13098 {
13099 tree debug_type = lang_hooks.types.get_debug_type (type);
13100
13101 if (debug_type != NULL_TREE && debug_type != type)
13102 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13103 }
13104
13105 cv_quals &= cv_qual_mask;
13106
13107   /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13108      tag modifier (and not an attribute) that old consumers won't be
13109      able to handle. */
13110 if (dwarf_version < 3)
13111 cv_quals &= ~TYPE_QUAL_RESTRICT;
13112
13113   /* Likewise for DW_TAG_atomic_type, which is new in DWARFv5. */
13114 if (dwarf_version < 5)
13115 cv_quals &= ~TYPE_QUAL_ATOMIC;
13116
13117 /* See if we already have the appropriately qualified variant of
13118 this type. */
13119 qualified_type = get_qualified_type (type, cv_quals);
13120
13121 if (qualified_type == sizetype)
13122 {
13123 /* Try not to expose the internal sizetype type's name. */
13124 if (TYPE_NAME (qualified_type)
13125 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13126 {
13127 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13128
13129 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13130 && (TYPE_PRECISION (t)
13131 == TYPE_PRECISION (qualified_type))
13132 && (TYPE_UNSIGNED (t)
13133 == TYPE_UNSIGNED (qualified_type)));
13134 qualified_type = t;
13135 }
13136 else if (qualified_type == sizetype
13137 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13138 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13139 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13140 qualified_type = size_type_node;
13141 }
13142
13143 /* If we do, then we can just use its DIE, if it exists. */
13144 if (qualified_type)
13145 {
13146 mod_type_die = lookup_type_die (qualified_type);
13147
13148 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13149 dealt with specially: the DIE with the attribute, if it exists, is
13150 placed immediately after the regular DIE for the same base type. */
13151 if (mod_type_die
13152 && (!reverse_base_type
13153 || ((mod_type_die = mod_type_die->die_sib) != NULL
13154 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13155 return mod_type_die;
13156 }
13157
13158 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13159
13160 /* Handle C typedef types. */
13161 if (name
13162 && TREE_CODE (name) == TYPE_DECL
13163 && DECL_ORIGINAL_TYPE (name)
13164 && !DECL_ARTIFICIAL (name))
13165 {
13166 tree dtype = TREE_TYPE (name);
13167
13168 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13169 if (qualified_type == dtype && !reverse_base_type)
13170 {
13171 tree origin = decl_ultimate_origin (name);
13172
13173 /* Typedef variants that have an abstract origin don't get their own
13174 type DIE (see gen_typedef_die), so fall back on the ultimate
13175 abstract origin instead. */
13176 if (origin != NULL && origin != name)
13177 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13178 context_die);
13179
13180 /* For a named type, use the typedef. */
13181 gen_type_die (qualified_type, context_die);
13182 return lookup_type_die (qualified_type);
13183 }
13184 else
13185 {
13186 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13187 dquals &= cv_qual_mask;
13188 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13189 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13190 /* cv-unqualified version of named type. Just use
13191 the unnamed type to which it refers. */
13192 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13193 reverse, context_die);
13194 /* Else cv-qualified version of named type; fall through. */
13195 }
13196 }
13197
13198 mod_scope = scope_die_for (type, context_die);
13199
13200 if (cv_quals)
13201 {
13202 int sub_quals = 0, first_quals = 0;
13203 unsigned i;
13204 dw_die_ref first = NULL, last = NULL;
13205
13206 /* Determine a lesser qualified type that most closely matches
13207 this one. Then generate DW_TAG_* entries for the remaining
13208 qualifiers. */
13209 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13210 cv_qual_mask);
13211 if (sub_quals && use_debug_types)
13212 {
13213 bool needed = false;
13214 /* If emitting type units, make sure the order of qualifiers
13215 is canonical. Thus, start from unqualified type if
13216 an earlier qualifier is missing in sub_quals, but some later
13217 one is present there. */
13218 for (i = 0; i < dwarf_qual_info_size; i++)
13219 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13220 needed = true;
13221 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13222 {
13223 sub_quals = 0;
13224 break;
13225 }
13226 }
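      /* E.g. (hypothetically), if CV_QUALS is const|volatile and the nearest
	 existing subset is just volatile, the loop above notices that an
	 earlier qualifier (const) is missing while a later one (volatile) is
	 present, and resets sub_quals to 0 so that type units always chain
	 qualifiers in the same canonical order. */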
13227 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13228 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13229 {
13230 /* As not all intermediate qualified DIEs have corresponding
13231 tree types, ensure that qualified DIEs in the same scope
13232 as their DW_AT_type are emitted after their DW_AT_type,
13233 only with other qualified DIEs for the same type possibly
13234 in between them. Determine the range of such qualified
13235 DIEs now (first being the base type, last being corresponding
13236 last qualified DIE for it). */
13237 unsigned int count = 0;
13238 first = qualified_die_p (mod_type_die, &first_quals,
13239 dwarf_qual_info_size);
13240 if (first == NULL)
13241 first = mod_type_die;
13242 gcc_assert ((first_quals & ~sub_quals) == 0);
13243 for (count = 0, last = first;
13244 count < (1U << dwarf_qual_info_size);
13245 count++, last = last->die_sib)
13246 {
13247 int quals = 0;
13248 if (last == mod_scope->die_child)
13249 break;
13250 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13251 != first)
13252 break;
13253 }
13254 }
13255
13256 for (i = 0; i < dwarf_qual_info_size; i++)
13257 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13258 {
13259 dw_die_ref d;
13260 if (first && first != last)
13261 {
13262 for (d = first->die_sib; ; d = d->die_sib)
13263 {
13264 int quals = 0;
13265 qualified_die_p (d, &quals, dwarf_qual_info_size);
13266 if (quals == (first_quals | dwarf_qual_info[i].q))
13267 break;
13268 if (d == last)
13269 {
13270 d = NULL;
13271 break;
13272 }
13273 }
13274 if (d)
13275 {
13276 mod_type_die = d;
13277 continue;
13278 }
13279 }
13280 if (first)
13281 {
13282 d = new_die_raw (dwarf_qual_info[i].t);
13283 add_child_die_after (mod_scope, d, last);
13284 last = d;
13285 }
13286 else
13287 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13288 if (mod_type_die)
13289 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13290 mod_type_die = d;
13291 first_quals |= dwarf_qual_info[i].q;
13292 }
13293 }
13294 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13295 {
13296 dwarf_tag tag = DW_TAG_pointer_type;
13297 if (code == REFERENCE_TYPE)
13298 {
13299 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13300 tag = DW_TAG_rvalue_reference_type;
13301 else
13302 tag = DW_TAG_reference_type;
13303 }
13304 mod_type_die = new_die (tag, mod_scope, type);
13305
13306 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13307 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13308 add_alignment_attribute (mod_type_die, type);
13309 item_type = TREE_TYPE (type);
13310
13311 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13312 if (!ADDR_SPACE_GENERIC_P (as))
13313 {
13314 int action = targetm.addr_space.debug (as);
13315 if (action >= 0)
13316 {
13317 /* Positive values indicate an address_class. */
13318 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13319 }
13320 else
13321 {
13322 /* Negative values indicate an (inverted) segment base reg. */
13323 dw_loc_descr_ref d
13324 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13325 add_AT_loc (mod_type_die, DW_AT_segment, d);
13326 }
13327 }
13328 }
13329 else if (code == INTEGER_TYPE
13330 && TREE_TYPE (type) != NULL_TREE
13331 && subrange_type_for_debug_p (type, &low, &high))
13332 {
13333 tree bias = NULL_TREE;
13334 if (lang_hooks.types.get_type_bias)
13335 bias = lang_hooks.types.get_type_bias (type);
13336 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13337 item_type = TREE_TYPE (type);
13338 }
13339 else if (is_base_type (type))
13340 {
13341 mod_type_die = base_type_die (type, reverse);
13342
13343 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13344 if (reverse_base_type)
13345 {
13346 dw_die_ref after_die
13347 = modified_type_die (type, cv_quals, false, context_die);
13348 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13349 }
13350 else
13351 add_child_die (comp_unit_die (), mod_type_die);
13352
13353 add_pubtype (type, mod_type_die);
13354 }
13355 else
13356 {
13357 gen_type_die (type, context_die);
13358
13359 /* We have to get the type_main_variant here (and pass that to the
13360 `lookup_type_die' routine) because the ..._TYPE node we have
13361 might simply be a *copy* of some original type node (where the
13362 copy was created to help us keep track of typedef names) and
13363 that copy might have a different TYPE_UID from the original
13364 ..._TYPE node. */
13365 if (TREE_CODE (type) == FUNCTION_TYPE
13366 || TREE_CODE (type) == METHOD_TYPE)
13367 {
13368 	  /* For function/method types, we can't just use type_main_variant
13369 	     here, because that can have different ref-qualifiers for C++;
13370 	     try to canonicalize instead. */
13371 tree main = TYPE_MAIN_VARIANT (type);
13372 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13373 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13374 && check_base_type (t, main)
13375 && check_lang_type (t, type))
13376 return lookup_type_die (t);
13377 return lookup_type_die (type);
13378 }
13379 else if (TREE_CODE (type) != VECTOR_TYPE
13380 && TREE_CODE (type) != ARRAY_TYPE)
13381 return lookup_type_die (type_main_variant (type));
13382 else
13383 /* Vectors have the debugging information in the type,
13384 not the main variant. */
13385 return lookup_type_die (type);
13386 }
13387
13388 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13389 don't output a DW_TAG_typedef, since there isn't one in the
13390 user's program; just attach a DW_AT_name to the type.
13391 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13392 if the base type already has the same name. */
13393 if (name
13394 && ((TREE_CODE (name) != TYPE_DECL
13395 && (qualified_type == TYPE_MAIN_VARIANT (type)
13396 || (cv_quals == TYPE_UNQUALIFIED)))
13397 || (TREE_CODE (name) == TYPE_DECL
13398 && TREE_TYPE (name) == qualified_type
13399 && DECL_NAME (name))))
13400 {
13401 if (TREE_CODE (name) == TYPE_DECL)
13402 /* Could just call add_name_and_src_coords_attributes here,
13403 but since this is a builtin type it doesn't have any
13404 useful source coordinates anyway. */
13405 name = DECL_NAME (name);
13406 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13407 }
13408 /* This probably indicates a bug. */
13409 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13410 {
13411 name = TYPE_IDENTIFIER (type);
13412 add_name_attribute (mod_type_die,
13413 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13414 }
13415
13416 if (qualified_type && !reverse_base_type)
13417 equate_type_number_to_die (qualified_type, mod_type_die);
13418
13419 if (item_type)
13420 /* We must do this after the equate_type_number_to_die call, in case
13421 this is a recursive type. This ensures that the modified_type_die
13422 recursion will terminate even if the type is recursive. Recursive
13423 types are possible in Ada. */
13424 sub_die = modified_type_die (item_type,
13425 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13426 reverse,
13427 context_die);
13428
13429 if (sub_die != NULL)
13430 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13431
13432 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13433 if (TYPE_ARTIFICIAL (type))
13434 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13435
13436 return mod_type_die;
13437 }
13438
13439 /* Generate DIEs for the generic parameters of T.
13440 T must be either a generic type or a generic function.
13441 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13442
13443 static void
13444 gen_generic_params_dies (tree t)
13445 {
13446 tree parms, args;
13447 int parms_num, i;
13448 dw_die_ref die = NULL;
13449 int non_default;
13450
13451 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13452 return;
13453
13454 if (TYPE_P (t))
13455 die = lookup_type_die (t);
13456 else if (DECL_P (t))
13457 die = lookup_decl_die (t);
13458
13459 gcc_assert (die);
13460
13461 parms = lang_hooks.get_innermost_generic_parms (t);
13462 if (!parms)
13463     /* T has no generic parameters.  It means T is neither a generic type
13464        nor a generic function.  End of story. */
13465 return;
13466
13467 parms_num = TREE_VEC_LENGTH (parms);
13468 args = lang_hooks.get_innermost_generic_args (t);
13469 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13470 non_default = int_cst_value (TREE_CHAIN (args));
13471 else
13472 non_default = TREE_VEC_LENGTH (args);
13473 for (i = 0; i < parms_num; i++)
13474 {
13475 tree parm, arg, arg_pack_elems;
13476 dw_die_ref parm_die;
13477
13478 parm = TREE_VEC_ELT (parms, i);
13479 arg = TREE_VEC_ELT (args, i);
13480 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13481 gcc_assert (parm && TREE_VALUE (parm) && arg);
13482
13483 if (parm && TREE_VALUE (parm) && arg)
13484 {
13485 /* If PARM represents a template parameter pack,
13486 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13487 by DW_TAG_template_*_parameter DIEs for the argument
13488 pack elements of ARG. Note that ARG would then be
13489 an argument pack. */
13490 if (arg_pack_elems)
13491 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13492 arg_pack_elems,
13493 die);
13494 else
13495 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13496 true /* emit name */, die);
13497 if (i >= non_default)
13498 add_AT_flag (parm_die, DW_AT_default_value, 1);
13499 }
13500 }
13501 }
13502
13503 /* Create and return a DIE for PARM which should be
13504 the representation of a generic type parameter.
13505 For instance, in the C++ front end, PARM would be a template parameter.
13506 ARG is the argument to PARM.
13507    If EMIT_NAME_P is true, the DIE will have a DW_AT_name attribute set to
13508    the name of PARM.
13509    PARENT_DIE is the parent DIE to which the newly created DIE should be
13510    added as a child node. */
13511
13512 static dw_die_ref
13513 generic_parameter_die (tree parm, tree arg,
13514 bool emit_name_p,
13515 dw_die_ref parent_die)
13516 {
13517 dw_die_ref tmpl_die = NULL;
13518 const char *name = NULL;
13519
13520 if (!parm || !DECL_NAME (parm) || !arg)
13521 return NULL;
13522
13523 /* We support non-type generic parameters and arguments,
13524 type generic parameters and arguments, as well as
13525 generic generic parameters (a.k.a. template template parameters in C++)
13526 and arguments. */
13527 if (TREE_CODE (parm) == PARM_DECL)
13528 /* PARM is a nontype generic parameter */
13529 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13530 else if (TREE_CODE (parm) == TYPE_DECL)
13531 /* PARM is a type generic parameter. */
13532 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13533 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13534 /* PARM is a generic generic parameter.
13535 Its DIE is a GNU extension. It shall have a
13536 DW_AT_name attribute to represent the name of the template template
13537 parameter, and a DW_AT_GNU_template_name attribute to represent the
13538 name of the template template argument. */
13539 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13540 parent_die, parm);
13541 else
13542 gcc_unreachable ();
13543
13544 if (tmpl_die)
13545 {
13546 tree tmpl_type;
13547
13548 /* If PARM is a generic parameter pack, it means we are
13549 emitting debug info for a template argument pack element.
13550 In other terms, ARG is a template argument pack element.
13551 In that case, we don't emit any DW_AT_name attribute for
13552 the die. */
13553 if (emit_name_p)
13554 {
13555 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13556 gcc_assert (name);
13557 add_AT_string (tmpl_die, DW_AT_name, name);
13558 }
13559
13560 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13561 {
13562 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13563 TMPL_DIE should have a child DW_AT_type attribute that is set
13564 to the type of the argument to PARM, which is ARG.
13565 If PARM is a type generic parameter, TMPL_DIE should have a
13566 child DW_AT_type that is set to ARG. */
13567 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13568 add_type_attribute (tmpl_die, tmpl_type,
13569 (TREE_THIS_VOLATILE (tmpl_type)
13570 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13571 false, parent_die);
13572 }
13573 else
13574 {
13575 	  /* So TMPL_DIE is a DIE representing a generic generic template
13576 	     parameter, a.k.a. a template template parameter in C++, and ARG
13577 	     is a template. */
13578
13579 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13580 to the name of the argument. */
13581 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13582 if (name)
13583 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13584 }
13585
13586 if (TREE_CODE (parm) == PARM_DECL)
13587       /* So PARM is a non-type generic parameter.
13588 	 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13589 	 attribute of TMPL_DIE whose value represents the value
13590 	 of ARG.
13591 	 We must be careful here:
13592 	 the value of ARG might reference some function decls.
13593 	 We might currently be emitting debug info for a generic
13594 	 type, and types are emitted before function decls, so we don't
13595 	 know whether the function decls referenced by ARG will actually
13596 	 be emitted after cgraph computations.
13597 	 So we must defer the generation of the DW_AT_const_value to
13598 	 after cgraph is ready. */
13599 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13600 }
13601
13602 return tmpl_die;
13603 }
13604
13605 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13606    PARM_PACK, which must be a template parameter pack.  The returned DIE
13607    will be a child DIE of PARENT_DIE. */
13608
13609 static dw_die_ref
13610 template_parameter_pack_die (tree parm_pack,
13611 tree parm_pack_args,
13612 dw_die_ref parent_die)
13613 {
13614 dw_die_ref die;
13615 int j;
13616
13617 gcc_assert (parent_die && parm_pack);
13618
13619 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13620 add_name_and_src_coords_attributes (die, parm_pack);
13621 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13622 generic_parameter_die (parm_pack,
13623 TREE_VEC_ELT (parm_pack_args, j),
13624 false /* Don't emit DW_AT_name */,
13625 die);
13626 return die;
13627 }
13628
13629 /* Return the DBX register number described by a given RTL node. */
13630
13631 static unsigned int
13632 dbx_reg_number (const_rtx rtl)
13633 {
13634 unsigned regno = REGNO (rtl);
13635
13636 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13637
13638 #ifdef LEAF_REG_REMAP
13639 if (crtl->uses_only_leaf_regs)
13640 {
13641 int leaf_reg = LEAF_REG_REMAP (regno);
13642 if (leaf_reg != -1)
13643 regno = (unsigned) leaf_reg;
13644 }
13645 #endif
13646
13647 regno = DBX_REGISTER_NUMBER (regno);
13648 gcc_assert (regno != INVALID_REGNUM);
13649 return regno;
13650 }
13651
13652 /* Optionally add a DW_OP_piece term to a location description expression.
13653    DW_OP_piece is only added if the location description expression
13654    doesn't already end with DW_OP_piece. */
13655
13656 static void
13657 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13658 {
13659 dw_loc_descr_ref loc;
13660
13661 if (*list_head != NULL)
13662 {
13663 /* Find the end of the chain. */
13664 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13665 ;
13666
13667 if (loc->dw_loc_opc != DW_OP_piece)
13668 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13669 }
13670 }
13671
13672 /* Return a location descriptor that designates a machine register or
13673 zero if there is none. */
13674
13675 static dw_loc_descr_ref
13676 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13677 {
13678 rtx regs;
13679
13680 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13681 return 0;
13682
13683 /* We only use "frame base" when we're sure we're talking about the
13684 post-prologue local stack frame. We do this by *not* running
13685 register elimination until this point, and recognizing the special
13686 argument pointer and soft frame pointer rtx's.
13687 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13688 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13689 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13690 {
13691 dw_loc_descr_ref result = NULL;
13692
13693 if (dwarf_version >= 4 || !dwarf_strict)
13694 {
13695 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13696 initialized);
13697 if (result)
13698 add_loc_descr (&result,
13699 new_loc_descr (DW_OP_stack_value, 0, 0));
13700 }
13701 return result;
13702 }
13703
13704 regs = targetm.dwarf_register_span (rtl);
13705
13706 if (REG_NREGS (rtl) > 1 || regs)
13707 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13708 else
13709 {
13710 unsigned int dbx_regnum = dbx_reg_number (rtl);
13711 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13712 return 0;
13713 return one_reg_loc_descriptor (dbx_regnum, initialized);
13714 }
13715 }
13716
13717 /* Return a location descriptor that designates a machine register for
13718 a given hard register number. */
13719
13720 static dw_loc_descr_ref
13721 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13722 {
13723 dw_loc_descr_ref reg_loc_descr;
13724
13725 if (regno <= 31)
13726 reg_loc_descr
13727 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13728 else
13729 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13730
13731 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13732 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13733
13734 return reg_loc_descr;
13735 }
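/* For instance, hard register number 3 becomes the one-byte DW_OP_reg3,
   while register number 40 (an arbitrary example) needs the two-byte
   DW_OP_regx 40, since only registers 0-31 have dedicated opcodes. */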
13736
13737 /* Given an RTL of a register, return a location descriptor that
13738 designates a value that spans more than one register. */
13739
13740 static dw_loc_descr_ref
13741 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13742 enum var_init_status initialized)
13743 {
13744 int size, i;
13745 dw_loc_descr_ref loc_result = NULL;
13746
13747 /* Simple, contiguous registers. */
13748 if (regs == NULL_RTX)
13749 {
13750 unsigned reg = REGNO (rtl);
13751 int nregs;
13752
13753 #ifdef LEAF_REG_REMAP
13754 if (crtl->uses_only_leaf_regs)
13755 {
13756 int leaf_reg = LEAF_REG_REMAP (reg);
13757 if (leaf_reg != -1)
13758 reg = (unsigned) leaf_reg;
13759 }
13760 #endif
13761
13762 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13763 nregs = REG_NREGS (rtl);
13764
13765 /* At present we only track constant-sized pieces. */
13766 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13767 return NULL;
13768 size /= nregs;
13769
13770 loc_result = NULL;
13771 while (nregs--)
13772 {
13773 dw_loc_descr_ref t;
13774
13775 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13776 VAR_INIT_STATUS_INITIALIZED);
13777 add_loc_descr (&loc_result, t);
13778 add_loc_descr_op_piece (&loc_result, size);
13779 ++reg;
13780 }
13781 return loc_result;
13782 }
13783
13784   /* Now onto stupid register sets in non-contiguous locations. */
13785
13786 gcc_assert (GET_CODE (regs) == PARALLEL);
13787
13788 /* At present we only track constant-sized pieces. */
13789 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13790 return NULL;
13791 loc_result = NULL;
13792
13793 for (i = 0; i < XVECLEN (regs, 0); ++i)
13794 {
13795 dw_loc_descr_ref t;
13796
13797 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13798 VAR_INIT_STATUS_INITIALIZED);
13799 add_loc_descr (&loc_result, t);
13800 add_loc_descr_op_piece (&loc_result, size);
13801 }
13802
13803 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13804 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13805 return loc_result;
13806 }
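/* As an illustration (assuming a target with 4-byte hard registers and no
   dwarf_register_span hook), an 8-byte value living in two consecutive
   registers r0/r1 would yield the location expression
     DW_OP_reg0, DW_OP_piece 4, DW_OP_reg1, DW_OP_piece 4
   i.e. one register location followed by a DW_OP_piece per register. */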
13807
13808 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13809
13810 /* Return a location descriptor that designates a constant i,
13811 as a compound operation from constant (i >> shift), constant shift
13812 and DW_OP_shl. */
13813
13814 static dw_loc_descr_ref
13815 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13816 {
13817 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13818 add_loc_descr (&ret, int_loc_descriptor (shift));
13819 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13820 return ret;
13821 }
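/* For example, on a host with a 64-bit HOST_WIDE_INT, int_loc_descriptor
   below encodes the constant 0x30000000 via this helper as DW_OP_lit24,
   DW_OP_lit25, DW_OP_shl (24 << 25 == 0x30000000): 3 bytes instead of the
   5 bytes a DW_OP_const4u would take. */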
13822
13823 /* Return a location descriptor that designates constant POLY_I. */
13824
13825 static dw_loc_descr_ref
13826 int_loc_descriptor (poly_int64 poly_i)
13827 {
13828 enum dwarf_location_atom op;
13829
13830 HOST_WIDE_INT i;
13831 if (!poly_i.is_constant (&i))
13832 {
13833 /* Create location descriptions for the non-constant part and
13834 add any constant offset at the end. */
13835 dw_loc_descr_ref ret = NULL;
13836 HOST_WIDE_INT constant = poly_i.coeffs[0];
13837 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13838 {
13839 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13840 if (coeff != 0)
13841 {
13842 dw_loc_descr_ref start = ret;
13843 unsigned int factor;
13844 int bias;
13845 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13846 (j, &factor, &bias);
13847
13848 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13849 add COEFF * (REGNO / FACTOR) now and subtract
13850 COEFF * BIAS from the final constant part. */
13851 constant -= coeff * bias;
13852 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13853 if (coeff % factor == 0)
13854 coeff /= factor;
13855 else
13856 {
13857 int amount = exact_log2 (factor);
13858 gcc_assert (amount >= 0);
13859 add_loc_descr (&ret, int_loc_descriptor (amount));
13860 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13861 }
13862 if (coeff != 1)
13863 {
13864 add_loc_descr (&ret, int_loc_descriptor (coeff));
13865 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13866 }
13867 if (start)
13868 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13869 }
13870 }
13871 loc_descr_plus_const (&ret, constant);
13872 return ret;
13873 }
13874
13875 /* Pick the smallest representation of a constant, rather than just
13876 defaulting to the LEB encoding. */
13877 if (i >= 0)
13878 {
13879 int clz = clz_hwi (i);
13880 int ctz = ctz_hwi (i);
13881 if (i <= 31)
13882 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13883 else if (i <= 0xff)
13884 op = DW_OP_const1u;
13885 else if (i <= 0xffff)
13886 op = DW_OP_const2u;
13887 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13888 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13889 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13890 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13891 while DW_OP_const4u is 5 bytes. */
13892 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13893 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13894 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13895 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13896 while DW_OP_const4u is 5 bytes. */
13897 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13898
13899 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13900 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13901 <= 4)
13902 {
13903 /* As i >= 2**31, the double cast above will yield a negative number.
13904 Since wrapping is defined in DWARF expressions we can output big
13905 positive integers as small negative ones, regardless of the size
13906 of host wide ints.
13907
13908 Here, since the evaluator will handle 32-bit values and since i >=
13909 2**31, we know it's going to be interpreted as a negative literal:
13910 store it this way if we can do better than 5 bytes this way. */
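	     As a hypothetical example, with DWARF2_ADDR_SIZE == 4 the
	     constant 0xfffffff0 reinterpreted as int32_t is -16, which fits
	     in DW_OP_const1s (2 bytes) instead of DW_OP_const4u (5 bytes);
	     the evaluator's 32-bit wrap-around makes both spellings denote
	     the same value. */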
13911 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13912 }
13913 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13914 op = DW_OP_const4u;
13915
13916 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13917 least 6 bytes: see if we can do better before falling back to it. */
13918 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13919 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13920 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13921 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13922 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13923 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13924 >= HOST_BITS_PER_WIDE_INT)
13925 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13926 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13927 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13928 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13929 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13930 && size_of_uleb128 (i) > 6)
13931 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13932 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13933 else
13934 op = DW_OP_constu;
13935 }
13936 else
13937 {
13938 if (i >= -0x80)
13939 op = DW_OP_const1s;
13940 else if (i >= -0x8000)
13941 op = DW_OP_const2s;
13942 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13943 {
13944 if (size_of_int_loc_descriptor (i) < 5)
13945 {
13946 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13947 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13948 return ret;
13949 }
13950 op = DW_OP_const4s;
13951 }
13952 else
13953 {
13954 if (size_of_int_loc_descriptor (i)
13955 < (unsigned long) 1 + size_of_sleb128 (i))
13956 {
13957 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13958 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13959 return ret;
13960 }
13961 op = DW_OP_consts;
13962 }
13963 }
13964
13965 return new_loc_descr (op, i, 0);
13966 }
13967
13968 /* Likewise, for unsigned constants. */
13969
13970 static dw_loc_descr_ref
13971 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
13972 {
13973 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
13974 const unsigned HOST_WIDE_INT max_uint
13975 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
13976
13977 /* If possible, use the clever signed constants handling. */
13978 if (i <= max_int)
13979 return int_loc_descriptor ((HOST_WIDE_INT) i);
13980
13981 /* Here, we are left with positive numbers that cannot be represented as
13982 HOST_WIDE_INT, i.e.:
13983 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
13984
13985    Using a DW_OP_const4u/DW_OP_const8u operation to encode them consumes a
13986    lot of bytes, whereas it may be better to output a negative integer:
13987    thanks to integer wrapping, we know that:
13988    x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
13989      = x - 2 * (max (HOST_WIDE_INT) + 1)
13990    So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
13991    small negative integers.  Let's try that in cases where it will clearly
13992    improve the encoding: there is no gain turning DW_OP_const4u into
13993 DW_OP_const4s. */
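  /* For instance (a hypothetical case with DWARF2_ADDR_SIZE == 8 and a
     64-bit HOST_WIDE_INT), i == 0xffffffffffffff00 gives
     second_shift == -256 below, which is emitted as DW_OP_const2s (3 bytes)
     instead of DW_OP_const8u (9 bytes). */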
13994 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
13995 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
13996 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
13997 {
13998 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
13999
14000 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14001 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14002 const HOST_WIDE_INT second_shift
14003 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14004
14005 /* So we finally have:
14006 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14007 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14008 return int_loc_descriptor (second_shift);
14009 }
14010
14011   /* Last chance: fall back to a simple constant operation. */
14012 return new_loc_descr
14013 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14014 ? DW_OP_const4u
14015 : DW_OP_const8u,
14016 i, 0);
14017 }
14018
14019 /* Generate and return a location description that computes the unsigned
14020 comparison of the two stack top entries (a OP b where b is the top-most
14021 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14022 LE_EXPR, GT_EXPR or GE_EXPR. */
14023
14024 static dw_loc_descr_ref
14025 uint_comparison_loc_list (enum tree_code kind)
14026 {
14027 enum dwarf_location_atom op, flip_op;
14028 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14029
14030 switch (kind)
14031 {
14032 case LT_EXPR:
14033 op = DW_OP_lt;
14034 break;
14035 case LE_EXPR:
14036 op = DW_OP_le;
14037 break;
14038 case GT_EXPR:
14039 op = DW_OP_gt;
14040 break;
14041 case GE_EXPR:
14042 op = DW_OP_ge;
14043 break;
14044 default:
14045 gcc_unreachable ();
14046 }
14047
14048 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14049 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14050
14051 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14052 possible to perform unsigned comparisons: we just have to distinguish
14053    two cases:
14054
14055 1. when a and b have the same sign (as signed integers); then we should
14056 return: a OP(signed) b;
14057
14058 2. when a is a negative signed integer while b is a positive one, then a
14059 is a greater unsigned integer than b; likewise when a and b's roles
14060 are flipped.
14061
14062 So first, compare the sign of the two operands. */
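  /* A worked example (purely illustrative): for KIND == LT_EXPR with
     a == 0xffffffff and b == 1 on a 32-bit evaluation, the signs differ, so
     the DW_OP_bra branch built below is taken and the flipped signed test
     "a > b" is used: -1 > 1 is false, which matches the unsigned truth of
     0xffffffff < 1. */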
14063 ret = new_loc_descr (DW_OP_over, 0, 0);
14064 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14065 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14066 /* If they have different signs (i.e. they have different sign bits), then
14067 the stack top value has now the sign bit set and thus it's smaller than
14068 zero. */
14069 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14070 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14071 add_loc_descr (&ret, bra_node);
14072
14073 /* We are in case 1. At this point, we know both operands have the same
14074    sign, so it's safe to use the built-in signed comparison. */
14075 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14076 add_loc_descr (&ret, jmp_node);
14077
14078 /* We are in case 2. Here, we know both operands do not have the same sign,
14079 so we have to flip the signed comparison. */
14080 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14081 tmp = new_loc_descr (flip_op, 0, 0);
14082 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14083 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14084 add_loc_descr (&ret, tmp);
14085
14086 /* This dummy operation is necessary to make the two branches join. */
14087 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14088 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14089 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14090 add_loc_descr (&ret, tmp);
14091
14092 return ret;
14093 }
14094
14095 /* Likewise, but takes the location description lists (might be destructive on
14096 them). Return NULL if either is NULL or if concatenation fails. */
14097
14098 static dw_loc_list_ref
14099 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14100 enum tree_code kind)
14101 {
14102 if (left == NULL || right == NULL)
14103 return NULL;
14104
14105 add_loc_list (&left, right);
14106 if (left == NULL)
14107 return NULL;
14108
14109 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14110 return left;
14111 }
14112
14113 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14114 without actually allocating it. */
14115
14116 static unsigned long
14117 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14118 {
14119 return size_of_int_loc_descriptor (i >> shift)
14120 + size_of_int_loc_descriptor (shift)
14121 + 1;
14122 }
14123
14124 /* Return size_of_locs (int_loc_descriptor (i)) without
14125 actually allocating it. */
14126
14127 static unsigned long
14128 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14129 {
14130 unsigned long s;
14131
14132 if (i >= 0)
14133 {
14134 int clz, ctz;
14135 if (i <= 31)
14136 return 1;
14137 else if (i <= 0xff)
14138 return 2;
14139 else if (i <= 0xffff)
14140 return 3;
14141 clz = clz_hwi (i);
14142 ctz = ctz_hwi (i);
14143 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14144 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14145 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14146 - clz - 5);
14147 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14148 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14149 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14150 - clz - 8);
14151 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14152 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14153 <= 4)
14154 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14155 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14156 return 5;
14157 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14158 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14159 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14160 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14161 - clz - 8);
14162 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14163 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14164 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14165 - clz - 16);
14166 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14167 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14168 && s > 6)
14169 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14170 - clz - 32);
14171 else
14172 return 1 + s;
14173 }
14174 else
14175 {
14176 if (i >= -0x80)
14177 return 2;
14178 else if (i >= -0x8000)
14179 return 3;
14180 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14181 {
14182 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14183 {
14184 s = size_of_int_loc_descriptor (-i) + 1;
14185 if (s < 5)
14186 return s;
14187 }
14188 return 5;
14189 }
14190 else
14191 {
14192 unsigned long r = 1 + size_of_sleb128 (i);
14193 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14194 {
14195 s = size_of_int_loc_descriptor (-i) + 1;
14196 if (s < r)
14197 return s;
14198 }
14199 return r;
14200 }
14201 }
14202 }
14203
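/* Worked examples for the size computation above (assuming a 64-bit
   HOST_WIDE_INT): i = 17 costs 1 byte (DW_OP_lit17); i = 200 costs 2 bytes
   (DW_OP_const1u); i = 0x10000 costs 3 bytes, because a shift form such as
   DW_OP_lit16 DW_OP_lit12 DW_OP_shl beats the 5 bytes of DW_OP_const4u;
   i = -70 costs 2 bytes (DW_OP_const1s).  */
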
14204 /* Return loc description representing "address" of integer value.
14205 This can appear only as a toplevel expression.  */
14206
14207 static dw_loc_descr_ref
14208 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14209 {
14210 int litsize;
14211 dw_loc_descr_ref loc_result = NULL;
14212
14213 if (!(dwarf_version >= 4 || !dwarf_strict))
14214 return NULL;
14215
14216 litsize = size_of_int_loc_descriptor (i);
14217 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14218 is more compact. For DW_OP_stack_value we need:
14219 litsize + 1 (DW_OP_stack_value)
14220 and for DW_OP_implicit_value:
14221 1 (DW_OP_implicit_value) + 1 (length) + size. */
14222 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14223 {
14224 loc_result = int_loc_descriptor (i);
14225 add_loc_descr (&loc_result,
14226 new_loc_descr (DW_OP_stack_value, 0, 0));
14227 return loc_result;
14228 }
14229
14230 loc_result = new_loc_descr (DW_OP_implicit_value,
14231 size, 0);
14232 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14233 loc_result->dw_loc_oprnd2.v.val_int = i;
14234 return loc_result;
14235 }
14236
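/* Worked example for the choice above: with size = 4 and i = 5, litsize is 1
   (DW_OP_lit5), so the DW_OP_stack_value form costs 1 + 1 = 2 bytes against
   1 + 1 + 4 = 6 bytes for DW_OP_implicit_value, and the stack-value form
   wins.  For a 16-byte value on a target where DWARF2_ADDR_SIZE is 8, the
   first test fails and DW_OP_implicit_value is used instead.  */
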
14237 /* Return a location descriptor that designates a base+offset location. */
14238
14239 static dw_loc_descr_ref
14240 based_loc_descr (rtx reg, poly_int64 offset,
14241 enum var_init_status initialized)
14242 {
14243 unsigned int regno;
14244 dw_loc_descr_ref result;
14245 dw_fde_ref fde = cfun->fde;
14246
14247 /* We only use "frame base" when we're sure we're talking about the
14248 post-prologue local stack frame. We do this by *not* running
14249 register elimination until this point, and recognizing the special
14250 argument pointer and soft frame pointer rtx's. */
14251 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14252 {
14253 rtx elim = (ira_use_lra_p
14254 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14255 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14256
14257 if (elim != reg)
14258 {
14259 /* Allow hard frame pointer here even if frame pointer
14260 isn't used since hard frame pointer is encoded with
14261 DW_OP_fbreg which uses the DW_AT_frame_base attribute,
14262 not hard frame pointer directly. */
14263 elim = strip_offset_and_add (elim, &offset);
14264 gcc_assert (elim == hard_frame_pointer_rtx
14265 || elim == stack_pointer_rtx);
14266
14267 /* If drap register is used to align stack, use frame
14268 pointer + offset to access stack variables. If stack
14269 is aligned without drap, use stack pointer + offset to
14270 access stack variables. */
14271 if (crtl->stack_realign_tried
14272 && reg == frame_pointer_rtx)
14273 {
14274 int base_reg
14275 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14276 ? HARD_FRAME_POINTER_REGNUM
14277 : REGNO (elim));
14278 return new_reg_loc_descr (base_reg, offset);
14279 }
14280
14281 gcc_assert (frame_pointer_fb_offset_valid);
14282 offset += frame_pointer_fb_offset;
14283 HOST_WIDE_INT const_offset;
14284 if (offset.is_constant (&const_offset))
14285 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14286 else
14287 {
14288 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14289 loc_descr_plus_const (&ret, offset);
14290 return ret;
14291 }
14292 }
14293 }
14294
14295 regno = REGNO (reg);
14296 #ifdef LEAF_REG_REMAP
14297 if (crtl->uses_only_leaf_regs)
14298 {
14299 int leaf_reg = LEAF_REG_REMAP (regno);
14300 if (leaf_reg != -1)
14301 regno = (unsigned) leaf_reg;
14302 }
14303 #endif
14304 regno = DWARF_FRAME_REGNUM (regno);
14305
14306 HOST_WIDE_INT const_offset;
14307 if (!optimize && fde
14308 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14309 && offset.is_constant (&const_offset))
14310 {
14311 /* Use cfa+offset to represent the location of arguments passed
14312 on the stack when drap is used to align stack.
14313 Only do this when not optimizing, for optimized code var-tracking
14314 is supposed to track where the arguments live and the register
14315 used as vdrap or drap in some spot might be used for something
14316 else in other parts of the routine.  */
14317 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14318 }
14319
14320 result = new_reg_loc_descr (regno, offset);
14321
14322 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14323 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14324
14325 return result;
14326 }
14327
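/* Illustrative example: for a local variable living at offset -16 from the
   (eliminated) frame pointer, the code above typically emits DW_OP_fbreg -16,
   deferring to the DW_AT_frame_base of the enclosing subprogram, while an
   address based on some other hard register comes out as a
   DW_OP_breg<n> <offset> via new_reg_loc_descr.  */
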
14328 /* Return true if this RTL expression describes a base+offset calculation. */
14329
14330 static inline int
14331 is_based_loc (const_rtx rtl)
14332 {
14333 return (GET_CODE (rtl) == PLUS
14334 && ((REG_P (XEXP (rtl, 0))
14335 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14336 && CONST_INT_P (XEXP (rtl, 1)))));
14337 }
14338
14339 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14340 failed. */
14341
14342 static dw_loc_descr_ref
14343 tls_mem_loc_descriptor (rtx mem)
14344 {
14345 tree base;
14346 dw_loc_descr_ref loc_result;
14347
14348 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14349 return NULL;
14350
14351 base = get_base_address (MEM_EXPR (mem));
14352 if (base == NULL
14353 || !VAR_P (base)
14354 || !DECL_THREAD_LOCAL_P (base))
14355 return NULL;
14356
14357 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14358 if (loc_result == NULL)
14359 return NULL;
14360
14361 if (maybe_ne (MEM_OFFSET (mem), 0))
14362 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14363
14364 return loc_result;
14365 }
14366
14367 /* Output debug info about the reason why we failed to expand an expression
14368 as a DWARF expression.  */
14369
14370 static void
14371 expansion_failed (tree expr, rtx rtl, char const *reason)
14372 {
14373 if (dump_file && (dump_flags & TDF_DETAILS))
14374 {
14375 fprintf (dump_file, "Failed to expand as dwarf: ");
14376 if (expr)
14377 print_generic_expr (dump_file, expr, dump_flags);
14378 if (rtl)
14379 {
14380 fprintf (dump_file, "\n");
14381 print_rtl (dump_file, rtl);
14382 }
14383 fprintf (dump_file, "\nReason: %s\n", reason);
14384 }
14385 }
14386
14387 /* Helper function for const_ok_for_output. */
14388
14389 static bool
14390 const_ok_for_output_1 (rtx rtl)
14391 {
14392 if (targetm.const_not_ok_for_debug_p (rtl))
14393 {
14394 if (GET_CODE (rtl) != UNSPEC)
14395 {
14396 expansion_failed (NULL_TREE, rtl,
14397 "Expression rejected for debug by the backend.\n");
14398 return false;
14399 }
14400
14401 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14402 the target hook doesn't explicitly allow it in debug info, assume
14403 we can't express it in the debug info. */
14404 /* Don't complain about TLS UNSPECs, those are just too hard to
14405 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14406 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14407 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14408 if (flag_checking
14409 && (XVECLEN (rtl, 0) == 0
14410 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14411 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14412 inform (current_function_decl
14413 ? DECL_SOURCE_LOCATION (current_function_decl)
14414 : UNKNOWN_LOCATION,
14415 #if NUM_UNSPEC_VALUES > 0
14416 "non-delegitimized UNSPEC %s (%d) found in variable location",
14417 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14418 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14419 XINT (rtl, 1));
14420 #else
14421 "non-delegitimized UNSPEC %d found in variable location",
14422 XINT (rtl, 1));
14423 #endif
14424 expansion_failed (NULL_TREE, rtl,
14425 "UNSPEC hasn't been delegitimized.\n");
14426 return false;
14427 }
14428
14429 if (CONST_POLY_INT_P (rtl))
14430 return false;
14431
14432 if (targetm.const_not_ok_for_debug_p (rtl))
14433 {
14434 expansion_failed (NULL_TREE, rtl,
14435 "Expression rejected for debug by the backend.\n");
14436 return false;
14437 }
14438
14439 /* FIXME: Refer to PR60655. It is possible for simplification
14440 of rtl expressions in var tracking to produce such expressions.
14441 We should really identify / validate expressions
14442 enclosed in CONST that can be handled by assemblers on various
14443 targets and only handle legitimate cases here. */
14444 switch (GET_CODE (rtl))
14445 {
14446 case SYMBOL_REF:
14447 break;
14448 case NOT:
14449 case NEG:
14450 return false;
14451 default:
14452 return true;
14453 }
14454
14455 if (CONSTANT_POOL_ADDRESS_P (rtl))
14456 {
14457 bool marked;
14458 get_pool_constant_mark (rtl, &marked);
14459 /* If all references to this pool constant were optimized away,
14460 it was not output and thus we can't represent it. */
14461 if (!marked)
14462 {
14463 expansion_failed (NULL_TREE, rtl,
14464 "Constant was removed from constant pool.\n");
14465 return false;
14466 }
14467 }
14468
14469 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14470 return false;
14471
14472 /* Avoid references to external symbols in debug info, on several targets
14473 the linker might even refuse to link when linking a shared library,
14474 and in many other cases the relocations for .debug_info/.debug_loc are
14475 dropped, so the address becomes zero anyway. Hidden symbols, guaranteed
14476 to be defined within the same shared library or executable are fine. */
14477 if (SYMBOL_REF_EXTERNAL_P (rtl))
14478 {
14479 tree decl = SYMBOL_REF_DECL (rtl);
14480
14481 if (decl == NULL || !targetm.binds_local_p (decl))
14482 {
14483 expansion_failed (NULL_TREE, rtl,
14484 "Symbol not defined in current TU.\n");
14485 return false;
14486 }
14487 }
14488
14489 return true;
14490 }
14491
14492 /* Return true if constant RTL can be emitted in DW_OP_addr or
14493 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14494 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14495
14496 static bool
14497 const_ok_for_output (rtx rtl)
14498 {
14499 if (GET_CODE (rtl) == SYMBOL_REF)
14500 return const_ok_for_output_1 (rtl);
14501
14502 if (GET_CODE (rtl) == CONST)
14503 {
14504 subrtx_var_iterator::array_type array;
14505 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14506 if (!const_ok_for_output_1 (*iter))
14507 return false;
14508 return true;
14509 }
14510
14511 return true;
14512 }
14513
14514 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14515 if possible, NULL otherwise. */
14516
14517 static dw_die_ref
14518 base_type_for_mode (machine_mode mode, bool unsignedp)
14519 {
14520 dw_die_ref type_die;
14521 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14522
14523 if (type == NULL)
14524 return NULL;
14525 switch (TREE_CODE (type))
14526 {
14527 case INTEGER_TYPE:
14528 case REAL_TYPE:
14529 break;
14530 default:
14531 return NULL;
14532 }
14533 type_die = lookup_type_die (type);
14534 if (!type_die)
14535 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14536 comp_unit_die ());
14537 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14538 return NULL;
14539 return type_die;
14540 }
14541
14542 /* For the OP descriptor, assumed to be in unsigned MODE, convert it to an unsigned
14543 type matching MODE, or, if MODE is narrower than or as wide as
14544 DWARF2_ADDR_SIZE, untyped. Return NULL if the conversion is not
14545 possible. */
14546
14547 static dw_loc_descr_ref
14548 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14549 {
14550 machine_mode outer_mode = mode;
14551 dw_die_ref type_die;
14552 dw_loc_descr_ref cvt;
14553
14554 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14555 {
14556 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14557 return op;
14558 }
14559 type_die = base_type_for_mode (outer_mode, 1);
14560 if (type_die == NULL)
14561 return NULL;
14562 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14563 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14564 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14565 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14566 add_loc_descr (&op, cvt);
14567 return op;
14568 }
14569
14570 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14571
14572 static dw_loc_descr_ref
14573 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14574 dw_loc_descr_ref op1)
14575 {
14576 dw_loc_descr_ref ret = op0;
14577 add_loc_descr (&ret, op1);
14578 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14579 if (STORE_FLAG_VALUE != 1)
14580 {
14581 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14582 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14583 }
14584 return ret;
14585 }
14586
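/* Illustrative example: on a target where STORE_FLAG_VALUE is -1, the DWARF
   comparison operator still pushes 0 or 1, so the code above appends roughly
   DW_OP_const1s -1 DW_OP_mul to turn that into 0 / -1, matching what the
   target's own comparison results look like in RTL.  */
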
14587 /* Subroutine of scompare_loc_descriptor for the case in which we're
14588 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14589 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14590
14591 static dw_loc_descr_ref
14592 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14593 scalar_int_mode op_mode,
14594 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14595 {
14596 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14597 dw_loc_descr_ref cvt;
14598
14599 if (type_die == NULL)
14600 return NULL;
14601 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14602 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14603 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14604 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14605 add_loc_descr (&op0, cvt);
14606 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14607 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14608 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14609 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14610 add_loc_descr (&op1, cvt);
14611 return compare_loc_descriptor (op, op0, op1);
14612 }
14613
14614 /* Subroutine of scompare_loc_descriptor for the case in which we're
14615 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14616 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14617
14618 static dw_loc_descr_ref
14619 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14620 scalar_int_mode op_mode,
14621 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14622 {
14623 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14624 /* For eq/ne, if the operands are known to be zero-extended,
14625 there is no need to do the fancy shifting up. */
14626 if (op == DW_OP_eq || op == DW_OP_ne)
14627 {
14628 dw_loc_descr_ref last0, last1;
14629 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14630 ;
14631 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14632 ;
14633 /* deref_size zero extends, and for constants we can check
14634 whether they are zero extended or not. */
14635 if (((last0->dw_loc_opc == DW_OP_deref_size
14636 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14637 || (CONST_INT_P (XEXP (rtl, 0))
14638 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14639 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14640 && ((last1->dw_loc_opc == DW_OP_deref_size
14641 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14642 || (CONST_INT_P (XEXP (rtl, 1))
14643 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14644 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14645 return compare_loc_descriptor (op, op0, op1);
14646
14647 /* EQ/NE comparison against constant in narrower type than
14648 DWARF2_ADDR_SIZE can be performed either as
14649 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14650 DW_OP_{eq,ne}
14651 or
14652 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14653 DW_OP_{eq,ne}. Pick whatever is shorter. */
14654 if (CONST_INT_P (XEXP (rtl, 1))
14655 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14656 && (size_of_int_loc_descriptor (shift) + 1
14657 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14658 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14659 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14660 & GET_MODE_MASK (op_mode))))
14661 {
14662 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14663 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14664 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14665 & GET_MODE_MASK (op_mode));
14666 return compare_loc_descriptor (op, op0, op1);
14667 }
14668 }
14669 add_loc_descr (&op0, int_loc_descriptor (shift));
14670 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14671 if (CONST_INT_P (XEXP (rtl, 1)))
14672 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14673 else
14674 {
14675 add_loc_descr (&op1, int_loc_descriptor (shift));
14676 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14677 }
14678 return compare_loc_descriptor (op, op0, op1);
14679 }
14680
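/* Illustrative sketch: the shift-up trick above, modelled with a 16-bit
   OP_MODE and DWARF2_ADDR_SIZE == 4, so shift == 16.  The helper name
   "model_scmp16_lt" is made up for illustration.  */
#include <stdint.h>

static int
model_scmp16_lt (uint32_t a, uint32_t b)
{
  /* Only the low 16 bits of A and B are meaningful.  Shifting them up
     (a DW_OP_lit16 DW_OP_shl pair applied to both operands) moves the
     16-bit sign bit into the 32-bit sign bit, so the full-width signed
     DW_OP_lt then agrees with the 16-bit signed comparison.  */
  return (int32_t) (a << 16) < (int32_t) (b << 16);
}
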
14681 /* Return location descriptor for signed comparison OP RTL.  */
14682
14683 static dw_loc_descr_ref
14684 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14685 machine_mode mem_mode)
14686 {
14687 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14688 dw_loc_descr_ref op0, op1;
14689
14690 if (op_mode == VOIDmode)
14691 op_mode = GET_MODE (XEXP (rtl, 1));
14692 if (op_mode == VOIDmode)
14693 return NULL;
14694
14695 scalar_int_mode int_op_mode;
14696 if (dwarf_strict
14697 && dwarf_version < 5
14698 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14699 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14700 return NULL;
14701
14702 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14703 VAR_INIT_STATUS_INITIALIZED);
14704 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14705 VAR_INIT_STATUS_INITIALIZED);
14706
14707 if (op0 == NULL || op1 == NULL)
14708 return NULL;
14709
14710 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14711 {
14712 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14713 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14714
14715 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14716 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14717 }
14718 return compare_loc_descriptor (op, op0, op1);
14719 }
14720
14721 /* Return location descriptor for unsigned comparison OP RTL. */
14722
14723 static dw_loc_descr_ref
14724 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14725 machine_mode mem_mode)
14726 {
14727 dw_loc_descr_ref op0, op1;
14728
14729 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14730 if (test_op_mode == VOIDmode)
14731 test_op_mode = GET_MODE (XEXP (rtl, 1));
14732
14733 scalar_int_mode op_mode;
14734 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14735 return NULL;
14736
14737 if (dwarf_strict
14738 && dwarf_version < 5
14739 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14740 return NULL;
14741
14742 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14743 VAR_INIT_STATUS_INITIALIZED);
14744 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14745 VAR_INIT_STATUS_INITIALIZED);
14746
14747 if (op0 == NULL || op1 == NULL)
14748 return NULL;
14749
14750 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14751 {
14752 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14753 dw_loc_descr_ref last0, last1;
14754 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14755 ;
14756 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14757 ;
14758 if (CONST_INT_P (XEXP (rtl, 0)))
14759 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14760 /* deref_size zero extends, so no need to mask it again. */
14761 else if (last0->dw_loc_opc != DW_OP_deref_size
14762 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14763 {
14764 add_loc_descr (&op0, int_loc_descriptor (mask));
14765 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14766 }
14767 if (CONST_INT_P (XEXP (rtl, 1)))
14768 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14769 /* deref_size zero extends, so no need to mask it again. */
14770 else if (last1->dw_loc_opc != DW_OP_deref_size
14771 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14772 {
14773 add_loc_descr (&op1, int_loc_descriptor (mask));
14774 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14775 }
14776 }
14777 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14778 {
14779 HOST_WIDE_INT bias = 1;
14780 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14781 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14782 if (CONST_INT_P (XEXP (rtl, 1)))
14783 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14784 + INTVAL (XEXP (rtl, 1)));
14785 else
14786 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14787 bias, 0));
14788 }
14789 return compare_loc_descriptor (op, op0, op1);
14790 }
14791
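/* Illustrative sketch: the DW_OP_plus_uconst biasing used above when the
   operands are exactly as wide as DWARF2_ADDR_SIZE, modelled with
   DWARF2_ADDR_SIZE == 4.  The helper name "model_ult_bias" is made up for
   illustration.  Adding 2^31 to both operands flips their sign bits, after
   which the signed comparison gives the unsigned answer.  */
#include <stdint.h>

static int
model_ult_bias (uint32_t a, uint32_t b)
{
  const uint32_t bias = UINT32_C (1) << 31;
  return (int32_t) (a + bias) < (int32_t) (b + bias);
}
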
14792 /* Return location descriptor for {U,S}{MIN,MAX}. */
14793
14794 static dw_loc_descr_ref
14795 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14796 machine_mode mem_mode)
14797 {
14798 enum dwarf_location_atom op;
14799 dw_loc_descr_ref op0, op1, ret;
14800 dw_loc_descr_ref bra_node, drop_node;
14801
14802 scalar_int_mode int_mode;
14803 if (dwarf_strict
14804 && dwarf_version < 5
14805 && (!is_a <scalar_int_mode> (mode, &int_mode)
14806 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14807 return NULL;
14808
14809 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14810 VAR_INIT_STATUS_INITIALIZED);
14811 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14812 VAR_INIT_STATUS_INITIALIZED);
14813
14814 if (op0 == NULL || op1 == NULL)
14815 return NULL;
14816
14817 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14818 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14819 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14820 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14821 {
14822 /* Checked by the caller. */
14823 int_mode = as_a <scalar_int_mode> (mode);
14824 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14825 {
14826 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14827 add_loc_descr (&op0, int_loc_descriptor (mask));
14828 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14829 add_loc_descr (&op1, int_loc_descriptor (mask));
14830 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14831 }
14832 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14833 {
14834 HOST_WIDE_INT bias = 1;
14835 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14836 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14837 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14838 }
14839 }
14840 else if (is_a <scalar_int_mode> (mode, &int_mode)
14841 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14842 {
14843 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14844 add_loc_descr (&op0, int_loc_descriptor (shift));
14845 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14846 add_loc_descr (&op1, int_loc_descriptor (shift));
14847 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14848 }
14849 else if (is_a <scalar_int_mode> (mode, &int_mode)
14850 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14851 {
14852 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14853 dw_loc_descr_ref cvt;
14854 if (type_die == NULL)
14855 return NULL;
14856 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14857 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14858 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14859 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14860 add_loc_descr (&op0, cvt);
14861 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14862 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14863 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14864 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14865 add_loc_descr (&op1, cvt);
14866 }
14867
14868 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14869 op = DW_OP_lt;
14870 else
14871 op = DW_OP_gt;
14872 ret = op0;
14873 add_loc_descr (&ret, op1);
14874 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14875 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14876 add_loc_descr (&ret, bra_node);
14877 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14878 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14879 add_loc_descr (&ret, drop_node);
14880 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14881 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14882 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14883 && is_a <scalar_int_mode> (mode, &int_mode)
14884 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14885 ret = convert_descriptor_to_mode (int_mode, ret);
14886 return ret;
14887 }
14888
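/* Worked stack trace for SMIN of a and b, as built above:

     a DW_OP_dup  b DW_OP_swap DW_OP_over    stack: a b a b
     DW_OP_lt                                stack: a b (a<b)
     DW_OP_bra <drop>                        taken when a < b
       (fall-through) DW_OP_swap             stack: b a
     drop: DW_OP_drop                        stack: a if the branch was taken,
                                                    b otherwise

   i.e. the minimum ends up on top.  For MAX the comparison is DW_OP_gt, and
   for the unsigned variants both operands are first masked or biased as the
   code above shows.  */
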
14889 /* Helper function for mem_loc_descriptor.  Perform the binary operation OP
14890 on the two operands, converting them to TYPE_DIE first, and convert the
14891 result back to an unsigned (or untyped) value of MODE afterwards.  */
14892
14893 static dw_loc_descr_ref
14894 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14895 scalar_int_mode mode, machine_mode mem_mode)
14896 {
14897 dw_loc_descr_ref cvt, op0, op1;
14898
14899 if (type_die == NULL)
14900 return NULL;
14901 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14902 VAR_INIT_STATUS_INITIALIZED);
14903 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14904 VAR_INIT_STATUS_INITIALIZED);
14905 if (op0 == NULL || op1 == NULL)
14906 return NULL;
14907 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14908 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14909 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14910 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14911 add_loc_descr (&op0, cvt);
14912 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14913 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14914 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14915 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14916 add_loc_descr (&op1, cvt);
14917 add_loc_descr (&op0, op1);
14918 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14919 return convert_descriptor_to_mode (mode, op0);
14920 }
14921
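/* Illustrative example: for a division in an integer mode wider than
   DWARF2_ADDR_SIZE (for instance TImode on a 64-bit target), mem_loc_descriptor
   routes DIV through this helper, producing roughly

       <op0> DW_OP_convert <signed base type DIE>
       <op1> DW_OP_convert <signed base type DIE>
       DW_OP_div
       DW_OP_convert <unsigned base type for MODE>

   (or the DW_OP_GNU_convert forms before DWARF 5), so the division is done in
   the typed domain and the result is handed back as an unsigned value.  */
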
14922 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
14923 const0 is DW_OP_lit0 or corresponding typed constant,
14924 const1 is DW_OP_lit1 or corresponding typed constant
14925 and constMSB is constant with just the MSB bit set
14926 for the mode):
14927 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14928 L1: const0 DW_OP_swap
14929 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14930 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14931 L3: DW_OP_drop
14932 L4: DW_OP_nop
14933
14934 CTZ is similar:
14935 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14936 L1: const0 DW_OP_swap
14937 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14938 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14939 L3: DW_OP_drop
14940 L4: DW_OP_nop
14941
14942 FFS is similar:
14943 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
14944 L1: const1 DW_OP_swap
14945 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14946 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14947 L3: DW_OP_drop
14948 L4: DW_OP_nop */
14949
14950 static dw_loc_descr_ref
14951 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
14952 machine_mode mem_mode)
14953 {
14954 dw_loc_descr_ref op0, ret, tmp;
14955 HOST_WIDE_INT valv;
14956 dw_loc_descr_ref l1jump, l1label;
14957 dw_loc_descr_ref l2jump, l2label;
14958 dw_loc_descr_ref l3jump, l3label;
14959 dw_loc_descr_ref l4jump, l4label;
14960 rtx msb;
14961
14962 if (GET_MODE (XEXP (rtl, 0)) != mode)
14963 return NULL;
14964
14965 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14966 VAR_INIT_STATUS_INITIALIZED);
14967 if (op0 == NULL)
14968 return NULL;
14969 ret = op0;
14970 if (GET_CODE (rtl) == CLZ)
14971 {
14972 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14973 valv = GET_MODE_BITSIZE (mode);
14974 }
14975 else if (GET_CODE (rtl) == FFS)
14976 valv = 0;
14977 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
14978 valv = GET_MODE_BITSIZE (mode);
14979 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
14980 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
14981 add_loc_descr (&ret, l1jump);
14982 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
14983 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
14984 VAR_INIT_STATUS_INITIALIZED);
14985 if (tmp == NULL)
14986 return NULL;
14987 add_loc_descr (&ret, tmp);
14988 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
14989 add_loc_descr (&ret, l4jump);
14990 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
14991 ? const1_rtx : const0_rtx,
14992 mode, mem_mode,
14993 VAR_INIT_STATUS_INITIALIZED);
14994 if (l1label == NULL)
14995 return NULL;
14996 add_loc_descr (&ret, l1label);
14997 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14998 l2label = new_loc_descr (DW_OP_dup, 0, 0);
14999 add_loc_descr (&ret, l2label);
15000 if (GET_CODE (rtl) != CLZ)
15001 msb = const1_rtx;
15002 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15003 msb = GEN_INT (HOST_WIDE_INT_1U
15004 << (GET_MODE_BITSIZE (mode) - 1));
15005 else
15006 msb = immed_wide_int_const
15007 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15008 GET_MODE_PRECISION (mode)), mode);
15009 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15010 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15011 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15012 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15013 else
15014 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15015 VAR_INIT_STATUS_INITIALIZED);
15016 if (tmp == NULL)
15017 return NULL;
15018 add_loc_descr (&ret, tmp);
15019 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15020 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15021 add_loc_descr (&ret, l3jump);
15022 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15023 VAR_INIT_STATUS_INITIALIZED);
15024 if (tmp == NULL)
15025 return NULL;
15026 add_loc_descr (&ret, tmp);
15027 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15028 ? DW_OP_shl : DW_OP_shr, 0, 0));
15029 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15030 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15031 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15032 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15033 add_loc_descr (&ret, l2jump);
15034 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15035 add_loc_descr (&ret, l3label);
15036 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15037 add_loc_descr (&ret, l4label);
15038 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15039 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15040 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15041 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15042 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15043 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15044 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15045 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15046 return ret;
15047 }
15048
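/* Illustrative sketch: the DWARF loop built above for CLZ, modelled on
   32-bit host integers.  The helper name "model_clz32" is made up for
   illustration; VALV stands in for the CLZ_DEFINED_VALUE_AT_ZERO result
   (or the mode bitsize when that macro yields false).  */
#include <stdint.h>

static int
model_clz32 (uint32_t x, int valv)
{
  uint32_t msb = UINT32_C (1) << 31;   /* constMSB in the comment above.  */
  int count = 0;                       /* const0 pushed at L1.  */

  if (x == 0)                          /* DW_OP_dup DW_OP_bra <L1>: fall
                                          through to constV when X is 0.  */
    return valv;
  while ((x & msb) == 0)               /* L2: DW_OP_dup constMSB DW_OP_and.  */
    {
      x <<= 1;                         /* const1 DW_OP_shl.  */
      count++;                         /* DW_OP_plus_uconst <1>.  */
    }
  return count;                        /* L3/L4: drop the shifted value.  */
}
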
15049 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15050 const1 is DW_OP_lit1 or corresponding typed constant):
15051 const0 DW_OP_swap
15052 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15053 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15054 L2: DW_OP_drop
15055
15056 PARITY is similar:
15057 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15058 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15059 L2: DW_OP_drop */
15060
15061 static dw_loc_descr_ref
15062 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15063 machine_mode mem_mode)
15064 {
15065 dw_loc_descr_ref op0, ret, tmp;
15066 dw_loc_descr_ref l1jump, l1label;
15067 dw_loc_descr_ref l2jump, l2label;
15068
15069 if (GET_MODE (XEXP (rtl, 0)) != mode)
15070 return NULL;
15071
15072 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15073 VAR_INIT_STATUS_INITIALIZED);
15074 if (op0 == NULL)
15075 return NULL;
15076 ret = op0;
15077 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15078 VAR_INIT_STATUS_INITIALIZED);
15079 if (tmp == NULL)
15080 return NULL;
15081 add_loc_descr (&ret, tmp);
15082 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15083 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15084 add_loc_descr (&ret, l1label);
15085 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15086 add_loc_descr (&ret, l2jump);
15087 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15088 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15089 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15090 VAR_INIT_STATUS_INITIALIZED);
15091 if (tmp == NULL)
15092 return NULL;
15093 add_loc_descr (&ret, tmp);
15094 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15095 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15096 ? DW_OP_plus : DW_OP_xor, 0, 0));
15097 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15098 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15099 VAR_INIT_STATUS_INITIALIZED);
15100 add_loc_descr (&ret, tmp);
15101 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15102 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15103 add_loc_descr (&ret, l1jump);
15104 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15105 add_loc_descr (&ret, l2label);
15106 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15107 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15108 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15109 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15110 return ret;
15111 }
15112
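/* Illustrative sketch: the DWARF loop built above for POPCOUNT, modelled on
   32-bit host integers; the helper name "model_popcount32" is made up for
   illustration.  PARITY is the same loop with ^= instead of +=.  */
#include <stdint.h>

static int
model_popcount32 (uint32_t x)
{
  int count = 0;               /* const0 DW_OP_swap.  */
  while (x != 0)               /* L1: DW_OP_dup DW_OP_bra <L2>.  */
    {
      count += x & 1;          /* DW_OP_dup DW_OP_rot const1 DW_OP_and
                                  DW_OP_plus.  */
      x >>= 1;                 /* DW_OP_swap const1 DW_OP_shr.  */
    }
  return count;                /* L2: DW_OP_drop.  */
}
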
15113 /* BSWAP (constS is initial shift count, either 56 or 24):
15114 constS const0
15115 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15116 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15117 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15118 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15119 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
15120
15121 static dw_loc_descr_ref
15122 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15123 machine_mode mem_mode)
15124 {
15125 dw_loc_descr_ref op0, ret, tmp;
15126 dw_loc_descr_ref l1jump, l1label;
15127 dw_loc_descr_ref l2jump, l2label;
15128
15129 if (BITS_PER_UNIT != 8
15130 || (GET_MODE_BITSIZE (mode) != 32
15131 && GET_MODE_BITSIZE (mode) != 64))
15132 return NULL;
15133
15134 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15135 VAR_INIT_STATUS_INITIALIZED);
15136 if (op0 == NULL)
15137 return NULL;
15138
15139 ret = op0;
15140 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15141 mode, mem_mode,
15142 VAR_INIT_STATUS_INITIALIZED);
15143 if (tmp == NULL)
15144 return NULL;
15145 add_loc_descr (&ret, tmp);
15146 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15147 VAR_INIT_STATUS_INITIALIZED);
15148 if (tmp == NULL)
15149 return NULL;
15150 add_loc_descr (&ret, tmp);
15151 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15152 add_loc_descr (&ret, l1label);
15153 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15154 mode, mem_mode,
15155 VAR_INIT_STATUS_INITIALIZED);
15156 add_loc_descr (&ret, tmp);
15157 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15158 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15159 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15160 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15161 VAR_INIT_STATUS_INITIALIZED);
15162 if (tmp == NULL)
15163 return NULL;
15164 add_loc_descr (&ret, tmp);
15165 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15166 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15167 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15168 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15169 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15170 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15171 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15172 VAR_INIT_STATUS_INITIALIZED);
15173 add_loc_descr (&ret, tmp);
15174 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15175 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15176 add_loc_descr (&ret, l2jump);
15177 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15178 VAR_INIT_STATUS_INITIALIZED);
15179 add_loc_descr (&ret, tmp);
15180 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15181 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15182 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15183 add_loc_descr (&ret, l1jump);
15184 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15185 add_loc_descr (&ret, l2label);
15186 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15187 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15188 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15189 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15190 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15191 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15192 return ret;
15193 }
15194
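/* Illustrative sketch: the byte-swap loop built above, modelled on a 32-bit
   value (so constS == 24).  The helper name "model_bswap32" is made up for
   illustration.  Each pass extracts the byte at bit position (24 - s) and
   ORs it into the result at bit position s, with s counting down from 24 to
   0 in steps of 8 (the const8 DW_OP_minus in the loop).  */
#include <stdint.h>

static uint32_t
model_bswap32 (uint32_t x)
{
  uint32_t result = 0;
  for (int s = 24; s >= 0; s -= 8)
    result |= ((x >> (24 - s)) & 0xffu) << s;
  return result;
}
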
15195 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15196 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15197 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15198 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15199
15200 ROTATERT is similar:
15201 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15202 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15203 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
15204
15205 static dw_loc_descr_ref
15206 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15207 machine_mode mem_mode)
15208 {
15209 rtx rtlop1 = XEXP (rtl, 1);
15210 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15211 int i;
15212
15213 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15214 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15215 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15216 VAR_INIT_STATUS_INITIALIZED);
15217 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15218 VAR_INIT_STATUS_INITIALIZED);
15219 if (op0 == NULL || op1 == NULL)
15220 return NULL;
15221 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15222 for (i = 0; i < 2; i++)
15223 {
15224 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15225 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15226 mode, mem_mode,
15227 VAR_INIT_STATUS_INITIALIZED);
15228 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15229 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15230 ? DW_OP_const4u
15231 : HOST_BITS_PER_WIDE_INT == 64
15232 ? DW_OP_const8u : DW_OP_constu,
15233 GET_MODE_MASK (mode), 0);
15234 else
15235 mask[i] = NULL;
15236 if (mask[i] == NULL)
15237 return NULL;
15238 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15239 }
15240 ret = op0;
15241 add_loc_descr (&ret, op1);
15242 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15243 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15244 if (GET_CODE (rtl) == ROTATERT)
15245 {
15246 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15247 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15248 GET_MODE_BITSIZE (mode), 0));
15249 }
15250 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15251 if (mask[0] != NULL)
15252 add_loc_descr (&ret, mask[0]);
15253 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15254 if (mask[1] != NULL)
15255 {
15256 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15257 add_loc_descr (&ret, mask[1]);
15258 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15259 }
15260 if (GET_CODE (rtl) == ROTATE)
15261 {
15262 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15263 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15264 GET_MODE_BITSIZE (mode), 0));
15265 }
15266 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15267 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15268 return ret;
15269 }
15270
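/* Illustrative sketch: the ROTATE expression above, modelled on 32-bit host
   integers; the helper name "model_rotl32" is made up for illustration.  It
   assumes 0 < n < 32 so that neither shift count is out of range.  */
#include <stdint.h>

static uint32_t
model_rotl32 (uint32_t x, unsigned int n)
{
  /* DW_OP_shl, then DW_OP_neg DW_OP_plus_uconst <32> DW_OP_shr DW_OP_or.  */
  return (x << n) | (x >> (32 - n));
}
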
15271 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15272 for DEBUG_PARAMETER_REF RTL. */
15273
15274 static dw_loc_descr_ref
15275 parameter_ref_descriptor (rtx rtl)
15276 {
15277 dw_loc_descr_ref ret;
15278 dw_die_ref ref;
15279
15280 if (dwarf_strict)
15281 return NULL;
15282 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15283 /* With LTO during LTRANS we get the late DIE that refers to the early
15284 DIE, thus we add another indirection here. This seems to confuse
15285 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15286 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15287 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15288 if (ref)
15289 {
15290 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15291 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15292 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15293 }
15294 else
15295 {
15296 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15297 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15298 }
15299 return ret;
15300 }
15301
15302 /* The following routine converts the RTL for a variable or parameter
15303 (resident in memory) into an equivalent Dwarf representation of a
15304 mechanism for getting the address of that same variable onto the top of a
15305 hypothetical "address evaluation" stack.
15306
15307 When creating memory location descriptors, we are effectively transforming
15308 the RTL for a memory-resident object into its Dwarf postfix expression
15309 equivalent. This routine recursively descends an RTL tree, turning
15310 it into Dwarf postfix code as it goes.
15311
15312 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15313
15314 MEM_MODE is the mode of the memory reference, needed to handle some
15315 autoincrement addressing modes.
15316
15317 Return 0 if we can't represent the location. */
15318
15319 dw_loc_descr_ref
15320 mem_loc_descriptor (rtx rtl, machine_mode mode,
15321 machine_mode mem_mode,
15322 enum var_init_status initialized)
15323 {
15324 dw_loc_descr_ref mem_loc_result = NULL;
15325 enum dwarf_location_atom op;
15326 dw_loc_descr_ref op0, op1;
15327 rtx inner = NULL_RTX;
15328 poly_int64 offset;
15329
15330 if (mode == VOIDmode)
15331 mode = GET_MODE (rtl);
15332
15333 /* Note that for a dynamically sized array, the location we will generate a
15334 description of here will be the lowest numbered location which is
15335 actually within the array. That's *not* necessarily the same as the
15336 zeroth element of the array. */
15337
15338 rtl = targetm.delegitimize_address (rtl);
15339
15340 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15341 return NULL;
15342
15343 scalar_int_mode int_mode, inner_mode, op1_mode;
15344 switch (GET_CODE (rtl))
15345 {
15346 case POST_INC:
15347 case POST_DEC:
15348 case POST_MODIFY:
15349 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15350
15351 case SUBREG:
15352 /* The case of a subreg may arise when we have a local (register)
15353 variable or a formal (register) parameter which doesn't quite fill
15354 up an entire register. For now, just assume that it is
15355 legitimate to make the Dwarf info refer to the whole register which
15356 contains the given subreg. */
15357 if (!subreg_lowpart_p (rtl))
15358 break;
15359 inner = SUBREG_REG (rtl);
15360 /* FALLTHRU */
15361 case TRUNCATE:
15362 if (inner == NULL_RTX)
15363 inner = XEXP (rtl, 0);
15364 if (is_a <scalar_int_mode> (mode, &int_mode)
15365 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15366 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15367 #ifdef POINTERS_EXTEND_UNSIGNED
15368 || (int_mode == Pmode && mem_mode != VOIDmode)
15369 #endif
15370 )
15371 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15372 {
15373 mem_loc_result = mem_loc_descriptor (inner,
15374 inner_mode,
15375 mem_mode, initialized);
15376 break;
15377 }
15378 if (dwarf_strict && dwarf_version < 5)
15379 break;
15380 if (is_a <scalar_int_mode> (mode, &int_mode)
15381 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15382 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15383 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15384 {
15385 dw_die_ref type_die;
15386 dw_loc_descr_ref cvt;
15387
15388 mem_loc_result = mem_loc_descriptor (inner,
15389 GET_MODE (inner),
15390 mem_mode, initialized);
15391 if (mem_loc_result == NULL)
15392 break;
15393 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15394 if (type_die == NULL)
15395 {
15396 mem_loc_result = NULL;
15397 break;
15398 }
15399 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15400 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15401 else
15402 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15403 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15404 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15405 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15406 add_loc_descr (&mem_loc_result, cvt);
15407 if (is_a <scalar_int_mode> (mode, &int_mode)
15408 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15409 {
15410 /* Convert it to untyped afterwards. */
15411 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15412 add_loc_descr (&mem_loc_result, cvt);
15413 }
15414 }
15415 break;
15416
15417 case REG:
15418 if (!is_a <scalar_int_mode> (mode, &int_mode)
15419 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15420 && rtl != arg_pointer_rtx
15421 && rtl != frame_pointer_rtx
15422 #ifdef POINTERS_EXTEND_UNSIGNED
15423 && (int_mode != Pmode || mem_mode == VOIDmode)
15424 #endif
15425 ))
15426 {
15427 dw_die_ref type_die;
15428 unsigned int dbx_regnum;
15429
15430 if (dwarf_strict && dwarf_version < 5)
15431 break;
15432 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
15433 break;
15434 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15435 if (type_die == NULL)
15436 break;
15437
15438 dbx_regnum = dbx_reg_number (rtl);
15439 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15440 break;
15441 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15442 dbx_regnum, 0);
15443 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15444 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15445 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15446 break;
15447 }
15448 /* Whenever a register number forms a part of the description of the
15449 method for calculating the (dynamic) address of a memory resident
15450 object, DWARF rules require the register number be referred to as
15451 a "base register". This distinction is not based in any way upon
15452 what category of register the hardware believes the given register
15453 belongs to. This is strictly DWARF terminology we're dealing with
15454 here. Note that in cases where the location of a memory-resident
15455 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15456 OP_CONST (0)) the actual DWARF location descriptor that we generate
15457 may just be OP_BASEREG (basereg). This may look deceptively like
15458 the object in question was allocated to a register (rather than in
15459 memory) so DWARF consumers need to be aware of the subtle
15460 distinction between OP_REG and OP_BASEREG. */
15461 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15462 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15463 else if (stack_realign_drap
15464 && crtl->drap_reg
15465 && crtl->args.internal_arg_pointer == rtl
15466 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15467 {
15468 /* If RTL is internal_arg_pointer, which has been optimized
15469 out, use DRAP instead. */
15470 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15471 VAR_INIT_STATUS_INITIALIZED);
15472 }
15473 break;
15474
15475 case SIGN_EXTEND:
15476 case ZERO_EXTEND:
15477 if (!is_a <scalar_int_mode> (mode, &int_mode)
15478 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15479 break;
15480 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15481 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15482 if (op0 == 0)
15483 break;
15484 else if (GET_CODE (rtl) == ZERO_EXTEND
15485 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15486 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15487 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15488 to expand zero extend as two shifts instead of
15489 masking. */
15490 && GET_MODE_SIZE (inner_mode) <= 4)
15491 {
15492 mem_loc_result = op0;
15493 add_loc_descr (&mem_loc_result,
15494 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15495 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15496 }
15497 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15498 {
15499 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15500 shift *= BITS_PER_UNIT;
15501 if (GET_CODE (rtl) == SIGN_EXTEND)
15502 op = DW_OP_shra;
15503 else
15504 op = DW_OP_shr;
15505 mem_loc_result = op0;
15506 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15507 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15508 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15509 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15510 }
15511 else if (!dwarf_strict || dwarf_version >= 5)
15512 {
15513 dw_die_ref type_die1, type_die2;
15514 dw_loc_descr_ref cvt;
15515
15516 type_die1 = base_type_for_mode (inner_mode,
15517 GET_CODE (rtl) == ZERO_EXTEND);
15518 if (type_die1 == NULL)
15519 break;
15520 type_die2 = base_type_for_mode (int_mode, 1);
15521 if (type_die2 == NULL)
15522 break;
15523 mem_loc_result = op0;
15524 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15525 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15526 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15527 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15528 add_loc_descr (&mem_loc_result, cvt);
15529 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15530 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15531 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15532 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15533 add_loc_descr (&mem_loc_result, cvt);
15534 }
15535 break;
15536
15537 case MEM:
15538 {
15539 rtx new_rtl = avoid_constant_pool_reference (rtl);
15540 if (new_rtl != rtl)
15541 {
15542 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15543 initialized);
15544 if (mem_loc_result != NULL)
15545 return mem_loc_result;
15546 }
15547 }
15548 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15549 get_address_mode (rtl), mode,
15550 VAR_INIT_STATUS_INITIALIZED);
15551 if (mem_loc_result == NULL)
15552 mem_loc_result = tls_mem_loc_descriptor (rtl);
15553 if (mem_loc_result != NULL)
15554 {
15555 if (!is_a <scalar_int_mode> (mode, &int_mode)
15556 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15557 {
15558 dw_die_ref type_die;
15559 dw_loc_descr_ref deref;
15560 HOST_WIDE_INT size;
15561
15562 if (dwarf_strict && dwarf_version < 5)
15563 return NULL;
15564 if (!GET_MODE_SIZE (mode).is_constant (&size))
15565 return NULL;
15566 type_die
15567 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15568 if (type_die == NULL)
15569 return NULL;
15570 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15571 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15572 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15573 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15574 add_loc_descr (&mem_loc_result, deref);
15575 }
15576 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15577 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15578 else
15579 add_loc_descr (&mem_loc_result,
15580 new_loc_descr (DW_OP_deref_size,
15581 GET_MODE_SIZE (int_mode), 0));
15582 }
15583 break;
15584
15585 case LO_SUM:
15586 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15587
15588 case LABEL_REF:
15589 /* Some ports can transform a symbol ref into a label ref, because
15590 the symbol ref is too far away and has to be dumped into a constant
15591 pool. */
15592 case CONST:
15593 case SYMBOL_REF:
15594 if (!is_a <scalar_int_mode> (mode, &int_mode)
15595 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15596 #ifdef POINTERS_EXTEND_UNSIGNED
15597 && (int_mode != Pmode || mem_mode == VOIDmode)
15598 #endif
15599 ))
15600 break;
15601 if (GET_CODE (rtl) == SYMBOL_REF
15602 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15603 {
15604 dw_loc_descr_ref temp;
15605
15606 /* If this is not defined, we have no way to emit the data. */
15607 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15608 break;
15609
15610 temp = new_addr_loc_descr (rtl, dtprel_true);
15611
15612 /* We check for DWARF 5 here because gdb did not implement
15613 DW_OP_form_tls_address until after 7.12. */
15614 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15615 ? DW_OP_form_tls_address
15616 : DW_OP_GNU_push_tls_address),
15617 0, 0);
15618 add_loc_descr (&mem_loc_result, temp);
15619
15620 break;
15621 }
15622
15623 if (!const_ok_for_output (rtl))
15624 {
15625 if (GET_CODE (rtl) == CONST)
15626 switch (GET_CODE (XEXP (rtl, 0)))
15627 {
15628 case NOT:
15629 op = DW_OP_not;
15630 goto try_const_unop;
15631 case NEG:
15632 op = DW_OP_neg;
15633 goto try_const_unop;
15634 try_const_unop:
15635 rtx arg;
15636 arg = XEXP (XEXP (rtl, 0), 0);
15637 if (!CONSTANT_P (arg))
15638 arg = gen_rtx_CONST (int_mode, arg);
15639 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15640 initialized);
15641 if (op0)
15642 {
15643 mem_loc_result = op0;
15644 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15645 }
15646 break;
15647 default:
15648 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15649 mem_mode, initialized);
15650 break;
15651 }
15652 break;
15653 }
15654
15655 symref:
15656 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15657 vec_safe_push (used_rtx_array, rtl);
15658 break;
15659
15660 case CONCAT:
15661 case CONCATN:
15662 case VAR_LOCATION:
15663 case DEBUG_IMPLICIT_PTR:
15664 expansion_failed (NULL_TREE, rtl,
15665 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15666 return 0;
15667
15668 case ENTRY_VALUE:
15669 if (dwarf_strict && dwarf_version < 5)
15670 return NULL;
15671 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15672 {
15673 if (!is_a <scalar_int_mode> (mode, &int_mode)
15674 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15675 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15676 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15677 else
15678 {
15679 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15680 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15681 return NULL;
15682 op0 = one_reg_loc_descriptor (dbx_regnum,
15683 VAR_INIT_STATUS_INITIALIZED);
15684 }
15685 }
15686 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15687 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15688 {
15689 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15690 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15691 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15692 return NULL;
15693 }
15694 else
15695 gcc_unreachable ();
15696 if (op0 == NULL)
15697 return NULL;
15698 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15699 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15700 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15701 break;
15702
15703 case DEBUG_PARAMETER_REF:
15704 mem_loc_result = parameter_ref_descriptor (rtl);
15705 break;
15706
15707 case PRE_MODIFY:
15708 /* Extract the PLUS expression nested inside and fall into
15709 PLUS code below. */
15710 rtl = XEXP (rtl, 1);
15711 goto plus;
15712
15713 case PRE_INC:
15714 case PRE_DEC:
15715 /* Turn these into a PLUS expression and fall into the PLUS code
15716 below. */
15717 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15718 gen_int_mode (GET_CODE (rtl) == PRE_INC
15719 ? GET_MODE_UNIT_SIZE (mem_mode)
15720 : -GET_MODE_UNIT_SIZE (mem_mode),
15721 mode));
15722
15723 /* fall through */
15724
15725 case PLUS:
15726 plus:
15727 if (is_based_loc (rtl)
15728 && is_a <scalar_int_mode> (mode, &int_mode)
15729 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15730 || XEXP (rtl, 0) == arg_pointer_rtx
15731 || XEXP (rtl, 0) == frame_pointer_rtx))
15732 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15733 INTVAL (XEXP (rtl, 1)),
15734 VAR_INIT_STATUS_INITIALIZED);
15735 else
15736 {
15737 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15738 VAR_INIT_STATUS_INITIALIZED);
15739 if (mem_loc_result == 0)
15740 break;
15741
15742 if (CONST_INT_P (XEXP (rtl, 1))
15743 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15744 <= DWARF2_ADDR_SIZE))
15745 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15746 else
15747 {
15748 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15749 VAR_INIT_STATUS_INITIALIZED);
15750 if (op1 == 0)
15751 return NULL;
15752 add_loc_descr (&mem_loc_result, op1);
15753 add_loc_descr (&mem_loc_result,
15754 new_loc_descr (DW_OP_plus, 0, 0));
15755 }
15756 }
15757 break;
15758
15759 /* If a pseudo-reg is optimized away, it is possible for it to
15760 be replaced with a MEM containing a multiply or shift. */
15761 case MINUS:
15762 op = DW_OP_minus;
15763 goto do_binop;
15764
15765 case MULT:
15766 op = DW_OP_mul;
15767 goto do_binop;
15768
15769 case DIV:
15770 if ((!dwarf_strict || dwarf_version >= 5)
15771 && is_a <scalar_int_mode> (mode, &int_mode)
15772 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15773 {
15774 mem_loc_result = typed_binop (DW_OP_div, rtl,
15775 base_type_for_mode (mode, 0),
15776 int_mode, mem_mode);
15777 break;
15778 }
15779 op = DW_OP_div;
15780 goto do_binop;
15781
15782 case UMOD:
15783 op = DW_OP_mod;
15784 goto do_binop;
15785
15786 case ASHIFT:
15787 op = DW_OP_shl;
15788 goto do_shift;
15789
15790 case ASHIFTRT:
15791 op = DW_OP_shra;
15792 goto do_shift;
15793
15794 case LSHIFTRT:
15795 op = DW_OP_shr;
15796 goto do_shift;
15797
15798 do_shift:
15799 if (!is_a <scalar_int_mode> (mode, &int_mode))
15800 break;
15801 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15802 VAR_INIT_STATUS_INITIALIZED);
15803 {
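/* Illustrative note: the shift count is evaluated in the same scalar
   integer mode as the shifted value, so a narrower count is zero-extended
   below before its location description is built.  */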
15804 rtx rtlop1 = XEXP (rtl, 1);
15805 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15806 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15807 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15808 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15809 VAR_INIT_STATUS_INITIALIZED);
15810 }
15811
15812 if (op0 == 0 || op1 == 0)
15813 break;
15814
15815 mem_loc_result = op0;
15816 add_loc_descr (&mem_loc_result, op1);
15817 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15818 break;
15819
15820 case AND:
15821 op = DW_OP_and;
15822 goto do_binop;
15823
15824 case IOR:
15825 op = DW_OP_or;
15826 goto do_binop;
15827
15828 case XOR:
15829 op = DW_OP_xor;
15830 goto do_binop;
15831
15832 do_binop:
15833 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15834 VAR_INIT_STATUS_INITIALIZED);
15835 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15836 VAR_INIT_STATUS_INITIALIZED);
15837
15838 if (op0 == 0 || op1 == 0)
15839 break;
15840
15841 mem_loc_result = op0;
15842 add_loc_descr (&mem_loc_result, op1);
15843 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15844 break;
15845
15846 case MOD:
15847 if ((!dwarf_strict || dwarf_version >= 5)
15848 && is_a <scalar_int_mode> (mode, &int_mode)
15849 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15850 {
15851 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15852 base_type_for_mode (mode, 0),
15853 int_mode, mem_mode);
15854 break;
15855 }
15856
15857 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15858 VAR_INIT_STATUS_INITIALIZED);
15859 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15860 VAR_INIT_STATUS_INITIALIZED);
15861
15862 if (op0 == 0 || op1 == 0)
15863 break;
15864
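/* With op0 and op1 pushed on the DWARF stack, the DW_OP_over, DW_OP_over,
   DW_OP_div, DW_OP_mul, DW_OP_minus sequence below leaves
   op0 - (op0 / op1) * op1, i.e. the remainder of the signed division
   (plain DW_OP_mod is already used for UMOD above).  */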
15865 mem_loc_result = op0;
15866 add_loc_descr (&mem_loc_result, op1);
15867 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15868 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15869 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15870 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15871 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15872 break;
15873
15874 case UDIV:
15875 if ((!dwarf_strict || dwarf_version >= 5)
15876 && is_a <scalar_int_mode> (mode, &int_mode))
15877 {
15878 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15879 {
15880 op = DW_OP_div;
15881 goto do_binop;
15882 }
15883 mem_loc_result = typed_binop (DW_OP_div, rtl,
15884 base_type_for_mode (int_mode, 1),
15885 int_mode, mem_mode);
15886 }
15887 break;
15888
15889 case NOT:
15890 op = DW_OP_not;
15891 goto do_unop;
15892
15893 case ABS:
15894 op = DW_OP_abs;
15895 goto do_unop;
15896
15897 case NEG:
15898 op = DW_OP_neg;
15899 goto do_unop;
15900
15901 do_unop:
15902 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15903 VAR_INIT_STATUS_INITIALIZED);
15904
15905 if (op0 == 0)
15906 break;
15907
15908 mem_loc_result = op0;
15909 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15910 break;
15911
15912 case CONST_INT:
15913 if (!is_a <scalar_int_mode> (mode, &int_mode)
15914 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15915 #ifdef POINTERS_EXTEND_UNSIGNED
15916 || (int_mode == Pmode
15917 && mem_mode != VOIDmode
15918 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15919 #endif
15920 )
15921 {
15922 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15923 break;
15924 }
15925 if ((!dwarf_strict || dwarf_version >= 5)
15926 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
15927 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
15928 {
15929 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
15930 scalar_int_mode amode;
15931 if (type_die == NULL)
15932 return NULL;
15933 if (INTVAL (rtl) >= 0
15934 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
15935 .exists (&amode))
15936 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
15937 /* const DW_OP_convert <XXX> vs.
15938 DW_OP_const_type <XXX, 1, const>. */
15939 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
15940 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
15941 {
15942 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15943 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15944 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15945 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15946 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
15947 add_loc_descr (&mem_loc_result, op0);
15948 return mem_loc_result;
15949 }
15950 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
15951 INTVAL (rtl));
15952 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15953 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15954 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15955 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
15956 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
15957 else
15958 {
15959 mem_loc_result->dw_loc_oprnd2.val_class
15960 = dw_val_class_const_double;
15961 mem_loc_result->dw_loc_oprnd2.v.val_double
15962 = double_int::from_shwi (INTVAL (rtl));
15963 }
15964 }
15965 break;
15966
15967 case CONST_DOUBLE:
15968 if (!dwarf_strict || dwarf_version >= 5)
15969 {
15970 dw_die_ref type_die;
15971
15972 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
15973 CONST_DOUBLE rtx could represent either a large integer
15974 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
15975 the value is always a floating point constant.
15976
15977 When it is an integer, a CONST_DOUBLE is used whenever
15978 the constant requires 2 HWIs to be adequately represented.
15979 We output CONST_DOUBLEs as blocks. */
15980 if (mode == VOIDmode
15981 || (GET_MODE (rtl) == VOIDmode
15982 && maybe_ne (GET_MODE_BITSIZE (mode),
15983 HOST_BITS_PER_DOUBLE_INT)))
15984 break;
15985 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15986 if (type_die == NULL)
15987 return NULL;
15988 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
15989 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15990 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15991 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
15992 #if TARGET_SUPPORTS_WIDE_INT == 0
15993 if (!SCALAR_FLOAT_MODE_P (mode))
15994 {
15995 mem_loc_result->dw_loc_oprnd2.val_class
15996 = dw_val_class_const_double;
15997 mem_loc_result->dw_loc_oprnd2.v.val_double
15998 = rtx_to_double_int (rtl);
15999 }
16000 else
16001 #endif
16002 {
16003 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16004 unsigned int length = GET_MODE_SIZE (float_mode);
16005 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16006
16007 insert_float (rtl, array);
16008 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16009 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16010 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16011 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16012 }
16013 }
16014 break;
16015
16016 case CONST_WIDE_INT:
16017 if (!dwarf_strict || dwarf_version >= 5)
16018 {
16019 dw_die_ref type_die;
16020
16021 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16022 if (type_die == NULL)
16023 return NULL;
16024 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16025 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16026 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16027 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16028 mem_loc_result->dw_loc_oprnd2.val_class
16029 = dw_val_class_wide_int;
16030 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16031 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16032 }
16033 break;
16034
16035 case CONST_POLY_INT:
16036 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16037 break;
16038
16039 case EQ:
16040 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16041 break;
16042
16043 case GE:
16044 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16045 break;
16046
16047 case GT:
16048 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16049 break;
16050
16051 case LE:
16052 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16053 break;
16054
16055 case LT:
16056 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16057 break;
16058
16059 case NE:
16060 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16061 break;
16062
16063 case GEU:
16064 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16065 break;
16066
16067 case GTU:
16068 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16069 break;
16070
16071 case LEU:
16072 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16073 break;
16074
16075 case LTU:
16076 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16077 break;
16078
16079 case UMIN:
16080 case UMAX:
16081 if (!SCALAR_INT_MODE_P (mode))
16082 break;
16083 /* FALLTHRU */
16084 case SMIN:
16085 case SMAX:
16086 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16087 break;
16088
16089 case ZERO_EXTRACT:
16090 case SIGN_EXTRACT:
16091 if (CONST_INT_P (XEXP (rtl, 1))
16092 && CONST_INT_P (XEXP (rtl, 2))
16093 && is_a <scalar_int_mode> (mode, &int_mode)
16094 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16095 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16096 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16097 && ((unsigned) INTVAL (XEXP (rtl, 1))
16098 + (unsigned) INTVAL (XEXP (rtl, 2))
16099 <= GET_MODE_BITSIZE (int_mode)))
16100 {
16101 int shift, size;
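/* The extracted field is shifted up to the top bits of a
   DWARF2_ADDR_SIZE-wide value with DW_OP_shl and then shifted back down
   with DW_OP_shr (ZERO_EXTRACT) or DW_OP_shra (SIGN_EXTRACT), which
   zero- or sign-extends the result.  */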
16102 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16103 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16104 if (op0 == 0)
16105 break;
16106 if (GET_CODE (rtl) == SIGN_EXTRACT)
16107 op = DW_OP_shra;
16108 else
16109 op = DW_OP_shr;
16110 mem_loc_result = op0;
16111 size = INTVAL (XEXP (rtl, 1));
16112 shift = INTVAL (XEXP (rtl, 2));
16113 if (BITS_BIG_ENDIAN)
16114 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
16115 if (shift + size != (int) DWARF2_ADDR_SIZE)
16116 {
16117 add_loc_descr (&mem_loc_result,
16118 int_loc_descriptor (DWARF2_ADDR_SIZE
16119 - shift - size));
16120 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16121 }
16122 if (size != (int) DWARF2_ADDR_SIZE)
16123 {
16124 add_loc_descr (&mem_loc_result,
16125 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16126 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16127 }
16128 }
16129 break;
16130
16131 case IF_THEN_ELSE:
16132 {
16133 dw_loc_descr_ref op2, bra_node, drop_node;
16134 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16135 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16136 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16137 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16138 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16139 VAR_INIT_STATUS_INITIALIZED);
16140 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16141 VAR_INIT_STATUS_INITIALIZED);
16142 if (op0 == NULL || op1 == NULL || op2 == NULL)
16143 break;
16144
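/* Push both alternatives and then the condition; DW_OP_bra transfers
   control to the DW_OP_drop when the condition is nonzero, skipping the
   DW_OP_swap, so the value left on the stack is op1 when the condition
   holds and op2 otherwise.  */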
16145 mem_loc_result = op1;
16146 add_loc_descr (&mem_loc_result, op2);
16147 add_loc_descr (&mem_loc_result, op0);
16148 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16149 add_loc_descr (&mem_loc_result, bra_node);
16150 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16151 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16152 add_loc_descr (&mem_loc_result, drop_node);
16153 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16154 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16155 }
16156 break;
16157
16158 case FLOAT_EXTEND:
16159 case FLOAT_TRUNCATE:
16160 case FLOAT:
16161 case UNSIGNED_FLOAT:
16162 case FIX:
16163 case UNSIGNED_FIX:
16164 if (!dwarf_strict || dwarf_version >= 5)
16165 {
16166 dw_die_ref type_die;
16167 dw_loc_descr_ref cvt;
16168
16169 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16170 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16171 if (op0 == NULL)
16172 break;
16173 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16174 && (GET_CODE (rtl) == FLOAT
16175 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16176 {
16177 type_die = base_type_for_mode (int_mode,
16178 GET_CODE (rtl) == UNSIGNED_FLOAT);
16179 if (type_die == NULL)
16180 break;
16181 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16182 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16183 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16184 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16185 add_loc_descr (&op0, cvt);
16186 }
16187 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16188 if (type_die == NULL)
16189 break;
16190 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16191 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16192 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16193 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16194 add_loc_descr (&op0, cvt);
16195 if (is_a <scalar_int_mode> (mode, &int_mode)
16196 && (GET_CODE (rtl) == FIX
16197 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16198 {
16199 op0 = convert_descriptor_to_mode (int_mode, op0);
16200 if (op0 == NULL)
16201 break;
16202 }
16203 mem_loc_result = op0;
16204 }
16205 break;
16206
16207 case CLZ:
16208 case CTZ:
16209 case FFS:
16210 if (is_a <scalar_int_mode> (mode, &int_mode))
16211 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16212 break;
16213
16214 case POPCOUNT:
16215 case PARITY:
16216 if (is_a <scalar_int_mode> (mode, &int_mode))
16217 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16218 break;
16219
16220 case BSWAP:
16221 if (is_a <scalar_int_mode> (mode, &int_mode))
16222 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16223 break;
16224
16225 case ROTATE:
16226 case ROTATERT:
16227 if (is_a <scalar_int_mode> (mode, &int_mode))
16228 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16229 break;
16230
16231 case COMPARE:
16232 /* In theory, we could implement the above. */
16233 /* DWARF cannot represent the unsigned compare operations
16234 natively. */
16235 case SS_MULT:
16236 case US_MULT:
16237 case SS_DIV:
16238 case US_DIV:
16239 case SS_PLUS:
16240 case US_PLUS:
16241 case SS_MINUS:
16242 case US_MINUS:
16243 case SS_NEG:
16244 case US_NEG:
16245 case SS_ABS:
16246 case SS_ASHIFT:
16247 case US_ASHIFT:
16248 case SS_TRUNCATE:
16249 case US_TRUNCATE:
16250 case UNORDERED:
16251 case ORDERED:
16252 case UNEQ:
16253 case UNGE:
16254 case UNGT:
16255 case UNLE:
16256 case UNLT:
16257 case LTGT:
16258 case FRACT_CONVERT:
16259 case UNSIGNED_FRACT_CONVERT:
16260 case SAT_FRACT:
16261 case UNSIGNED_SAT_FRACT:
16262 case SQRT:
16263 case ASM_OPERANDS:
16264 case VEC_MERGE:
16265 case VEC_SELECT:
16266 case VEC_CONCAT:
16267 case VEC_DUPLICATE:
16268 case VEC_SERIES:
16269 case UNSPEC:
16270 case HIGH:
16271 case FMA:
16272 case STRICT_LOW_PART:
16273 case CONST_VECTOR:
16274 case CONST_FIXED:
16275 case CLRSB:
16276 case CLOBBER:
16277 case CLOBBER_HIGH:
16278 /* If delegitimize_address couldn't do anything with the UNSPEC, we
16279 can't express it in the debug info. This can happen e.g. with some
16280 TLS UNSPECs. */
16281 break;
16282
16283 case CONST_STRING:
16284 resolve_one_addr (&rtl);
16285 goto symref;
16286
16287 /* RTL sequences inside PARALLEL record a series of DWARF operations for
16288 the expression. An UNSPEC rtx represents a raw DWARF operation;
16289 new_loc_descr is called for it to build the operation directly.
16290 Otherwise mem_loc_descriptor is called recursively. */
16291 case PARALLEL:
16292 {
16293 int index = 0;
16294 dw_loc_descr_ref exp_result = NULL;
16295
16296 for (; index < XVECLEN (rtl, 0); index++)
16297 {
16298 rtx elem = XVECEXP (rtl, 0, index);
16299 if (GET_CODE (elem) == UNSPEC)
16300 {
16301 /* Each DWARF operation UNSPEC contains two operands; if
16302 one operand is not used for the operation, const0_rtx is
16303 passed. */
16304 gcc_assert (XVECLEN (elem, 0) == 2);
16305
16306 HOST_WIDE_INT dw_op = XINT (elem, 1);
16307 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16308 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16309 exp_result
16310 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16311 oprnd2);
16312 }
16313 else
16314 exp_result
16315 = mem_loc_descriptor (elem, mode, mem_mode,
16316 VAR_INIT_STATUS_INITIALIZED);
16317
16318 if (!mem_loc_result)
16319 mem_loc_result = exp_result;
16320 else
16321 add_loc_descr (&mem_loc_result, exp_result);
16322 }
16323
16324 break;
16325 }
16326
16327 default:
16328 if (flag_checking)
16329 {
16330 print_rtl (stderr, rtl);
16331 gcc_unreachable ();
16332 }
16333 break;
16334 }
16335
16336 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16337 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16338
16339 return mem_loc_result;
16340 }
16341
16342 /* Return a descriptor that describes the concatenation of two locations.
16343 This is typically a complex variable. */
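/* For example (illustrative): a complex value whose real and imaginary
   parts live in two registers is described as
   <real part> DW_OP_piece <size> <imaginary part> DW_OP_piece <size>.  */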
16344
16345 static dw_loc_descr_ref
16346 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16347 {
16348 /* At present we only track constant-sized pieces. */
16349 unsigned int size0, size1;
16350 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16351 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16352 return 0;
16353
16354 dw_loc_descr_ref cc_loc_result = NULL;
16355 dw_loc_descr_ref x0_ref
16356 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16357 dw_loc_descr_ref x1_ref
16358 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16359
16360 if (x0_ref == 0 || x1_ref == 0)
16361 return 0;
16362
16363 cc_loc_result = x0_ref;
16364 add_loc_descr_op_piece (&cc_loc_result, size0);
16365
16366 add_loc_descr (&cc_loc_result, x1_ref);
16367 add_loc_descr_op_piece (&cc_loc_result, size1);
16368
16369 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16370 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16371
16372 return cc_loc_result;
16373 }
16374
16375 /* Return a descriptor that describes the concatenation of N
16376 locations. */
16377
16378 static dw_loc_descr_ref
16379 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16380 {
16381 unsigned int i;
16382 dw_loc_descr_ref cc_loc_result = NULL;
16383 unsigned int n = XVECLEN (concatn, 0);
16384 unsigned int size;
16385
16386 for (i = 0; i < n; ++i)
16387 {
16388 dw_loc_descr_ref ref;
16389 rtx x = XVECEXP (concatn, 0, i);
16390
16391 /* At present we only track constant-sized pieces. */
16392 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16393 return NULL;
16394
16395 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16396 if (ref == NULL)
16397 return NULL;
16398
16399 add_loc_descr (&cc_loc_result, ref);
16400 add_loc_descr_op_piece (&cc_loc_result, size);
16401 }
16402
16403 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16404 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16405
16406 return cc_loc_result;
16407 }
16408
16409 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16410 for DEBUG_IMPLICIT_PTR RTL. */
16411
16412 static dw_loc_descr_ref
16413 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16414 {
16415 dw_loc_descr_ref ret;
16416 dw_die_ref ref;
16417
16418 if (dwarf_strict && dwarf_version < 5)
16419 return NULL;
16420 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16421 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16422 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16423 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16424 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16425 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16426 if (ref)
16427 {
16428 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16429 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16430 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16431 }
16432 else
16433 {
16434 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16435 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16436 }
16437 return ret;
16438 }
16439
16440 /* Output a proper Dwarf location descriptor for a variable or parameter
16441 which is either allocated in a register or in a memory location. For a
16442 register, we just generate an OP_REG and the register number. For a
16443 memory location we provide a Dwarf postfix expression describing how to
16444 generate the (dynamic) address of the object onto the address stack.
16445
16446 MODE is mode of the decl if this loc_descriptor is going to be used in
16447 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16448 allowed, VOIDmode otherwise.
16449
16450 If we don't know how to describe it, return 0. */
16451
16452 static dw_loc_descr_ref
16453 loc_descriptor (rtx rtl, machine_mode mode,
16454 enum var_init_status initialized)
16455 {
16456 dw_loc_descr_ref loc_result = NULL;
16457 scalar_int_mode int_mode;
16458
16459 switch (GET_CODE (rtl))
16460 {
16461 case SUBREG:
16462 /* The case of a subreg may arise when we have a local (register)
16463 variable or a formal (register) parameter which doesn't quite fill
16464 up an entire register. For now, just assume that it is
16465 legitimate to make the Dwarf info refer to the whole register which
16466 contains the given subreg. */
16467 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16468 loc_result = loc_descriptor (SUBREG_REG (rtl),
16469 GET_MODE (SUBREG_REG (rtl)), initialized);
16470 else
16471 goto do_default;
16472 break;
16473
16474 case REG:
16475 loc_result = reg_loc_descriptor (rtl, initialized);
16476 break;
16477
16478 case MEM:
16479 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16480 GET_MODE (rtl), initialized);
16481 if (loc_result == NULL)
16482 loc_result = tls_mem_loc_descriptor (rtl);
16483 if (loc_result == NULL)
16484 {
16485 rtx new_rtl = avoid_constant_pool_reference (rtl);
16486 if (new_rtl != rtl)
16487 loc_result = loc_descriptor (new_rtl, mode, initialized);
16488 }
16489 break;
16490
16491 case CONCAT:
16492 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16493 initialized);
16494 break;
16495
16496 case CONCATN:
16497 loc_result = concatn_loc_descriptor (rtl, initialized);
16498 break;
16499
16500 case VAR_LOCATION:
16501 /* Single part. */
16502 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16503 {
16504 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16505 if (GET_CODE (loc) == EXPR_LIST)
16506 loc = XEXP (loc, 0);
16507 loc_result = loc_descriptor (loc, mode, initialized);
16508 break;
16509 }
16510
16511 rtl = XEXP (rtl, 1);
16512 /* FALLTHRU */
16513
16514 case PARALLEL:
16515 {
16516 rtvec par_elems = XVEC (rtl, 0);
16517 int num_elem = GET_NUM_ELEM (par_elems);
16518 machine_mode mode;
16519 int i, size;
16520
16521 /* Create the first one, so we have something to add to. */
16522 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16523 VOIDmode, initialized);
16524 if (loc_result == NULL)
16525 return NULL;
16526 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16527 /* At present we only track constant-sized pieces. */
16528 if (!GET_MODE_SIZE (mode).is_constant (&size))
16529 return NULL;
16530 add_loc_descr_op_piece (&loc_result, size);
16531 for (i = 1; i < num_elem; i++)
16532 {
16533 dw_loc_descr_ref temp;
16534
16535 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16536 VOIDmode, initialized);
16537 if (temp == NULL)
16538 return NULL;
16539 add_loc_descr (&loc_result, temp);
16540 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16541 /* At present we only track constant-sized pieces. */
16542 if (!GET_MODE_SIZE (mode).is_constant (&size))
16543 return NULL;
16544 add_loc_descr_op_piece (&loc_result, size);
16545 }
16546 }
16547 break;
16548
16549 case CONST_INT:
16550 if (mode != VOIDmode && mode != BLKmode)
16551 {
16552 int_mode = as_a <scalar_int_mode> (mode);
16553 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16554 INTVAL (rtl));
16555 }
16556 break;
16557
16558 case CONST_DOUBLE:
16559 if (mode == VOIDmode)
16560 mode = GET_MODE (rtl);
16561
16562 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16563 {
16564 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16565
16566 /* Note that a CONST_DOUBLE rtx could represent either an integer
16567 or a floating-point constant. A CONST_DOUBLE is used whenever
16568 the constant requires more than one word in order to be
16569 adequately represented. We output CONST_DOUBLEs as blocks. */
16570 scalar_mode smode = as_a <scalar_mode> (mode);
16571 loc_result = new_loc_descr (DW_OP_implicit_value,
16572 GET_MODE_SIZE (smode), 0);
16573 #if TARGET_SUPPORTS_WIDE_INT == 0
16574 if (!SCALAR_FLOAT_MODE_P (smode))
16575 {
16576 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16577 loc_result->dw_loc_oprnd2.v.val_double
16578 = rtx_to_double_int (rtl);
16579 }
16580 else
16581 #endif
16582 {
16583 unsigned int length = GET_MODE_SIZE (smode);
16584 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16585
16586 insert_float (rtl, array);
16587 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16588 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16589 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16590 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16591 }
16592 }
16593 break;
16594
16595 case CONST_WIDE_INT:
16596 if (mode == VOIDmode)
16597 mode = GET_MODE (rtl);
16598
16599 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16600 {
16601 int_mode = as_a <scalar_int_mode> (mode);
16602 loc_result = new_loc_descr (DW_OP_implicit_value,
16603 GET_MODE_SIZE (int_mode), 0);
16604 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16605 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16606 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16607 }
16608 break;
16609
16610 case CONST_VECTOR:
16611 if (mode == VOIDmode)
16612 mode = GET_MODE (rtl);
16613
16614 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16615 {
16616 unsigned int length;
16617 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16618 return NULL;
16619
16620 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16621 unsigned char *array
16622 = ggc_vec_alloc<unsigned char> (length * elt_size);
16623 unsigned int i;
16624 unsigned char *p;
16625 machine_mode imode = GET_MODE_INNER (mode);
16626
16627 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16628 switch (GET_MODE_CLASS (mode))
16629 {
16630 case MODE_VECTOR_INT:
16631 for (i = 0, p = array; i < length; i++, p += elt_size)
16632 {
16633 rtx elt = CONST_VECTOR_ELT (rtl, i);
16634 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16635 }
16636 break;
16637
16638 case MODE_VECTOR_FLOAT:
16639 for (i = 0, p = array; i < length; i++, p += elt_size)
16640 {
16641 rtx elt = CONST_VECTOR_ELT (rtl, i);
16642 insert_float (elt, p);
16643 }
16644 break;
16645
16646 default:
16647 gcc_unreachable ();
16648 }
16649
16650 loc_result = new_loc_descr (DW_OP_implicit_value,
16651 length * elt_size, 0);
16652 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16653 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16654 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16655 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16656 }
16657 break;
16658
16659 case CONST:
16660 if (mode == VOIDmode
16661 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16662 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16663 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16664 {
16665 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16666 break;
16667 }
16668 /* FALLTHROUGH */
16669 case SYMBOL_REF:
16670 if (!const_ok_for_output (rtl))
16671 break;
16672 /* FALLTHROUGH */
16673 case LABEL_REF:
16674 if (is_a <scalar_int_mode> (mode, &int_mode)
16675 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16676 && (dwarf_version >= 4 || !dwarf_strict))
16677 {
16678 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16679 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16680 vec_safe_push (used_rtx_array, rtl);
16681 }
16682 break;
16683
16684 case DEBUG_IMPLICIT_PTR:
16685 loc_result = implicit_ptr_descriptor (rtl, 0);
16686 break;
16687
16688 case PLUS:
16689 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16690 && CONST_INT_P (XEXP (rtl, 1)))
16691 {
16692 loc_result
16693 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16694 break;
16695 }
16696 /* FALLTHRU */
16697 do_default:
16698 default:
16699 if ((is_a <scalar_int_mode> (mode, &int_mode)
16700 && GET_MODE (rtl) == int_mode
16701 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16702 && dwarf_version >= 4)
16703 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16704 {
16705 /* Value expression. */
16706 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16707 if (loc_result)
16708 add_loc_descr (&loc_result,
16709 new_loc_descr (DW_OP_stack_value, 0, 0));
16710 }
16711 break;
16712 }
16713
16714 return loc_result;
16715 }
16716
16717 /* We need to figure out what section we should use as the base for the
16718 address ranges where a given location is valid.
16719 1. If this particular DECL has a section associated with it, use that.
16720 2. If this function has a section associated with it, use that.
16721 3. Otherwise, use the text section.
16722 XXX: If you split a variable across multiple sections, we won't notice. */
16723
16724 static const char *
16725 secname_for_decl (const_tree decl)
16726 {
16727 const char *secname;
16728
16729 if (VAR_OR_FUNCTION_DECL_P (decl)
16730 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16731 && DECL_SECTION_NAME (decl))
16732 secname = DECL_SECTION_NAME (decl);
16733 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16734 secname = DECL_SECTION_NAME (current_function_decl);
16735 else if (cfun && in_cold_section_p)
16736 secname = crtl->subsections.cold_section_label;
16737 else
16738 secname = text_section_label;
16739
16740 return secname;
16741 }
16742
16743 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16744
16745 static bool
16746 decl_by_reference_p (tree decl)
16747 {
16748 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16749 || VAR_P (decl))
16750 && DECL_BY_REFERENCE (decl));
16751 }
16752
16753 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16754 for VARLOC. */
16755
16756 static dw_loc_descr_ref
16757 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16758 enum var_init_status initialized)
16759 {
16760 int have_address = 0;
16761 dw_loc_descr_ref descr;
16762 machine_mode mode;
16763
16764 if (want_address != 2)
16765 {
16766 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16767 /* Single part. */
16768 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16769 {
16770 varloc = PAT_VAR_LOCATION_LOC (varloc);
16771 if (GET_CODE (varloc) == EXPR_LIST)
16772 varloc = XEXP (varloc, 0);
16773 mode = GET_MODE (varloc);
16774 if (MEM_P (varloc))
16775 {
16776 rtx addr = XEXP (varloc, 0);
16777 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16778 mode, initialized);
16779 if (descr)
16780 have_address = 1;
16781 else
16782 {
16783 rtx x = avoid_constant_pool_reference (varloc);
16784 if (x != varloc)
16785 descr = mem_loc_descriptor (x, mode, VOIDmode,
16786 initialized);
16787 }
16788 }
16789 else
16790 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16791 }
16792 else
16793 return 0;
16794 }
16795 else
16796 {
16797 if (GET_CODE (varloc) == VAR_LOCATION)
16798 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16799 else
16800 mode = DECL_MODE (loc);
16801 descr = loc_descriptor (varloc, mode, initialized);
16802 have_address = 1;
16803 }
16804
16805 if (!descr)
16806 return 0;
16807
16808 if (want_address == 2 && !have_address
16809 && (dwarf_version >= 4 || !dwarf_strict))
16810 {
16811 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16812 {
16813 expansion_failed (loc, NULL_RTX,
16814 "DWARF address size mismatch");
16815 return 0;
16816 }
16817 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16818 have_address = 1;
16819 }
16820 /* Show if we can't fill the request for an address. */
16821 if (want_address && !have_address)
16822 {
16823 expansion_failed (loc, NULL_RTX,
16824 "Want address and only have value");
16825 return 0;
16826 }
16827
16828 /* If we've got an address and don't want one, dereference. */
16829 if (!want_address && have_address)
16830 {
16831 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16832 enum dwarf_location_atom op;
16833
16834 if (size > DWARF2_ADDR_SIZE || size == -1)
16835 {
16836 expansion_failed (loc, NULL_RTX,
16837 "DWARF address size mismatch");
16838 return 0;
16839 }
16840 else if (size == DWARF2_ADDR_SIZE)
16841 op = DW_OP_deref;
16842 else
16843 op = DW_OP_deref_size;
16844
16845 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16846 }
16847
16848 return descr;
16849 }
16850
16851 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
16852 if it is not possible. */
16853
16854 static dw_loc_descr_ref
16855 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16856 {
16857 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16858 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16859 else if (dwarf_version >= 3 || !dwarf_strict)
16860 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16861 else
16862 return NULL;
16863 }
16864
16865 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16866 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
16867
16868 static dw_loc_descr_ref
16869 dw_sra_loc_expr (tree decl, rtx loc)
16870 {
16871 rtx p;
16872 unsigned HOST_WIDE_INT padsize = 0;
16873 dw_loc_descr_ref descr, *descr_tail;
16874 unsigned HOST_WIDE_INT decl_size;
16875 rtx varloc;
16876 enum var_init_status initialized;
16877
16878 if (DECL_SIZE (decl) == NULL
16879 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16880 return NULL;
16881
16882 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16883 descr = NULL;
16884 descr_tail = &descr;
16885
16886 for (p = loc; p; p = XEXP (p, 1))
16887 {
16888 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16889 rtx loc_note = *decl_piece_varloc_ptr (p);
16890 dw_loc_descr_ref cur_descr;
16891 dw_loc_descr_ref *tail, last = NULL;
16892 unsigned HOST_WIDE_INT opsize = 0;
16893
16894 if (loc_note == NULL_RTX
16895 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16896 {
16897 padsize += bitsize;
16898 continue;
16899 }
16900 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16901 varloc = NOTE_VAR_LOCATION (loc_note);
16902 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16903 if (cur_descr == NULL)
16904 {
16905 padsize += bitsize;
16906 continue;
16907 }
16908
16909 /* Check that cur_descr either doesn't use
16910 DW_OP_*piece operations, or their sum is equal
16911 to bitsize. Otherwise we can't embed it. */
16912 for (tail = &cur_descr; *tail != NULL;
16913 tail = &(*tail)->dw_loc_next)
16914 if ((*tail)->dw_loc_opc == DW_OP_piece)
16915 {
16916 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16917 * BITS_PER_UNIT;
16918 last = *tail;
16919 }
16920 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16921 {
16922 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16923 last = *tail;
16924 }
16925
16926 if (last != NULL && opsize != bitsize)
16927 {
16928 padsize += bitsize;
16929 /* Discard the current piece of the descriptor and release any
16930 addr_table entries it uses. */
16931 remove_loc_list_addr_table_entries (cur_descr);
16932 continue;
16933 }
16934
16935 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
16936 expression, which means that those bits are optimized out. */
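/* E.g. (illustrative) a leading DW_OP_piece 4 preceded by no location
   expression says that the first four bytes of the object have no
   available location.  */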
16937 if (padsize)
16938 {
16939 if (padsize > decl_size)
16940 {
16941 remove_loc_list_addr_table_entries (cur_descr);
16942 goto discard_descr;
16943 }
16944 decl_size -= padsize;
16945 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
16946 if (*descr_tail == NULL)
16947 {
16948 remove_loc_list_addr_table_entries (cur_descr);
16949 goto discard_descr;
16950 }
16951 descr_tail = &(*descr_tail)->dw_loc_next;
16952 padsize = 0;
16953 }
16954 *descr_tail = cur_descr;
16955 descr_tail = tail;
16956 if (bitsize > decl_size)
16957 goto discard_descr;
16958 decl_size -= bitsize;
16959 if (last == NULL)
16960 {
16961 HOST_WIDE_INT offset = 0;
16962 if (GET_CODE (varloc) == VAR_LOCATION
16963 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16964 {
16965 varloc = PAT_VAR_LOCATION_LOC (varloc);
16966 if (GET_CODE (varloc) == EXPR_LIST)
16967 varloc = XEXP (varloc, 0);
16968 }
16969 do
16970 {
16971 if (GET_CODE (varloc) == CONST
16972 || GET_CODE (varloc) == SIGN_EXTEND
16973 || GET_CODE (varloc) == ZERO_EXTEND)
16974 varloc = XEXP (varloc, 0);
16975 else if (GET_CODE (varloc) == SUBREG)
16976 varloc = SUBREG_REG (varloc);
16977 else
16978 break;
16979 }
16980 while (1);
16981 /* The DW_OP_bit_piece offset should be zero for register
16982 or implicit location descriptions and empty location
16983 descriptions, but for memory addresses it needs big-endian
16984 adjustment. */
16985 if (MEM_P (varloc))
16986 {
16987 unsigned HOST_WIDE_INT memsize;
16988 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
16989 goto discard_descr;
16990 memsize *= BITS_PER_UNIT;
16991 if (memsize != bitsize)
16992 {
16993 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
16994 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
16995 goto discard_descr;
16996 if (memsize < bitsize)
16997 goto discard_descr;
16998 if (BITS_BIG_ENDIAN)
16999 offset = memsize - bitsize;
17000 }
17001 }
17002
17003 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17004 if (*descr_tail == NULL)
17005 goto discard_descr;
17006 descr_tail = &(*descr_tail)->dw_loc_next;
17007 }
17008 }
17009
17010 /* If there were any non-empty expressions, add padding till the end of
17011 the decl. */
17012 if (descr != NULL && decl_size != 0)
17013 {
17014 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17015 if (*descr_tail == NULL)
17016 goto discard_descr;
17017 }
17018 return descr;
17019
17020 discard_descr:
17021 /* Discard the descriptor and release any addr_table entries it uses. */
17022 remove_loc_list_addr_table_entries (descr);
17023 return NULL;
17024 }
17025
17026 /* Return the dwarf representation of the location list LOC_LIST of
17027 DECL. WANT_ADDRESS has the same meaning as in the loc_list_from_tree
17028 function. */
17029
17030 static dw_loc_list_ref
17031 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17032 {
17033 const char *endname, *secname;
17034 var_loc_view endview;
17035 rtx varloc;
17036 enum var_init_status initialized;
17037 struct var_loc_node *node;
17038 dw_loc_descr_ref descr;
17039 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17040 dw_loc_list_ref list = NULL;
17041 dw_loc_list_ref *listp = &list;
17042
17043 /* Now that we know what section we are using for a base,
17044 actually construct the list of locations.
17045 The first location information is what is passed to the
17046 function that creates the location list, and the remaining
17047 locations just get added on to that list.
17048 Note that we only know the start address for a location
17049 (i.e. location changes), so to build the range, we use
17050 the range [current location start, next location start].
17051 This means we have to special-case the last node, and generate
17052 a range of [last location start, end of function label]. */
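/* For example (illustrative): a variable that lives in a register up to
   label .LVL1 and in a stack slot afterwards yields two entries,
   [start, .LVL1) with a register location description and
   [.LVL1, end of function) with a memory location description.  */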
17053
17054 if (cfun && crtl->has_bb_partition)
17055 {
17056 bool save_in_cold_section_p = in_cold_section_p;
17057 in_cold_section_p = first_function_block_is_cold;
17058 if (loc_list->last_before_switch == NULL)
17059 in_cold_section_p = !in_cold_section_p;
17060 secname = secname_for_decl (decl);
17061 in_cold_section_p = save_in_cold_section_p;
17062 }
17063 else
17064 secname = secname_for_decl (decl);
17065
17066 for (node = loc_list->first; node; node = node->next)
17067 {
17068 bool range_across_switch = false;
17069 if (GET_CODE (node->loc) == EXPR_LIST
17070 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17071 {
17072 if (GET_CODE (node->loc) == EXPR_LIST)
17073 {
17074 descr = NULL;
17075 /* This requires DW_OP_{,bit_}piece, which is not usable
17076 inside DWARF expressions. */
17077 if (want_address == 2)
17078 descr = dw_sra_loc_expr (decl, node->loc);
17079 }
17080 else
17081 {
17082 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17083 varloc = NOTE_VAR_LOCATION (node->loc);
17084 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17085 }
17086 if (descr)
17087 {
17088 /* If a section switch happens in between node->label
17089 and node->next->label (or the end of the function) and
17090 we can't emit it as a single entry list,
17091 emit two ranges, the first one ending at the end
17092 of the first partition and the second one starting at the
17093 beginning of the second partition. */
17094 if (node == loc_list->last_before_switch
17095 && (node != loc_list->first || loc_list->first->next
17096 /* If we are to emit a view number, we will emit
17097 a loclist rather than a single location
17098 expression for the entire function (see
17099 loc_list_has_views), so we have to split the
17100 range that straddles across partitions. */
17101 || !ZERO_VIEW_P (node->view))
17102 && current_function_decl)
17103 {
17104 endname = cfun->fde->dw_fde_end;
17105 endview = 0;
17106 range_across_switch = true;
17107 }
17108 /* The variable has a location between NODE->LABEL and
17109 NODE->NEXT->LABEL. */
17110 else if (node->next)
17111 endname = node->next->label, endview = node->next->view;
17112 /* If the variable has a location at the last label
17113 it keeps its location until the end of function. */
17114 else if (!current_function_decl)
17115 endname = text_end_label, endview = 0;
17116 else
17117 {
17118 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17119 current_function_funcdef_no);
17120 endname = ggc_strdup (label_id);
17121 endview = 0;
17122 }
17123
17124 *listp = new_loc_list (descr, node->label, node->view,
17125 endname, endview, secname);
17126 if (TREE_CODE (decl) == PARM_DECL
17127 && node == loc_list->first
17128 && NOTE_P (node->loc)
17129 && strcmp (node->label, endname) == 0)
17130 (*listp)->force = true;
17131 listp = &(*listp)->dw_loc_next;
17132 }
17133 }
17134
17135 if (cfun
17136 && crtl->has_bb_partition
17137 && node == loc_list->last_before_switch)
17138 {
17139 bool save_in_cold_section_p = in_cold_section_p;
17140 in_cold_section_p = !first_function_block_is_cold;
17141 secname = secname_for_decl (decl);
17142 in_cold_section_p = save_in_cold_section_p;
17143 }
17144
17145 if (range_across_switch)
17146 {
17147 if (GET_CODE (node->loc) == EXPR_LIST)
17148 descr = dw_sra_loc_expr (decl, node->loc);
17149 else
17150 {
17151 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17152 varloc = NOTE_VAR_LOCATION (node->loc);
17153 descr = dw_loc_list_1 (decl, varloc, want_address,
17154 initialized);
17155 }
17156 gcc_assert (descr);
17157 /* The variable has a location between NODE->LABEL and
17158 NODE->NEXT->LABEL. */
17159 if (node->next)
17160 endname = node->next->label, endview = node->next->view;
17161 else
17162 endname = cfun->fde->dw_fde_second_end, endview = 0;
17163 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17164 endname, endview, secname);
17165 listp = &(*listp)->dw_loc_next;
17166 }
17167 }
17168
17169 /* Try to avoid the overhead of a location list by emitting a location
17170 expression instead, but only if we didn't have more than one
17171 location entry in the first place. If some entries were not
17172 representable, we don't want to pretend that a single entry that
17173 was representable applies to the entire scope in which the variable
17174 is available. */
17175 if (list && loc_list->first->next)
17176 gen_llsym (list);
17177 else
17178 maybe_gen_llsym (list);
17179
17180 return list;
17181 }
17182
17183 /* Return true if the loc_list has only a single element and thus can be
17184 represented as a location description. */
17185
17186 static bool
17187 single_element_loc_list_p (dw_loc_list_ref list)
17188 {
17189 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17190 return !list->ll_symbol;
17191 }
17192
17193 /* Duplicate a single element of location list. */
17194
17195 static inline dw_loc_descr_ref
17196 copy_loc_descr (dw_loc_descr_ref ref)
17197 {
17198 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17199 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17200 return copy;
17201 }
17202
17203 /* To each location in list LIST append loc descr REF. */
17204
17205 static void
17206 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17207 {
17208 dw_loc_descr_ref copy;
17209 add_loc_descr (&list->expr, ref);
17210 list = list->dw_loc_next;
17211 while (list)
17212 {
17213 copy = copy_loc_descr (ref);
17214 add_loc_descr (&list->expr, copy);
17215 while (copy->dw_loc_next)
17216 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17217 list = list->dw_loc_next;
17218 }
17219 }
17220
17221 /* To each location in list LIST prepend loc descr REF. */
17222
17223 static void
17224 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17225 {
17226 dw_loc_descr_ref copy;
17227 dw_loc_descr_ref ref_end = list->expr;
17228 add_loc_descr (&ref, list->expr);
17229 list->expr = ref;
17230 list = list->dw_loc_next;
17231 while (list)
17232 {
17233 dw_loc_descr_ref end = list->expr;
17234 list->expr = copy = copy_loc_descr (ref);
17235 while (copy->dw_loc_next != ref_end)
17236 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17237 copy->dw_loc_next = end;
17238 list = list->dw_loc_next;
17239 }
17240 }
17241
17242 /* Given two lists RET and LIST,
17243 produce a location list that is the result of adding the expression in LIST
17244 to the expression in RET at each position in the program.
17245 Might be destructive on both RET and LIST.
17246
17247 TODO: We handle only the simple cases of RET or LIST having at most one
17248 element. The general case would involve sorting the lists in program order
17249 and merging them, which will need some additional work.
17250 Adding that will improve the quality of debug info, especially for SRA-ed
17251 structures. */
17252
17253 static void
17254 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17255 {
17256 if (!list)
17257 return;
17258 if (!*ret)
17259 {
17260 *ret = list;
17261 return;
17262 }
17263 if (!list->dw_loc_next)
17264 {
17265 add_loc_descr_to_each (*ret, list->expr);
17266 return;
17267 }
17268 if (!(*ret)->dw_loc_next)
17269 {
17270 prepend_loc_descr_to_each (list, (*ret)->expr);
17271 *ret = list;
17272 return;
17273 }
17274 expansion_failed (NULL_TREE, NULL_RTX,
17275 "Don't know how to merge two non-trivial"
17276 " location lists.\n");
17277 *ret = NULL;
17278 return;
17279 }
17280
17281 /* LOC is a constant expression. Try our luck: look it up in the constant
17282 pool and return a loc_descr for its address. */
17283
17284 static dw_loc_descr_ref
17285 cst_pool_loc_descr (tree loc)
17286 {
17287 /* Get an RTL for this, if something has been emitted. */
17288 rtx rtl = lookup_constant_def (loc);
17289
17290 if (!rtl || !MEM_P (rtl))
17291 {
17292 gcc_assert (!rtl);
17293 return 0;
17294 }
17295 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17296
17297 /* TODO: We might get more coverage if we were actually delaying expansion
17298 of all expressions till the end of compilation, when constant pools are fully
17299 populated. */
17300 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17301 {
17302 expansion_failed (loc, NULL_RTX,
17303 "CST value in contant pool but not marked.");
17304 return 0;
17305 }
17306 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17307 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17308 }
17309
17310 /* Return a dw_loc_list representing the address of the ADDR_EXPR LOC
17311 by looking for an inner INDIRECT_REF expression and turning
17312 it into simple arithmetic.
17313
17314 See loc_list_from_tree for the meaning of CONTEXT. */
17315
17316 static dw_loc_list_ref
17317 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17318 loc_descr_context *context)
17319 {
17320 tree obj, offset;
17321 poly_int64 bitsize, bitpos, bytepos;
17322 machine_mode mode;
17323 int unsignedp, reversep, volatilep = 0;
17324 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17325
17326 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17327 &bitsize, &bitpos, &offset, &mode,
17328 &unsignedp, &reversep, &volatilep);
17329 STRIP_NOPS (obj);
17330 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17331 {
17332 expansion_failed (loc, NULL_RTX, "bitfield access");
17333 return 0;
17334 }
17335 if (!INDIRECT_REF_P (obj))
17336 {
17337 expansion_failed (obj,
17338 NULL_RTX, "no indirect ref in inner reference");
17339 return 0;
17340 }
17341 if (!offset && known_eq (bitpos, 0))
17342 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17343 context);
17344 else if (toplev
17345 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17346 && (dwarf_version >= 4 || !dwarf_strict))
17347 {
17348 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17349 if (!list_ret)
17350 return 0;
17351 if (offset)
17352 {
17353 /* Variable offset. */
17354 list_ret1 = loc_list_from_tree (offset, 0, context);
17355 if (list_ret1 == 0)
17356 return 0;
17357 add_loc_list (&list_ret, list_ret1);
17358 if (!list_ret)
17359 return 0;
17360 add_loc_descr_to_each (list_ret,
17361 new_loc_descr (DW_OP_plus, 0, 0));
17362 }
17363 HOST_WIDE_INT value;
17364 if (bytepos.is_constant (&value) && value > 0)
17365 add_loc_descr_to_each (list_ret,
17366 new_loc_descr (DW_OP_plus_uconst, value, 0));
17367 else if (maybe_ne (bytepos, 0))
17368 loc_list_plus_const (list_ret, bytepos);
17369 add_loc_descr_to_each (list_ret,
17370 new_loc_descr (DW_OP_stack_value, 0, 0));
17371 }
17372 return list_ret;
17373 }
17374
17375 /* Set LOC to the next operation that is not a DW_OP_nop operation. If all
17376 operations from LOC onward are nops, move to the last one. Insert into NOPS
17377 all operations that are skipped. */
17378
17379 static void
17380 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17381 hash_set<dw_loc_descr_ref> &nops)
17382 {
17383 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17384 {
17385 nops.add (loc);
17386 loc = loc->dw_loc_next;
17387 }
17388 }
17389
17390 /* Helper for loc_descr_without_nops: free the location description operation
17391 P. */
17392
17393 bool
17394 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17395 {
17396 ggc_free (loc);
17397 return true;
17398 }
17399
17400 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17401 finishes LOC. */
17402
17403 static void
17404 loc_descr_without_nops (dw_loc_descr_ref &loc)
17405 {
17406 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17407 return;
17408
17409 /* Set of all DW_OP_nop operations we remove. */
17410 hash_set<dw_loc_descr_ref> nops;
17411
17412 /* First, strip all prefix NOP operations in order to keep the head of the
17413 operations list. */
17414 loc_descr_to_next_no_nop (loc, nops);
17415
17416 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17417 {
17418 /* For control flow operations: strip "prefix" nops in destination
17419 labels. */
17420 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17421 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17422 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17423 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17424
17425 /* Do the same for the operations that follow, then move to the next
17426 iteration. */
17427 if (cur->dw_loc_next != NULL)
17428 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17429 cur = cur->dw_loc_next;
17430 }
17431
17432 nops.traverse<void *, free_loc_descr> (NULL);
17433 }
17434
17435
17436 struct dwarf_procedure_info;
17437
17438 /* Helper structure for location descriptions generation. */
17439 struct loc_descr_context
17440 {
17441 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17442 NULL_TREE if DW_OP_push_object_address is invalid for this location
17443 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17444 tree context_type;
17445 /* The ..._DECL node that should be translated as a
17446 DW_OP_push_object_address operation. */
17447 tree base_decl;
17448 /* Information about the DWARF procedure we are currently generating. NULL if
17449 we are not generating a DWARF procedure. */
17450 struct dwarf_procedure_info *dpi;
17451 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17452 by consumer. Used for DW_TAG_generic_subrange attributes. */
17453 bool placeholder_arg;
17454 /* True if PLACEHOLDER_EXPR has been seen. */
17455 bool placeholder_seen;
17456 };
17457
17458 /* DWARF procedures generation
17459
17460 DWARF expressions (aka. location descriptions) are used to encode variable
17461 things such as sizes or offsets. Such computations can have redundant parts
17462 that can be factorized in order to reduce the size of the output debug
17463 information. This is the whole point of DWARF procedures.
17464
17465 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17466 already factorized into functions ("size functions") in order to handle very
17467 big and complex types. Such functions are quite simple: they have integral
17468 arguments, they return an integral result and their body contains only a
17469 return statement with arithmetic expressions. This is the only kind of
17470 function we are interested in translating into DWARF procedures, here.
17471
17472 DWARF expressions and DWARF procedures are executed using a stack, so we have
17473 to define some calling convention for them to interact. Let's say that:
17474
17475 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17476 all arguments in reverse order (right-to-left) so that when the DWARF
17477 procedure execution starts, the first argument is the top of the stack.
17478
17479 - Then, when returning, the DWARF procedure must have consumed all arguments
17480 on the stack, must have pushed the result and touched nothing else.
17481
17482 - Each integral argument and the result are of integral types that can be
17483 held in a single stack slot.
17484
17485 - We call "frame offset" the number of stack slots that are "under DWARF
17486 procedure control": it includes the argument slots, the temporaries and
17487 the result slot. Thus, it is equal to the number of arguments when the
17488 procedure execution starts and must be equal to one (the result) when it
17489 returns. */
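/* For instance, for a hypothetical size function f (n) = n * 4 + 8, the
   caller pushes the value of N and emits a DW_OP_call4 operation that
   references the procedure's DIE; the procedure body may then be translated
   as DW_OP_dup; DW_OP_lit4; DW_OP_mul; DW_OP_plus_uconst 8, followed by an
   epilogue (DW_OP_swap; DW_OP_drop) that removes the argument so that only
   the result remains on the stack when the procedure returns. */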
17490
17491 /* Helper structure used when generating operations for a DWARF procedure. */
17492 struct dwarf_procedure_info
17493 {
17494 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17495 currently translated. */
17496 tree fndecl;
17497 /* The number of arguments FNDECL takes. */
17498 unsigned args_count;
17499 };
17500
17501 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17502 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17503 equate it to this DIE. */
17504
17505 static dw_die_ref
17506 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17507 dw_die_ref parent_die)
17508 {
17509 dw_die_ref dwarf_proc_die;
17510
17511 if ((dwarf_version < 3 && dwarf_strict)
17512 || location == NULL)
17513 return NULL;
17514
17515 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17516 if (fndecl)
17517 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17518 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17519 return dwarf_proc_die;
17520 }
17521
17522 /* Return whether TYPE is a supported type as a DWARF procedure argument
17523 type or return type (we handle only scalar types and pointer types that
17524 aren't wider than the DWARF expression evaluation stack). */
17525
17526 static bool
17527 is_handled_procedure_type (tree type)
17528 {
17529 return ((INTEGRAL_TYPE_P (type)
17530 || TREE_CODE (type) == OFFSET_TYPE
17531 || TREE_CODE (type) == POINTER_TYPE)
17532 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17533 }
17534
17535 /* Helper for resolve_args_picking: do the same but stop when coming across
17536 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17537 offset *before* evaluating the corresponding operation. */
17538
17539 static bool
17540 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17541 struct dwarf_procedure_info *dpi,
17542 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17543 {
17544 /* The "frame_offset" identifier is already used to name a macro... */
17545 unsigned frame_offset_ = initial_frame_offset;
17546 dw_loc_descr_ref l;
17547
17548 for (l = loc; l != NULL;)
17549 {
17550 bool existed;
17551 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17552
17553 /* If we have already visited this node, there is nothing more to compute. */
17554 if (existed)
17555 {
17556 /* Make sure that the stack size is consistent wherever the execution
17557 flow comes from. */
17558 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17559 break;
17560 }
17561 l_frame_offset = frame_offset_;
17562
17563 /* If needed, relocate the picking offset with respect to the frame
17564 offset. */
17565 if (l->frame_offset_rel)
17566 {
17567 unsigned HOST_WIDE_INT off;
17568 switch (l->dw_loc_opc)
17569 {
17570 case DW_OP_pick:
17571 off = l->dw_loc_oprnd1.v.val_unsigned;
17572 break;
17573 case DW_OP_dup:
17574 off = 0;
17575 break;
17576 case DW_OP_over:
17577 off = 1;
17578 break;
17579 default:
17580 gcc_unreachable ();
17581 }
17582 /* frame_offset_ is the size of the current stack frame, including
17583 incoming arguments. Besides, the arguments are pushed
17584 right-to-left. Thus, in order to access the Nth argument from
17585 this operation node, the picking has to skip temporaries *plus*
17586 one stack slot per argument (0 for the first one, 1 for the second
17587 one, etc.).
17588
17589 The targeted argument number (N) is already set as the operand,
17590 and the number of temporaries can be computed with:
17591 frame_offset_ - dpi->args_count */
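/* For instance (illustrative values), in a procedure with two arguments
   and one temporary already pushed, frame_offset_ is 3: accessing
   argument 0 yields off = 0 + 3 - 2 = 1 and becomes DW_OP_over, while
   accessing argument 1 yields off = 2 and becomes DW_OP_pick 2. */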
17592 off += frame_offset_ - dpi->args_count;
17593
17594 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17595 if (off > 255)
17596 return false;
17597
17598 if (off == 0)
17599 {
17600 l->dw_loc_opc = DW_OP_dup;
17601 l->dw_loc_oprnd1.v.val_unsigned = 0;
17602 }
17603 else if (off == 1)
17604 {
17605 l->dw_loc_opc = DW_OP_over;
17606 l->dw_loc_oprnd1.v.val_unsigned = 0;
17607 }
17608 else
17609 {
17610 l->dw_loc_opc = DW_OP_pick;
17611 l->dw_loc_oprnd1.v.val_unsigned = off;
17612 }
17613 }
17614
17615 /* Update frame_offset according to the effect the current operation has
17616 on the stack. */
17617 switch (l->dw_loc_opc)
17618 {
17619 case DW_OP_deref:
17620 case DW_OP_swap:
17621 case DW_OP_rot:
17622 case DW_OP_abs:
17623 case DW_OP_neg:
17624 case DW_OP_not:
17625 case DW_OP_plus_uconst:
17626 case DW_OP_skip:
17627 case DW_OP_reg0:
17628 case DW_OP_reg1:
17629 case DW_OP_reg2:
17630 case DW_OP_reg3:
17631 case DW_OP_reg4:
17632 case DW_OP_reg5:
17633 case DW_OP_reg6:
17634 case DW_OP_reg7:
17635 case DW_OP_reg8:
17636 case DW_OP_reg9:
17637 case DW_OP_reg10:
17638 case DW_OP_reg11:
17639 case DW_OP_reg12:
17640 case DW_OP_reg13:
17641 case DW_OP_reg14:
17642 case DW_OP_reg15:
17643 case DW_OP_reg16:
17644 case DW_OP_reg17:
17645 case DW_OP_reg18:
17646 case DW_OP_reg19:
17647 case DW_OP_reg20:
17648 case DW_OP_reg21:
17649 case DW_OP_reg22:
17650 case DW_OP_reg23:
17651 case DW_OP_reg24:
17652 case DW_OP_reg25:
17653 case DW_OP_reg26:
17654 case DW_OP_reg27:
17655 case DW_OP_reg28:
17656 case DW_OP_reg29:
17657 case DW_OP_reg30:
17658 case DW_OP_reg31:
17659 case DW_OP_bregx:
17660 case DW_OP_piece:
17661 case DW_OP_deref_size:
17662 case DW_OP_nop:
17663 case DW_OP_bit_piece:
17664 case DW_OP_implicit_value:
17665 case DW_OP_stack_value:
17666 break;
17667
17668 case DW_OP_addr:
17669 case DW_OP_const1u:
17670 case DW_OP_const1s:
17671 case DW_OP_const2u:
17672 case DW_OP_const2s:
17673 case DW_OP_const4u:
17674 case DW_OP_const4s:
17675 case DW_OP_const8u:
17676 case DW_OP_const8s:
17677 case DW_OP_constu:
17678 case DW_OP_consts:
17679 case DW_OP_dup:
17680 case DW_OP_over:
17681 case DW_OP_pick:
17682 case DW_OP_lit0:
17683 case DW_OP_lit1:
17684 case DW_OP_lit2:
17685 case DW_OP_lit3:
17686 case DW_OP_lit4:
17687 case DW_OP_lit5:
17688 case DW_OP_lit6:
17689 case DW_OP_lit7:
17690 case DW_OP_lit8:
17691 case DW_OP_lit9:
17692 case DW_OP_lit10:
17693 case DW_OP_lit11:
17694 case DW_OP_lit12:
17695 case DW_OP_lit13:
17696 case DW_OP_lit14:
17697 case DW_OP_lit15:
17698 case DW_OP_lit16:
17699 case DW_OP_lit17:
17700 case DW_OP_lit18:
17701 case DW_OP_lit19:
17702 case DW_OP_lit20:
17703 case DW_OP_lit21:
17704 case DW_OP_lit22:
17705 case DW_OP_lit23:
17706 case DW_OP_lit24:
17707 case DW_OP_lit25:
17708 case DW_OP_lit26:
17709 case DW_OP_lit27:
17710 case DW_OP_lit28:
17711 case DW_OP_lit29:
17712 case DW_OP_lit30:
17713 case DW_OP_lit31:
17714 case DW_OP_breg0:
17715 case DW_OP_breg1:
17716 case DW_OP_breg2:
17717 case DW_OP_breg3:
17718 case DW_OP_breg4:
17719 case DW_OP_breg5:
17720 case DW_OP_breg6:
17721 case DW_OP_breg7:
17722 case DW_OP_breg8:
17723 case DW_OP_breg9:
17724 case DW_OP_breg10:
17725 case DW_OP_breg11:
17726 case DW_OP_breg12:
17727 case DW_OP_breg13:
17728 case DW_OP_breg14:
17729 case DW_OP_breg15:
17730 case DW_OP_breg16:
17731 case DW_OP_breg17:
17732 case DW_OP_breg18:
17733 case DW_OP_breg19:
17734 case DW_OP_breg20:
17735 case DW_OP_breg21:
17736 case DW_OP_breg22:
17737 case DW_OP_breg23:
17738 case DW_OP_breg24:
17739 case DW_OP_breg25:
17740 case DW_OP_breg26:
17741 case DW_OP_breg27:
17742 case DW_OP_breg28:
17743 case DW_OP_breg29:
17744 case DW_OP_breg30:
17745 case DW_OP_breg31:
17746 case DW_OP_fbreg:
17747 case DW_OP_push_object_address:
17748 case DW_OP_call_frame_cfa:
17749 case DW_OP_GNU_variable_value:
17750 ++frame_offset_;
17751 break;
17752
17753 case DW_OP_drop:
17754 case DW_OP_xderef:
17755 case DW_OP_and:
17756 case DW_OP_div:
17757 case DW_OP_minus:
17758 case DW_OP_mod:
17759 case DW_OP_mul:
17760 case DW_OP_or:
17761 case DW_OP_plus:
17762 case DW_OP_shl:
17763 case DW_OP_shr:
17764 case DW_OP_shra:
17765 case DW_OP_xor:
17766 case DW_OP_bra:
17767 case DW_OP_eq:
17768 case DW_OP_ge:
17769 case DW_OP_gt:
17770 case DW_OP_le:
17771 case DW_OP_lt:
17772 case DW_OP_ne:
17773 case DW_OP_regx:
17774 case DW_OP_xderef_size:
17775 --frame_offset_;
17776 break;
17777
17778 case DW_OP_call2:
17779 case DW_OP_call4:
17780 case DW_OP_call_ref:
17781 {
17782 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17783 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17784
17785 if (stack_usage == NULL)
17786 return false;
17787 frame_offset_ += *stack_usage;
17788 break;
17789 }
17790
17791 case DW_OP_implicit_pointer:
17792 case DW_OP_entry_value:
17793 case DW_OP_const_type:
17794 case DW_OP_regval_type:
17795 case DW_OP_deref_type:
17796 case DW_OP_convert:
17797 case DW_OP_reinterpret:
17798 case DW_OP_form_tls_address:
17799 case DW_OP_GNU_push_tls_address:
17800 case DW_OP_GNU_uninit:
17801 case DW_OP_GNU_encoded_addr:
17802 case DW_OP_GNU_implicit_pointer:
17803 case DW_OP_GNU_entry_value:
17804 case DW_OP_GNU_const_type:
17805 case DW_OP_GNU_regval_type:
17806 case DW_OP_GNU_deref_type:
17807 case DW_OP_GNU_convert:
17808 case DW_OP_GNU_reinterpret:
17809 case DW_OP_GNU_parameter_ref:
17810 /* loc_list_from_tree will probably not output these operations for
17811 size functions, so assume they will not appear here. */
17812 /* Fall through... */
17813
17814 default:
17815 gcc_unreachable ();
17816 }
17817
17818 /* Now, follow the control flow (except subroutine calls). */
17819 switch (l->dw_loc_opc)
17820 {
17821 case DW_OP_bra:
17822 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17823 frame_offsets))
17824 return false;
17825 /* Fall through. */
17826
17827 case DW_OP_skip:
17828 l = l->dw_loc_oprnd1.v.val_loc;
17829 break;
17830
17831 case DW_OP_stack_value:
17832 return true;
17833
17834 default:
17835 l = l->dw_loc_next;
17836 break;
17837 }
17838 }
17839
17840 return true;
17841 }
17842
17843 /* Make a DFS over operations reachable through LOC (i.e. follow branch
17844 operations) in order to resolve the operand of DW_OP_pick operations that
17845 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17846 offset *before* LOC is executed. Return whether all relocations were
17847 successful. */
17848
17849 static bool
17850 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17851 struct dwarf_procedure_info *dpi)
17852 {
17853 /* Associate with each visited operation the frame offset *before* evaluating
17854 that operation. */
17855 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17856
17857 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17858 frame_offsets);
17859 }
17860
17861 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17862 Return NULL if it is not possible. */
17863
17864 static dw_die_ref
17865 function_to_dwarf_procedure (tree fndecl)
17866 {
17867 struct loc_descr_context ctx;
17868 struct dwarf_procedure_info dpi;
17869 dw_die_ref dwarf_proc_die;
17870 tree tree_body = DECL_SAVED_TREE (fndecl);
17871 dw_loc_descr_ref loc_body, epilogue;
17872
17873 tree cursor;
17874 unsigned i;
17875
17876 /* Do not generate multiple DWARF procedures for the same function
17877 declaration. */
17878 dwarf_proc_die = lookup_decl_die (fndecl);
17879 if (dwarf_proc_die != NULL)
17880 return dwarf_proc_die;
17881
17882 /* DWARF procedures are available starting with the DWARFv3 standard. */
17883 if (dwarf_version < 3 && dwarf_strict)
17884 return NULL;
17885
17886 /* We handle only functions for which we still have a body, that return a
17887 supported type and that take arguments with supported types. Note that
17888 there is no point translating functions that return nothing. */
17889 if (tree_body == NULL_TREE
17890 || DECL_RESULT (fndecl) == NULL_TREE
17891 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17892 return NULL;
17893
17894 for (cursor = DECL_ARGUMENTS (fndecl);
17895 cursor != NULL_TREE;
17896 cursor = TREE_CHAIN (cursor))
17897 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17898 return NULL;
17899
17900 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
17901 if (TREE_CODE (tree_body) != RETURN_EXPR)
17902 return NULL;
17903 tree_body = TREE_OPERAND (tree_body, 0);
17904 if (TREE_CODE (tree_body) != MODIFY_EXPR
17905 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17906 return NULL;
17907 tree_body = TREE_OPERAND (tree_body, 1);
17908
17909 /* Try to translate the body expression itself. Note that this will probably
17910 cause an infinite recursion if its call graph has a cycle. This is very
17911 unlikely for size functions, however, so don't bother with such things at
17912 the moment. */
17913 ctx.context_type = NULL_TREE;
17914 ctx.base_decl = NULL_TREE;
17915 ctx.dpi = &dpi;
17916 ctx.placeholder_arg = false;
17917 ctx.placeholder_seen = false;
17918 dpi.fndecl = fndecl;
17919 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
17920 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
17921 if (!loc_body)
17922 return NULL;
17923
17924 /* After evaluating all operands in "loc_body", we should still have on the
17925 stack all arguments plus the desired function result (top of the stack).
17926 Generate code in order to keep only the result in our stack frame. */
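/* For instance, with two arguments the stack at this point is, from bottom
   to top: arg1, arg0, result. Each DW_OP_swap; DW_OP_drop pair emitted
   below removes the slot just below the top, so after two such pairs only
   the result is left. */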
17927 epilogue = NULL;
17928 for (i = 0; i < dpi.args_count; ++i)
17929 {
17930 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
17931 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
17932 op_couple->dw_loc_next->dw_loc_next = epilogue;
17933 epilogue = op_couple;
17934 }
17935 add_loc_descr (&loc_body, epilogue);
17936 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
17937 return NULL;
17938
17939 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
17940 because they are considered useful. Now that there is an epilogue, they
17941 are not useful anymore, so give it another try. */
17942 loc_descr_without_nops (loc_body);
17943
17944 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
17945 a DW_TAG_dwarf_procedure, so we may have a conflict, here. It's unlikely,
17946 though, given that size functions do not come from source, so they should
17947 not have a dedicated DW_TAG_subprogram DIE. */
17948 dwarf_proc_die
17949 = new_dwarf_proc_die (loc_body, fndecl,
17950 get_context_die (DECL_CONTEXT (fndecl)));
17951
17952 /* The called DWARF procedure consumes one stack slot per argument and
17953 returns one stack slot. */
17954 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
17955
17956 return dwarf_proc_die;
17957 }
17958
17959
17960 /* Generate a DWARF location list representing LOC.
17961 If WANT_ADDRESS is 0, an expression computing LOC will be returned.
17962 If WANT_ADDRESS is 1, an expression computing the address of LOC will be
17963 returned. If WANT_ADDRESS is 2, an expression computing an address usable
17964 in a location description will be returned (i.e. DW_OP_reg can be used
17965 to refer to register values).
17966
17967 CONTEXT provides information to customize the location description
17968 generation. Its context_type field specifies what type is implicitly
17969 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
17970 will not be generated.
17971
17972 Its DPI field determines whether we are generating a DWARF expression for a
17973 DWARF procedure, in which case PARM_DECL references are processed specially.
17974
17975 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
17976 and dpi fields were null. */
17977
17978 static dw_loc_list_ref
17979 loc_list_from_tree_1 (tree loc, int want_address,
17980 struct loc_descr_context *context)
17981 {
17982 dw_loc_descr_ref ret = NULL, ret1 = NULL;
17983 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17984 int have_address = 0;
17985 enum dwarf_location_atom op;
17986
17987 /* ??? Most of the time we do not take proper care to sign/zero
17988 extend the values. Hopefully this won't be a real
17989 problem... */
17990
17991 if (context != NULL
17992 && context->base_decl == loc
17993 && want_address == 0)
17994 {
17995 if (dwarf_version >= 3 || !dwarf_strict)
17996 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
17997 NULL, 0, NULL, 0, NULL);
17998 else
17999 return NULL;
18000 }
18001
18002 switch (TREE_CODE (loc))
18003 {
18004 case ERROR_MARK:
18005 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18006 return 0;
18007
18008 case PLACEHOLDER_EXPR:
18009 /* This case involves extracting fields from an object to determine the
18010 position of other fields. It is supposed to appear only as the first
18011 operand of COMPONENT_REF nodes and to reference precisely the type
18012 that the context allows. */
18013 if (context != NULL
18014 && TREE_TYPE (loc) == context->context_type
18015 && want_address >= 1)
18016 {
18017 if (dwarf_version >= 3 || !dwarf_strict)
18018 {
18019 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18020 have_address = 1;
18021 break;
18022 }
18023 else
18024 return NULL;
18025 }
18026 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18027 the single argument passed by consumer. */
18028 else if (context != NULL
18029 && context->placeholder_arg
18030 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18031 && want_address == 0)
18032 {
18033 ret = new_loc_descr (DW_OP_pick, 0, 0);
18034 ret->frame_offset_rel = 1;
18035 context->placeholder_seen = true;
18036 break;
18037 }
18038 else
18039 expansion_failed (loc, NULL_RTX,
18040 "PLACEHOLDER_EXPR for an unexpected type");
18041 break;
18042
18043 case CALL_EXPR:
18044 {
18045 const int nargs = call_expr_nargs (loc);
18046 tree callee = get_callee_fndecl (loc);
18047 int i;
18048 dw_die_ref dwarf_proc;
18049
18050 if (callee == NULL_TREE)
18051 goto call_expansion_failed;
18052
18053 /* We handle only functions that return an integer. */
18054 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18055 goto call_expansion_failed;
18056
18057 dwarf_proc = function_to_dwarf_procedure (callee);
18058 if (dwarf_proc == NULL)
18059 goto call_expansion_failed;
18060
18061 /* Evaluate arguments right-to-left so that the first argument will
18062 be the top-most one on the stack. */
18063 for (i = nargs - 1; i >= 0; --i)
18064 {
18065 dw_loc_descr_ref loc_descr
18066 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18067 context);
18068
18069 if (loc_descr == NULL)
18070 goto call_expansion_failed;
18071
18072 add_loc_descr (&ret, loc_descr);
18073 }
18074
18075 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18076 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18077 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18078 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18079 add_loc_descr (&ret, ret1);
18080 break;
18081
18082 call_expansion_failed:
18083 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18084 /* There are no opcodes for these operations. */
18085 return 0;
18086 }
18087
18088 case PREINCREMENT_EXPR:
18089 case PREDECREMENT_EXPR:
18090 case POSTINCREMENT_EXPR:
18091 case POSTDECREMENT_EXPR:
18092 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
18093 /* There are no opcodes for these operations. */
18094 return 0;
18095
18096 case ADDR_EXPR:
18097 /* If we already want an address, see if there is an INDIRECT_REF inside,
18098 e.g. for &this->field. */
18099 if (want_address)
18100 {
18101 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18102 (loc, want_address == 2, context);
18103 if (list_ret)
18104 have_address = 1;
18105 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18106 && (ret = cst_pool_loc_descr (loc)))
18107 have_address = 1;
18108 }
18109 /* Otherwise, process the argument and look for the address. */
18110 if (!list_ret && !ret)
18111 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18112 else
18113 {
18114 if (want_address)
18115 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18116 return NULL;
18117 }
18118 break;
18119
18120 case VAR_DECL:
18121 if (DECL_THREAD_LOCAL_P (loc))
18122 {
18123 rtx rtl;
18124 enum dwarf_location_atom tls_op;
18125 enum dtprel_bool dtprel = dtprel_false;
18126
18127 if (targetm.have_tls)
18128 {
18129 /* If this is not defined, we have no way to emit the
18130 data. */
18131 if (!targetm.asm_out.output_dwarf_dtprel)
18132 return 0;
18133
18134 /* The way DW_OP_GNU_push_tls_address is specified, we
18135 can only look up addresses of objects in the current
18136 module. We used DW_OP_addr as first op, but that's
18137 wrong, because DW_OP_addr is relocated by the debug
18138 info consumer, while DW_OP_GNU_push_tls_address
18139 operand shouldn't be. */
18140 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18141 return 0;
18142 dtprel = dtprel_true;
18143 /* We check for DWARF 5 here because gdb did not implement
18144 DW_OP_form_tls_address until after 7.12. */
18145 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18146 : DW_OP_GNU_push_tls_address);
18147 }
18148 else
18149 {
18150 if (!targetm.emutls.debug_form_tls_address
18151 || !(dwarf_version >= 3 || !dwarf_strict))
18152 return 0;
18153 /* We stuffed the control variable into the DECL_VALUE_EXPR
18154 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18155 no longer appear in gimple code. We used the control
18156 variable specifically so that we could pick it up here. */
18157 loc = DECL_VALUE_EXPR (loc);
18158 tls_op = DW_OP_form_tls_address;
18159 }
18160
18161 rtl = rtl_for_decl_location (loc);
18162 if (rtl == NULL_RTX)
18163 return 0;
18164
18165 if (!MEM_P (rtl))
18166 return 0;
18167 rtl = XEXP (rtl, 0);
18168 if (! CONSTANT_P (rtl))
18169 return 0;
18170
18171 ret = new_addr_loc_descr (rtl, dtprel);
18172 ret1 = new_loc_descr (tls_op, 0, 0);
18173 add_loc_descr (&ret, ret1);
18174
18175 have_address = 1;
18176 break;
18177 }
18178 /* FALLTHRU */
18179
18180 case PARM_DECL:
18181 if (context != NULL && context->dpi != NULL
18182 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18183 {
18184 /* We are generating code for a DWARF procedure and we want to access
18185 one of its arguments: find the appropriate argument offset and let
18186 the resolve_args_picking pass compute the offset that complies
18187 with the stack frame size. */
18188 unsigned i = 0;
18189 tree cursor;
18190
18191 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18192 cursor != NULL_TREE && cursor != loc;
18193 cursor = TREE_CHAIN (cursor), ++i)
18194 ;
18195 /* If we are translating a DWARF procedure, all referenced parameters
18196 must belong to the current function. */
18197 gcc_assert (cursor != NULL_TREE);
18198
18199 ret = new_loc_descr (DW_OP_pick, i, 0);
18200 ret->frame_offset_rel = 1;
18201 break;
18202 }
18203 /* FALLTHRU */
18204
18205 case RESULT_DECL:
18206 if (DECL_HAS_VALUE_EXPR_P (loc))
18207 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18208 want_address, context);
18209 /* FALLTHRU */
18210
18211 case FUNCTION_DECL:
18212 {
18213 rtx rtl;
18214 var_loc_list *loc_list = lookup_decl_loc (loc);
18215
18216 if (loc_list && loc_list->first)
18217 {
18218 list_ret = dw_loc_list (loc_list, loc, want_address);
18219 have_address = want_address != 0;
18220 break;
18221 }
18222 rtl = rtl_for_decl_location (loc);
18223 if (rtl == NULL_RTX)
18224 {
18225 if (TREE_CODE (loc) != FUNCTION_DECL
18226 && early_dwarf
18227 && current_function_decl
18228 && want_address != 1
18229 && ! DECL_IGNORED_P (loc)
18230 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18231 || POINTER_TYPE_P (TREE_TYPE (loc)))
18232 && DECL_CONTEXT (loc) == current_function_decl
18233 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18234 <= DWARF2_ADDR_SIZE))
18235 {
18236 dw_die_ref ref = lookup_decl_die (loc);
18237 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18238 if (ref)
18239 {
18240 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18241 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18242 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18243 }
18244 else
18245 {
18246 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18247 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18248 }
18249 break;
18250 }
18251 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18252 return 0;
18253 }
18254 else if (CONST_INT_P (rtl))
18255 {
18256 HOST_WIDE_INT val = INTVAL (rtl);
18257 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18258 val &= GET_MODE_MASK (DECL_MODE (loc));
18259 ret = int_loc_descriptor (val);
18260 }
18261 else if (GET_CODE (rtl) == CONST_STRING)
18262 {
18263 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18264 return 0;
18265 }
18266 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18267 ret = new_addr_loc_descr (rtl, dtprel_false);
18268 else
18269 {
18270 machine_mode mode, mem_mode;
18271
18272 /* Certain constructs can only be represented at top-level. */
18273 if (want_address == 2)
18274 {
18275 ret = loc_descriptor (rtl, VOIDmode,
18276 VAR_INIT_STATUS_INITIALIZED);
18277 have_address = 1;
18278 }
18279 else
18280 {
18281 mode = GET_MODE (rtl);
18282 mem_mode = VOIDmode;
18283 if (MEM_P (rtl))
18284 {
18285 mem_mode = mode;
18286 mode = get_address_mode (rtl);
18287 rtl = XEXP (rtl, 0);
18288 have_address = 1;
18289 }
18290 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18291 VAR_INIT_STATUS_INITIALIZED);
18292 }
18293 if (!ret)
18294 expansion_failed (loc, rtl,
18295 "failed to produce loc descriptor for rtl");
18296 }
18297 }
18298 break;
18299
18300 case MEM_REF:
18301 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18302 {
18303 have_address = 1;
18304 goto do_plus;
18305 }
18306 /* Fallthru. */
18307 case INDIRECT_REF:
18308 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18309 have_address = 1;
18310 break;
18311
18312 case TARGET_MEM_REF:
18313 case SSA_NAME:
18314 case DEBUG_EXPR_DECL:
18315 return NULL;
18316
18317 case COMPOUND_EXPR:
18318 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18319 context);
18320
18321 CASE_CONVERT:
18322 case VIEW_CONVERT_EXPR:
18323 case SAVE_EXPR:
18324 case MODIFY_EXPR:
18325 case NON_LVALUE_EXPR:
18326 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18327 context);
18328
18329 case COMPONENT_REF:
18330 case BIT_FIELD_REF:
18331 case ARRAY_REF:
18332 case ARRAY_RANGE_REF:
18333 case REALPART_EXPR:
18334 case IMAGPART_EXPR:
18335 {
18336 tree obj, offset;
18337 poly_int64 bitsize, bitpos, bytepos;
18338 machine_mode mode;
18339 int unsignedp, reversep, volatilep = 0;
18340
18341 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18342 &unsignedp, &reversep, &volatilep);
18343
18344 gcc_assert (obj != loc);
18345
18346 list_ret = loc_list_from_tree_1 (obj,
18347 want_address == 2
18348 && known_eq (bitpos, 0)
18349 && !offset ? 2 : 1,
18350 context);
18351 /* TODO: We can extract the value of the small expression via shifting even
18352 for nonzero bitpos. */
18353 if (list_ret == 0)
18354 return 0;
18355 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18356 || !multiple_p (bitsize, BITS_PER_UNIT))
18357 {
18358 expansion_failed (loc, NULL_RTX,
18359 "bitfield access");
18360 return 0;
18361 }
18362
18363 if (offset != NULL_TREE)
18364 {
18365 /* Variable offset. */
18366 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18367 if (list_ret1 == 0)
18368 return 0;
18369 add_loc_list (&list_ret, list_ret1);
18370 if (!list_ret)
18371 return 0;
18372 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18373 }
18374
18375 HOST_WIDE_INT value;
18376 if (bytepos.is_constant (&value) && value > 0)
18377 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18378 value, 0));
18379 else if (maybe_ne (bytepos, 0))
18380 loc_list_plus_const (list_ret, bytepos);
18381
18382 have_address = 1;
18383 break;
18384 }
18385
18386 case INTEGER_CST:
18387 if ((want_address || !tree_fits_shwi_p (loc))
18388 && (ret = cst_pool_loc_descr (loc)))
18389 have_address = 1;
18390 else if (want_address == 2
18391 && tree_fits_shwi_p (loc)
18392 && (ret = address_of_int_loc_descriptor
18393 (int_size_in_bytes (TREE_TYPE (loc)),
18394 tree_to_shwi (loc))))
18395 have_address = 1;
18396 else if (tree_fits_shwi_p (loc))
18397 ret = int_loc_descriptor (tree_to_shwi (loc));
18398 else if (tree_fits_uhwi_p (loc))
18399 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18400 else
18401 {
18402 expansion_failed (loc, NULL_RTX,
18403 "Integer operand is not host integer");
18404 return 0;
18405 }
18406 break;
18407
18408 case CONSTRUCTOR:
18409 case REAL_CST:
18410 case STRING_CST:
18411 case COMPLEX_CST:
18412 if ((ret = cst_pool_loc_descr (loc)))
18413 have_address = 1;
18414 else if (TREE_CODE (loc) == CONSTRUCTOR)
18415 {
18416 tree type = TREE_TYPE (loc);
18417 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18418 unsigned HOST_WIDE_INT offset = 0;
18419 unsigned HOST_WIDE_INT cnt;
18420 constructor_elt *ce;
18421
18422 if (TREE_CODE (type) == RECORD_TYPE)
18423 {
18424 /* This is very limited, but it's enough to output
18425 pointers to member functions, as long as the
18426 referenced function is defined in the current
18427 translation unit. */
18428 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18429 {
18430 tree val = ce->value;
18431
18432 tree field = ce->index;
18433
18434 if (val)
18435 STRIP_NOPS (val);
18436
18437 if (!field || DECL_BIT_FIELD (field))
18438 {
18439 expansion_failed (loc, NULL_RTX,
18440 "bitfield in record type constructor");
18441 size = offset = (unsigned HOST_WIDE_INT)-1;
18442 ret = NULL;
18443 break;
18444 }
18445
18446 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18447 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18448 gcc_assert (pos + fieldsize <= size);
18449 if (pos < offset)
18450 {
18451 expansion_failed (loc, NULL_RTX,
18452 "out-of-order fields in record constructor");
18453 size = offset = (unsigned HOST_WIDE_INT)-1;
18454 ret = NULL;
18455 break;
18456 }
18457 if (pos > offset)
18458 {
18459 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18460 add_loc_descr (&ret, ret1);
18461 offset = pos;
18462 }
18463 if (val && fieldsize != 0)
18464 {
18465 ret1 = loc_descriptor_from_tree (val, want_address, context);
18466 if (!ret1)
18467 {
18468 expansion_failed (loc, NULL_RTX,
18469 "unsupported expression in field");
18470 size = offset = (unsigned HOST_WIDE_INT)-1;
18471 ret = NULL;
18472 break;
18473 }
18474 add_loc_descr (&ret, ret1);
18475 }
18476 if (fieldsize)
18477 {
18478 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18479 add_loc_descr (&ret, ret1);
18480 offset = pos + fieldsize;
18481 }
18482 }
18483
18484 if (offset != size)
18485 {
18486 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18487 add_loc_descr (&ret, ret1);
18488 offset = size;
18489 }
18490
18491 have_address = !!want_address;
18492 }
18493 else
18494 expansion_failed (loc, NULL_RTX,
18495 "constructor of non-record type");
18496 }
18497 else
18498 /* We can construct small constants here using int_loc_descriptor. */
18499 expansion_failed (loc, NULL_RTX,
18500 "constructor or constant not in constant pool");
18501 break;
18502
18503 case TRUTH_AND_EXPR:
18504 case TRUTH_ANDIF_EXPR:
18505 case BIT_AND_EXPR:
18506 op = DW_OP_and;
18507 goto do_binop;
18508
18509 case TRUTH_XOR_EXPR:
18510 case BIT_XOR_EXPR:
18511 op = DW_OP_xor;
18512 goto do_binop;
18513
18514 case TRUTH_OR_EXPR:
18515 case TRUTH_ORIF_EXPR:
18516 case BIT_IOR_EXPR:
18517 op = DW_OP_or;
18518 goto do_binop;
18519
18520 case FLOOR_DIV_EXPR:
18521 case CEIL_DIV_EXPR:
18522 case ROUND_DIV_EXPR:
18523 case TRUNC_DIV_EXPR:
18524 case EXACT_DIV_EXPR:
18525 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18526 return 0;
18527 op = DW_OP_div;
18528 goto do_binop;
18529
18530 case MINUS_EXPR:
18531 op = DW_OP_minus;
18532 goto do_binop;
18533
18534 case FLOOR_MOD_EXPR:
18535 case CEIL_MOD_EXPR:
18536 case ROUND_MOD_EXPR:
18537 case TRUNC_MOD_EXPR:
18538 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18539 {
18540 op = DW_OP_mod;
18541 goto do_binop;
18542 }
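/* Signed modulus: compute the remainder as x - (x / y) * y. The operations
   below duplicate both operands (two DW_OP_over operations), divide,
   multiply and subtract, leaving the remainder on the stack. */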
18543 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18544 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18545 if (list_ret == 0 || list_ret1 == 0)
18546 return 0;
18547
18548 add_loc_list (&list_ret, list_ret1);
18549 if (list_ret == 0)
18550 return 0;
18551 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18552 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18553 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18554 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18555 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18556 break;
18557
18558 case MULT_EXPR:
18559 op = DW_OP_mul;
18560 goto do_binop;
18561
18562 case LSHIFT_EXPR:
18563 op = DW_OP_shl;
18564 goto do_binop;
18565
18566 case RSHIFT_EXPR:
18567 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18568 goto do_binop;
18569
18570 case POINTER_PLUS_EXPR:
18571 case PLUS_EXPR:
18572 do_plus:
18573 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18574 {
18575 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18576 smarter to encode their opposite. The DW_OP_plus_uconst operation
18577 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18578 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18579 bytes, Y being the size of the operation that pushes the opposite
18580 of the addend. So let's choose the smallest representation. */
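/* For instance, on a 64-bit host an addend of -1 would require a 10-byte
   ULEB128 operand for DW_OP_plus_uconst, whereas pushing its opposite
   (DW_OP_lit1) followed by DW_OP_minus takes only two bytes. */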
18581 const tree tree_addend = TREE_OPERAND (loc, 1);
18582 offset_int wi_addend;
18583 HOST_WIDE_INT shwi_addend;
18584 dw_loc_descr_ref loc_naddend;
18585
18586 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18587 if (list_ret == 0)
18588 return 0;
18589
18590 /* Try to get the literal to push. It is the opposite of the addend,
18591 so as we rely on wrapping during DWARF evaluation, first decode
18592 the literal as a "DWARF-sized" signed number. */
18593 wi_addend = wi::to_offset (tree_addend);
18594 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18595 shwi_addend = wi_addend.to_shwi ();
18596 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18597 ? int_loc_descriptor (-shwi_addend)
18598 : NULL;
18599
18600 if (loc_naddend != NULL
18601 && ((unsigned) size_of_uleb128 (shwi_addend)
18602 > size_of_loc_descr (loc_naddend)))
18603 {
18604 add_loc_descr_to_each (list_ret, loc_naddend);
18605 add_loc_descr_to_each (list_ret,
18606 new_loc_descr (DW_OP_minus, 0, 0));
18607 }
18608 else
18609 {
18610 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18611 {
18612 loc_naddend = loc_cur;
18613 loc_cur = loc_cur->dw_loc_next;
18614 ggc_free (loc_naddend);
18615 }
18616 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18617 }
18618 break;
18619 }
18620
18621 op = DW_OP_plus;
18622 goto do_binop;
18623
18624 case LE_EXPR:
18625 op = DW_OP_le;
18626 goto do_comp_binop;
18627
18628 case GE_EXPR:
18629 op = DW_OP_ge;
18630 goto do_comp_binop;
18631
18632 case LT_EXPR:
18633 op = DW_OP_lt;
18634 goto do_comp_binop;
18635
18636 case GT_EXPR:
18637 op = DW_OP_gt;
18638 goto do_comp_binop;
18639
18640 do_comp_binop:
18641 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18642 {
18643 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18644 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18645 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18646 TREE_CODE (loc));
18647 break;
18648 }
18649 else
18650 goto do_binop;
18651
18652 case EQ_EXPR:
18653 op = DW_OP_eq;
18654 goto do_binop;
18655
18656 case NE_EXPR:
18657 op = DW_OP_ne;
18658 goto do_binop;
18659
18660 do_binop:
18661 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18662 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18663 if (list_ret == 0 || list_ret1 == 0)
18664 return 0;
18665
18666 add_loc_list (&list_ret, list_ret1);
18667 if (list_ret == 0)
18668 return 0;
18669 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18670 break;
18671
18672 case TRUTH_NOT_EXPR:
18673 case BIT_NOT_EXPR:
18674 op = DW_OP_not;
18675 goto do_unop;
18676
18677 case ABS_EXPR:
18678 op = DW_OP_abs;
18679 goto do_unop;
18680
18681 case NEGATE_EXPR:
18682 op = DW_OP_neg;
18683 goto do_unop;
18684
18685 do_unop:
18686 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18687 if (list_ret == 0)
18688 return 0;
18689
18690 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18691 break;
18692
18693 case MIN_EXPR:
18694 case MAX_EXPR:
18695 {
18696 const enum tree_code code =
18697 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18698
18699 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18700 build2 (code, integer_type_node,
18701 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18702 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18703 }
18704
18705 /* fall through */
18706
18707 case COND_EXPR:
18708 {
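/* The expression generated below has the shape:
   <condition>; DW_OP_bra -> L1; <else value>; DW_OP_skip -> L2;
   L1: <then value>; L2: DW_OP_nop.
   DW_OP_bra jumps to the "then" value when the condition is non-zero;
   otherwise the "else" value is evaluated and DW_OP_skip jumps over the
   "then" value. */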
18709 dw_loc_descr_ref lhs
18710 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18711 dw_loc_list_ref rhs
18712 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18713 dw_loc_descr_ref bra_node, jump_node, tmp;
18714
18715 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18716 if (list_ret == 0 || lhs == 0 || rhs == 0)
18717 return 0;
18718
18719 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18720 add_loc_descr_to_each (list_ret, bra_node);
18721
18722 add_loc_list (&list_ret, rhs);
18723 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18724 add_loc_descr_to_each (list_ret, jump_node);
18725
18726 add_loc_descr_to_each (list_ret, lhs);
18727 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18728 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18729
18730 /* ??? Need a node to point the skip at. Use a nop. */
18731 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18732 add_loc_descr_to_each (list_ret, tmp);
18733 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18734 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18735 }
18736 break;
18737
18738 case FIX_TRUNC_EXPR:
18739 return 0;
18740
18741 default:
18742 /* Leave front-end specific codes as simply unknown. This comes
18743 up, for instance, with the C STMT_EXPR. */
18744 if ((unsigned int) TREE_CODE (loc)
18745 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18746 {
18747 expansion_failed (loc, NULL_RTX,
18748 "language specific tree node");
18749 return 0;
18750 }
18751
18752 /* Otherwise this is a generic code; we should just list all of
18753 these explicitly. We forgot one. */
18754 if (flag_checking)
18755 gcc_unreachable ();
18756
18757 /* In a release build, we want to degrade gracefully: better to
18758 generate incomplete debugging information than to crash. */
18759 return NULL;
18760 }
18761
18762 if (!ret && !list_ret)
18763 return 0;
18764
18765 if (want_address == 2 && !have_address
18766 && (dwarf_version >= 4 || !dwarf_strict))
18767 {
18768 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18769 {
18770 expansion_failed (loc, NULL_RTX,
18771 "DWARF address size mismatch");
18772 return 0;
18773 }
18774 if (ret)
18775 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18776 else
18777 add_loc_descr_to_each (list_ret,
18778 new_loc_descr (DW_OP_stack_value, 0, 0));
18779 have_address = 1;
18780 }
18781 /* Show if we can't fill the request for an address. */
18782 if (want_address && !have_address)
18783 {
18784 expansion_failed (loc, NULL_RTX,
18785 "Want address and only have value");
18786 return 0;
18787 }
18788
18789 gcc_assert (!ret || !list_ret);
18790
18791 /* If we've got an address and don't want one, dereference. */
18792 if (!want_address && have_address)
18793 {
18794 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18795
18796 if (size > DWARF2_ADDR_SIZE || size == -1)
18797 {
18798 expansion_failed (loc, NULL_RTX,
18799 "DWARF address size mismatch");
18800 return 0;
18801 }
18802 else if (size == DWARF2_ADDR_SIZE)
18803 op = DW_OP_deref;
18804 else
18805 op = DW_OP_deref_size;
18806
18807 if (ret)
18808 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18809 else
18810 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18811 }
18812 if (ret)
18813 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18814
18815 return list_ret;
18816 }
18817
18818 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18819 expressions. */
18820
18821 static dw_loc_list_ref
18822 loc_list_from_tree (tree loc, int want_address,
18823 struct loc_descr_context *context)
18824 {
18825 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18826
18827 for (dw_loc_list_ref loc_cur = result;
18828 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18829 loc_descr_without_nops (loc_cur->expr);
18830 return result;
18831 }
18832
18833 /* Same as above, but return only a single location expression. */
18834 static dw_loc_descr_ref
18835 loc_descriptor_from_tree (tree loc, int want_address,
18836 struct loc_descr_context *context)
18837 {
18838 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18839 if (!ret)
18840 return NULL;
18841 if (ret->dw_loc_next)
18842 {
18843 expansion_failed (loc, NULL_RTX,
18844 "Location list where only loc descriptor needed");
18845 return NULL;
18846 }
18847 return ret->expr;
18848 }
18849
18850 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18851 pointer to the declared type for the relevant field variable, or return
18852 `integer_type_node' if the given node turns out to be an
18853 ERROR_MARK node. */
18854
18855 static inline tree
18856 field_type (const_tree decl)
18857 {
18858 tree type;
18859
18860 if (TREE_CODE (decl) == ERROR_MARK)
18861 return integer_type_node;
18862
18863 type = DECL_BIT_FIELD_TYPE (decl);
18864 if (type == NULL_TREE)
18865 type = TREE_TYPE (decl);
18866
18867 return type;
18868 }
18869
18870 /* Given a pointer to a tree node, return the alignment in bits for
18871 it, or else return BITS_PER_WORD if the node actually turns out to
18872 be an ERROR_MARK node. */
18873
18874 static inline unsigned
18875 simple_type_align_in_bits (const_tree type)
18876 {
18877 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18878 }
18879
18880 static inline unsigned
18881 simple_decl_align_in_bits (const_tree decl)
18882 {
18883 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18884 }
18885
18886 /* Return the result of rounding T up to ALIGN. */
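/* For instance, round_up_to_align (13, 8) yields 16, while a value that is
   already a multiple of ALIGN is returned unchanged. */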
18887
18888 static inline offset_int
18889 round_up_to_align (const offset_int &t, unsigned int align)
18890 {
18891 return wi::udiv_trunc (t + align - 1, align) * align;
18892 }
18893
18894 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18895 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18896 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18897 if we fail to return the size in one of these two forms. */
18898
18899 static dw_loc_descr_ref
18900 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18901 {
18902 tree tree_size;
18903 struct loc_descr_context ctx;
18904
18905 /* Prefer to return a constant integer, if possible. */
18906 *cst_size = int_size_in_bytes (type);
18907 if (*cst_size != -1)
18908 return NULL;
18909
18910 ctx.context_type = const_cast<tree> (type);
18911 ctx.base_decl = NULL_TREE;
18912 ctx.dpi = NULL;
18913 ctx.placeholder_arg = false;
18914 ctx.placeholder_seen = false;
18915
18916 type = TYPE_MAIN_VARIANT (type);
18917 tree_size = TYPE_SIZE_UNIT (type);
18918 return ((tree_size != NULL_TREE)
18919 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
18920 : NULL);
18921 }
18922
18923 /* Helper structure for RECORD_TYPE processing. */
18924 struct vlr_context
18925 {
18926 /* Root RECORD_TYPE. It is needed to generate data member location
18927 descriptions in variable-length records (VLR), but also to cope with
18928 variants, which are composed of nested structures multiplexed with
18929 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
18930 function processing a FIELD_DECL, it is required to be non null. */
18931 tree struct_type;
18932 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
18933 QUAL_UNION_TYPE), this holds an expression that computes the offset for
18934 this variant part as part of the root record (in storage units). For
18935 regular records, it must be NULL_TREE. */
18936 tree variant_part_offset;
18937 };
18938
18939 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
18940 addressed byte of the "containing object" for the given FIELD_DECL. If
18941 possible, return a native constant through CST_OFFSET (in which case NULL is
18942 returned); otherwise return a DWARF expression that computes the offset.
18943
18944 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
18945 that offset is, either because the argument turns out to be a pointer to an
18946 ERROR_MARK node, or because the offset expression is too complex for us.
18947
18948 CTX is required: see the comment for VLR_CONTEXT. */
18949
18950 static dw_loc_descr_ref
18951 field_byte_offset (const_tree decl, struct vlr_context *ctx,
18952 HOST_WIDE_INT *cst_offset)
18953 {
18954 tree tree_result;
18955 dw_loc_list_ref loc_result;
18956
18957 *cst_offset = 0;
18958
18959 if (TREE_CODE (decl) == ERROR_MARK)
18960 return NULL;
18961 else
18962 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
18963
18964 /* We cannot handle variable bit offsets at the moment, so abort if it's the
18965 case. */
18966 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
18967 return NULL;
18968
18969 #ifdef PCC_BITFIELD_TYPE_MATTERS
18970 /* We used to handle only constant offsets in all cases. Now, we properly
18971 handle dynamic byte offsets only when PCC bitfield type layout doesn't
18972 matter. */
18973 if (PCC_BITFIELD_TYPE_MATTERS
18974 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
18975 {
18976 offset_int object_offset_in_bits;
18977 offset_int object_offset_in_bytes;
18978 offset_int bitpos_int;
18979 tree type;
18980 tree field_size_tree;
18981 offset_int deepest_bitpos;
18982 offset_int field_size_in_bits;
18983 unsigned int type_align_in_bits;
18984 unsigned int decl_align_in_bits;
18985 offset_int type_size_in_bits;
18986
18987 bitpos_int = wi::to_offset (bit_position (decl));
18988 type = field_type (decl);
18989 type_size_in_bits = offset_int_type_size_in_bits (type);
18990 type_align_in_bits = simple_type_align_in_bits (type);
18991
18992 field_size_tree = DECL_SIZE (decl);
18993
18994 /* The size could be unspecified if there was an error, or for
18995 a flexible array member. */
18996 if (!field_size_tree)
18997 field_size_tree = bitsize_zero_node;
18998
18999 /* If the size of the field is not constant, use the type size. */
19000 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19001 field_size_in_bits = wi::to_offset (field_size_tree);
19002 else
19003 field_size_in_bits = type_size_in_bits;
19004
19005 decl_align_in_bits = simple_decl_align_in_bits (decl);
19006
19007 /* The GCC front-end doesn't make any attempt to keep track of the
19008 starting bit offset (relative to the start of the containing
19009 structure type) of the hypothetical "containing object" for a
19010 bit-field. Thus, when computing the byte offset value for the
19011 start of the "containing object" of a bit-field, we must deduce
19012 this information on our own. This can be rather tricky to do in
19013 some cases. For example, handling the following structure type
19014 definition when compiling for an i386/i486 target (which only
19015 aligns long long's to 32-bit boundaries) can be very tricky:
19016
19017 struct S { int field1; long long field2:31; };
19018
19019 Fortunately, there is a simple rule-of-thumb which can be used
19020 in such cases. When compiling for an i386/i486, GCC will
19021 allocate 8 bytes for the structure shown above. It decides to
19022 do this based upon one simple rule for bit-field allocation.
19023 GCC allocates each "containing object" for each bit-field at
19024 the first (i.e. lowest addressed) legitimate alignment boundary
19025 (based upon the required minimum alignment for the declared
19026 type of the field) which it can possibly use, subject to the
19027 condition that there is still enough available space remaining
19028 in the containing object (when allocated at the selected point)
19029 to fully accommodate all of the bits of the bit-field itself.
19030
19031 This simple rule makes it obvious why GCC allocates 8 bytes for
19032 each object of the structure type shown above. When looking
19033 for a place to allocate the "containing object" for `field2',
19034 the compiler simply tries to allocate a 64-bit "containing
19035 object" at each successive 32-bit boundary (starting at zero)
19036 until it finds a place to allocate that 64-bit field such that
19037 at least 31 contiguous (and previously unallocated) bits remain
19038 within that selected 64-bit field. (As it turns out, for the
19039 example above, the compiler finds it is OK to allocate the
19040 "containing object" 64-bit field at bit-offset zero within the
19041 structure type.)
19042
19043 Here we attempt to work backwards from the limited set of facts
19044 we're given, and we try to deduce from those facts, where GCC
19045 must have believed that the containing object started (within
19046 the structure type). The value we deduce is then used (by the
19047 callers of this routine) to generate DW_AT_location and
19048 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19049 the case of DW_AT_location, regular fields as well). */
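/* Working the `struct S' example above through the code below: bitpos_int
   is 32, field_size_in_bits is 31 and type_size_in_bits is 64, so
   deepest_bitpos is 63 and object_offset_in_bits is 63 - 64 = -1; rounding
   that up to the 32-bit alignment of long long on i386 gives 0, i.e. the
   hypothetical containing object starts at byte 0 of the structure, as
   described above. */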
19050
19051 /* Figure out the bit-distance from the start of the structure to
19052 the "deepest" bit of the bit-field. */
19053 deepest_bitpos = bitpos_int + field_size_in_bits;
19054
19055 /* This is the tricky part. Use some fancy footwork to deduce
19056 where the lowest addressed bit of the containing object must
19057 be. */
19058 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19059
19060 /* Round up to type_align by default. This works best for
19061 bitfields. */
19062 object_offset_in_bits
19063 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19064
19065 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19066 {
19067 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19068
19069 /* Round up to decl_align instead. */
19070 object_offset_in_bits
19071 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19072 }
19073
19074 object_offset_in_bytes
19075 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19076 if (ctx->variant_part_offset == NULL_TREE)
19077 {
19078 *cst_offset = object_offset_in_bytes.to_shwi ();
19079 return NULL;
19080 }
19081 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19082 }
19083 else
19084 #endif /* PCC_BITFIELD_TYPE_MATTERS */
19085 tree_result = byte_position (decl);
19086
19087 if (ctx->variant_part_offset != NULL_TREE)
19088 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19089 ctx->variant_part_offset, tree_result);
19090
19091 /* If the byte offset is a constant, it's simpler to handle a native
19092 constant rather than a DWARF expression. */
19093 if (TREE_CODE (tree_result) == INTEGER_CST)
19094 {
19095 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19096 return NULL;
19097 }
19098 struct loc_descr_context loc_ctx = {
19099 ctx->struct_type, /* context_type */
19100 NULL_TREE, /* base_decl */
19101 NULL, /* dpi */
19102 false, /* placeholder_arg */
19103 false /* placeholder_seen */
19104 };
19105 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19106
19107 /* We want a DWARF expression: abort if we only have a location list with
19108 multiple elements. */
19109 if (!loc_result || !single_element_loc_list_p (loc_result))
19110 return NULL;
19111 else
19112 return loc_result->expr;
19113 }
19114 \f
19115 /* The following routines define various Dwarf attributes and any data
19116 associated with them. */
19117
19118 /* Add a location description attribute value to a DIE.
19119
19120 This emits location attributes suitable for whole variables and
19121 whole parameters. Note that the location attributes for struct fields are
19122 generated by the routine `data_member_location_attribute' below. */
19123
19124 static inline void
19125 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19126 dw_loc_list_ref descr)
19127 {
19128 bool check_no_locviews = true;
19129 if (descr == 0)
19130 return;
19131 if (single_element_loc_list_p (descr))
19132 add_AT_loc (die, attr_kind, descr->expr);
19133 else
19134 {
19135 add_AT_loc_list (die, attr_kind, descr);
19136 gcc_assert (descr->ll_symbol);
19137 if (attr_kind == DW_AT_location && descr->vl_symbol
19138 && dwarf2out_locviews_in_attribute ())
19139 {
19140 add_AT_view_list (die, DW_AT_GNU_locviews);
19141 check_no_locviews = false;
19142 }
19143 }
19144
19145 if (check_no_locviews)
19146 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19147 }
19148
19149 /* Add DW_AT_accessibility attribute to DIE if needed. */
19150
19151 static void
19152 add_accessibility_attribute (dw_die_ref die, tree decl)
19153 {
19154 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19155 children, otherwise the default is DW_ACCESS_public. In DWARF2
19156 the default has always been DW_ACCESS_public. */
19157 if (TREE_PROTECTED (decl))
19158 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19159 else if (TREE_PRIVATE (decl))
19160 {
19161 if (dwarf_version == 2
19162 || die->die_parent == NULL
19163 || die->die_parent->die_tag != DW_TAG_class_type)
19164 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19165 }
19166 else if (dwarf_version > 2
19167 && die->die_parent
19168 && die->die_parent->die_tag == DW_TAG_class_type)
19169 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19170 }
19171
19172 /* Attach the specialized form of location attribute used for data members of
19173 struct and union types. In the special case of a FIELD_DECL node which
19174 represents a bit-field, the "offset" part of this special location
19175 descriptor must indicate the distance in bytes from the lowest-addressed
19176 byte of the containing struct or union type to the lowest-addressed byte of
19177 the "containing object" for the bit-field. (See the `field_byte_offset'
19178 function above).
19179
19180 For any given bit-field, the "containing object" is a hypothetical object
19181 (of some integral or enum type) within which the given bit-field lives. The
19182 type of this hypothetical "containing object" is always the same as the
19183 declared type of the individual bit-field itself (for GCC anyway... the
19184 DWARF spec doesn't actually mandate this). Note that it is the size (in
19185 bytes) of the hypothetical "containing object" which will be given in the
19186 DW_AT_byte_size attribute for this bit-field. (See the
19187 `byte_size_attribute' function below.) It is also used when calculating the
19188 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19189 function below.)
19190
19191 CTX is required: see the comment for VLR_CONTEXT. */
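
/* Illustrative sketch (hypothetical layout; exact values depend on the
   target ABI): for

     struct S { unsigned a : 3; unsigned b : 5; };

   both bit-fields would typically share an "unsigned int" containing
   object starting at byte 0 of S, so each member DIE would carry
   DW_AT_data_member_location 0 and DW_AT_byte_size 4, with
   DW_AT_bit_offset (or DW_AT_data_bit_offset) distinguishing the two
   fields within that containing object. */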
19192
19193 static void
19194 add_data_member_location_attribute (dw_die_ref die,
19195 tree decl,
19196 struct vlr_context *ctx)
19197 {
19198 HOST_WIDE_INT offset;
19199 dw_loc_descr_ref loc_descr = 0;
19200
19201 if (TREE_CODE (decl) == TREE_BINFO)
19202 {
19203 /* We're working on the TAG_inheritance for a base class. */
19204 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19205 {
19206 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19207 aren't at a fixed offset from all (sub)objects of the same
19208 type. We need to extract the appropriate offset from our
19209 vtable. The following dwarf expression means
19210
19211 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19212
19213 This is specific to the V3 ABI, of course. */
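
/* A rough sketch of the expression assembled below, with the stack
   shown after each operation (the exact constant opcode chosen by
   int_loc_descriptor may differ):

     DW_OP_dup             ObAddr ObAddr
     DW_OP_constu Offset   ObAddr VtblAddr Offset  (after DW_OP_deref)
     DW_OP_minus           ObAddr (VtblAddr - Offset)
     DW_OP_deref           ObAddr VBaseOffset
     DW_OP_plus            BaseAddr */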
19214
19215 dw_loc_descr_ref tmp;
19216
19217 /* Make a copy of the object address. */
19218 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19219 add_loc_descr (&loc_descr, tmp);
19220
19221 /* Extract the vtable address. */
19222 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19223 add_loc_descr (&loc_descr, tmp);
19224
19225 /* Calculate the address of the offset. */
19226 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19227 gcc_assert (offset < 0);
19228
19229 tmp = int_loc_descriptor (-offset);
19230 add_loc_descr (&loc_descr, tmp);
19231 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19232 add_loc_descr (&loc_descr, tmp);
19233
19234 /* Extract the offset. */
19235 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19236 add_loc_descr (&loc_descr, tmp);
19237
19238 /* Add it to the object address. */
19239 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19240 add_loc_descr (&loc_descr, tmp);
19241 }
19242 else
19243 offset = tree_to_shwi (BINFO_OFFSET (decl));
19244 }
19245 else
19246 {
19247 loc_descr = field_byte_offset (decl, ctx, &offset);
19248
19249 /* If loc_descr is available then we know the field offset is dynamic.
19250 However, GDB does not handle dynamic field offsets very well at the
19251 moment. */
19252 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19253 {
19254 loc_descr = NULL;
19255 offset = 0;
19256 }
19257
19258 /* Data member location evaluation starts with the base address on the
19259 stack. Compute the field offset and add it to this base address. */
19260 else if (loc_descr != NULL)
19261 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19262 }
19263
19264 if (! loc_descr)
19265 {
19266 /* While DW_AT_data_bit_offset was already added in DWARF4,
19267 GDB, for example, only added support for it in November 2016. For
19268 DWARF5 we need newer debug info consumers anyway. We might change
19269 this to dwarf_version >= 4 once most consumers have caught up. */
19270 if (dwarf_version >= 5
19271 && TREE_CODE (decl) == FIELD_DECL
19272 && DECL_BIT_FIELD_TYPE (decl))
19273 {
19274 tree off = bit_position (decl);
19275 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19276 {
19277 remove_AT (die, DW_AT_byte_size);
19278 remove_AT (die, DW_AT_bit_offset);
19279 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19280 return;
19281 }
19282 }
19283 if (dwarf_version > 2)
19284 {
19285 /* Don't need to output a location expression, just the constant. */
19286 if (offset < 0)
19287 add_AT_int (die, DW_AT_data_member_location, offset);
19288 else
19289 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19290 return;
19291 }
19292 else
19293 {
19294 enum dwarf_location_atom op;
19295
19296 /* The DWARF2 standard says that we should assume that the structure
19297 address is already on the stack, so we can specify a structure
19298 field address by using DW_OP_plus_uconst. */
19299 op = DW_OP_plus_uconst;
19300 loc_descr = new_loc_descr (op, offset, 0);
19301 }
19302 }
19303
19304 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19305 }
19306
19307 /* Writes integer values to dw_vec_const array. */
19308
19309 static void
19310 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19311 {
19312 while (size != 0)
19313 {
19314 *dest++ = val & 0xff;
19315 val >>= 8;
19316 --size;
19317 }
19318 }
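
/* For instance (a sketch, independent of host endianness):
   insert_int (0x1234, 2, dest) stores dest[0] = 0x34 and dest[1] = 0x12,
   i.e. the value is always emitted least-significant byte first. */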
19319
19320 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19321
19322 static HOST_WIDE_INT
19323 extract_int (const unsigned char *src, unsigned int size)
19324 {
19325 HOST_WIDE_INT val = 0;
19326
19327 src += size;
19328 while (size != 0)
19329 {
19330 val <<= 8;
19331 val |= *--src & 0xff;
19332 --size;
19333 }
19334 return val;
19335 }
19336
19337 /* Writes wide_int values to dw_vec_const array. */
19338
19339 static void
19340 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19341 {
19342 int i;
19343
19344 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19345 {
19346 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19347 return;
19348 }
19349
19350 /* We'd have to extend this code to support odd sizes. */
19351 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19352
19353 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19354
19355 if (WORDS_BIG_ENDIAN)
19356 for (i = n - 1; i >= 0; i--)
19357 {
19358 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19359 dest += sizeof (HOST_WIDE_INT);
19360 }
19361 else
19362 for (i = 0; i < n; i++)
19363 {
19364 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19365 dest += sizeof (HOST_WIDE_INT);
19366 }
19367 }
19368
19369 /* Writes floating point values to dw_vec_const array. */
19370
19371 static void
19372 insert_float (const_rtx rtl, unsigned char *array)
19373 {
19374 long val[4];
19375 int i;
19376 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19377
19378 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19379
19380 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19381 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19382 {
19383 insert_int (val[i], 4, array);
19384 array += 4;
19385 }
19386 }
19387
19388 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19389 does not have a "location" either in memory or in a register. These
19390 things can arise in GNU C when a constant is passed as an actual parameter
19391 to an inlined function. They can also arise in C++ where declared
19392 constants do not necessarily get memory "homes". */
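
/* A hypothetical example: if

     static inline int twice (int x) { return 2 * x; }

   is called as twice (21) and the call is inlined, the formal "x" may
   have no stack slot or register at all; the DW_TAG_formal_parameter
   DIE in the inlined instance can then carry DW_AT_const_value 21
   instead of a DW_AT_location. */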
19393
19394 static bool
19395 add_const_value_attribute (dw_die_ref die, rtx rtl)
19396 {
19397 switch (GET_CODE (rtl))
19398 {
19399 case CONST_INT:
19400 {
19401 HOST_WIDE_INT val = INTVAL (rtl);
19402
19403 if (val < 0)
19404 add_AT_int (die, DW_AT_const_value, val);
19405 else
19406 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19407 }
19408 return true;
19409
19410 case CONST_WIDE_INT:
19411 {
19412 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19413 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19414 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19415 wide_int w = wi::zext (w1, prec);
19416 add_AT_wide (die, DW_AT_const_value, w);
19417 }
19418 return true;
19419
19420 case CONST_DOUBLE:
19421 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19422 floating-point constant. A CONST_DOUBLE is used whenever the
19423 constant requires more than one word in order to be adequately
19424 represented. */
19425 if (TARGET_SUPPORTS_WIDE_INT == 0
19426 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19427 add_AT_double (die, DW_AT_const_value,
19428 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19429 else
19430 {
19431 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19432 unsigned int length = GET_MODE_SIZE (mode);
19433 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19434
19435 insert_float (rtl, array);
19436 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19437 }
19438 return true;
19439
19440 case CONST_VECTOR:
19441 {
19442 unsigned int length;
19443 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19444 return false;
19445
19446 machine_mode mode = GET_MODE (rtl);
19447 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19448 unsigned char *array
19449 = ggc_vec_alloc<unsigned char> (length * elt_size);
19450 unsigned int i;
19451 unsigned char *p;
19452 machine_mode imode = GET_MODE_INNER (mode);
19453
19454 switch (GET_MODE_CLASS (mode))
19455 {
19456 case MODE_VECTOR_INT:
19457 for (i = 0, p = array; i < length; i++, p += elt_size)
19458 {
19459 rtx elt = CONST_VECTOR_ELT (rtl, i);
19460 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19461 }
19462 break;
19463
19464 case MODE_VECTOR_FLOAT:
19465 for (i = 0, p = array; i < length; i++, p += elt_size)
19466 {
19467 rtx elt = CONST_VECTOR_ELT (rtl, i);
19468 insert_float (elt, p);
19469 }
19470 break;
19471
19472 default:
19473 gcc_unreachable ();
19474 }
19475
19476 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19477 }
19478 return true;
19479
19480 case CONST_STRING:
19481 if (dwarf_version >= 4 || !dwarf_strict)
19482 {
19483 dw_loc_descr_ref loc_result;
19484 resolve_one_addr (&rtl);
19485 rtl_addr:
19486 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19487 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19488 add_AT_loc (die, DW_AT_location, loc_result);
19489 vec_safe_push (used_rtx_array, rtl);
19490 return true;
19491 }
19492 return false;
19493
19494 case CONST:
19495 if (CONSTANT_P (XEXP (rtl, 0)))
19496 return add_const_value_attribute (die, XEXP (rtl, 0));
19497 /* FALLTHROUGH */
19498 case SYMBOL_REF:
19499 if (!const_ok_for_output (rtl))
19500 return false;
19501 /* FALLTHROUGH */
19502 case LABEL_REF:
19503 if (dwarf_version >= 4 || !dwarf_strict)
19504 goto rtl_addr;
19505 return false;
19506
19507 case PLUS:
19508 /* In cases where an inlined instance of an inline function is passed
19509 the address of an `auto' variable (which is local to the caller) we
19510 can get a situation where the DECL_RTL of the artificial local
19511 variable (for the inlining) which acts as a stand-in for the
19512 corresponding formal parameter (of the inline function) will look
19513 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19514 exactly a compile-time constant expression, but it isn't the address
19515 of the (artificial) local variable either. Rather, it represents the
19516 *value* which the artificial local variable always has during its
19517 lifetime. We currently have no way to represent such quasi-constant
19518 values in Dwarf, so for now we just punt and generate nothing. */
19519 return false;
19520
19521 case HIGH:
19522 case CONST_FIXED:
19523 return false;
19524
19525 case MEM:
19526 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19527 && MEM_READONLY_P (rtl)
19528 && GET_MODE (rtl) == BLKmode)
19529 {
19530 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19531 return true;
19532 }
19533 return false;
19534
19535 default:
19536 /* No other kinds of rtx should be possible here. */
19537 gcc_unreachable ();
19538 }
19539 return false;
19540 }
19541
19542 /* Determine whether the evaluation of EXPR references any variables
19543 or functions which aren't otherwise used (and therefore may not be
19544 output). */
19545 static tree
19546 reference_to_unused (tree * tp, int * walk_subtrees,
19547 void * data ATTRIBUTE_UNUSED)
19548 {
19549 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19550 *walk_subtrees = 0;
19551
19552 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19553 && ! TREE_ASM_WRITTEN (*tp))
19554 return *tp;
19555 /* ??? The C++ FE emits debug information for using decls, so
19556 putting gcc_unreachable here falls over. See PR31899. For now
19557 be conservative. */
19558 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19559 return *tp;
19560 else if (VAR_P (*tp))
19561 {
19562 varpool_node *node = varpool_node::get (*tp);
19563 if (!node || !node->definition)
19564 return *tp;
19565 }
19566 else if (TREE_CODE (*tp) == FUNCTION_DECL
19567 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19568 {
19569 /* The call graph machinery must have finished analyzing,
19570 optimizing and gimplifying the CU by now.
19571 So if *TP has no call graph node associated
19572 to it, it means *TP will not be emitted. */
19573 if (!cgraph_node::get (*tp))
19574 return *tp;
19575 }
19576 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19577 return *tp;
19578
19579 return NULL_TREE;
19580 }
19581
19582 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19583 for use in a later add_const_value_attribute call. */
19584
19585 static rtx
19586 rtl_for_decl_init (tree init, tree type)
19587 {
19588 rtx rtl = NULL_RTX;
19589
19590 STRIP_NOPS (init);
19591
19592 /* If a variable is initialized with a string constant without embedded
19593 zeros, build CONST_STRING. */
19594 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19595 {
19596 tree enttype = TREE_TYPE (type);
19597 tree domain = TYPE_DOMAIN (type);
19598 scalar_int_mode mode;
19599
19600 if (is_int_mode (TYPE_MODE (enttype), &mode)
19601 && GET_MODE_SIZE (mode) == 1
19602 && domain
19603 && TYPE_MAX_VALUE (domain)
19604 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19605 && integer_zerop (TYPE_MIN_VALUE (domain))
19606 && compare_tree_int (TYPE_MAX_VALUE (domain),
19607 TREE_STRING_LENGTH (init) - 1) == 0
19608 && ((size_t) TREE_STRING_LENGTH (init)
19609 == strlen (TREE_STRING_POINTER (init)) + 1))
19610 {
19611 rtl = gen_rtx_CONST_STRING (VOIDmode,
19612 ggc_strdup (TREE_STRING_POINTER (init)));
19613 rtl = gen_rtx_MEM (BLKmode, rtl);
19614 MEM_READONLY_P (rtl) = 1;
19615 }
19616 }
19617 /* Other aggregates, and complex values, could be represented using
19618 CONCAT: FIXME! */
19619 else if (AGGREGATE_TYPE_P (type)
19620 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19621 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19622 || TREE_CODE (type) == COMPLEX_TYPE)
19623 ;
19624 /* Vectors only work if their mode is supported by the target.
19625 FIXME: generic vectors ought to work too. */
19626 else if (TREE_CODE (type) == VECTOR_TYPE
19627 && !VECTOR_MODE_P (TYPE_MODE (type)))
19628 ;
19629 /* If the initializer is something that we know will expand into an
19630 immediate RTL constant, expand it now. We must be careful not to
19631 reference variables which won't be output. */
19632 else if (initializer_constant_valid_p (init, type)
19633 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19634 {
19635 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19636 possible. */
19637 if (TREE_CODE (type) == VECTOR_TYPE)
19638 switch (TREE_CODE (init))
19639 {
19640 case VECTOR_CST:
19641 break;
19642 case CONSTRUCTOR:
19643 if (TREE_CONSTANT (init))
19644 {
19645 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19646 bool constant_p = true;
19647 tree value;
19648 unsigned HOST_WIDE_INT ix;
19649
19650 /* Even when ctor is constant, it might contain non-*_CST
19651 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19652 belong into VECTOR_CST nodes. */
19653 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19654 if (!CONSTANT_CLASS_P (value))
19655 {
19656 constant_p = false;
19657 break;
19658 }
19659
19660 if (constant_p)
19661 {
19662 init = build_vector_from_ctor (type, elts);
19663 break;
19664 }
19665 }
19666 /* FALLTHRU */
19667
19668 default:
19669 return NULL;
19670 }
19671
19672 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19673
19674 /* If expand_expr returns a MEM, it wasn't immediate. */
19675 gcc_assert (!rtl || !MEM_P (rtl));
19676 }
19677
19678 return rtl;
19679 }
19680
19681 /* Generate RTL for the variable DECL to represent its location. */
19682
19683 static rtx
19684 rtl_for_decl_location (tree decl)
19685 {
19686 rtx rtl;
19687
19688 /* Here we have to decide where we are going to say the parameter "lives"
19689 (as far as the debugger is concerned). We only have a couple of
19690 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19691
19692 DECL_RTL normally indicates where the parameter lives during most of the
19693 activation of the function. If optimization is enabled however, this
19694 could be either NULL or else a pseudo-reg. Both of those cases indicate
19695 that the parameter doesn't really live anywhere (as far as the code
19696 generation parts of GCC are concerned) during most of the function's
19697 activation. That will happen (for example) if the parameter is never
19698 referenced within the function.
19699
19700 We could just generate a location descriptor here for all non-NULL
19701 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19702 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19703 where DECL_RTL is NULL or is a pseudo-reg.
19704
19705 Note however that we can only get away with using DECL_INCOMING_RTL as
19706 a backup substitute for DECL_RTL in certain limited cases. In cases
19707 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19708 we can be sure that the parameter was passed using the same type as it is
19709 declared to have within the function, and that its DECL_INCOMING_RTL
19710 points us to a place where a value of that type is passed.
19711
19712 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19713 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19714 because in these cases DECL_INCOMING_RTL points us to a value of some
19715 type which is *different* from the type of the parameter itself. Thus,
19716 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19717 such cases, the debugger would end up (for example) trying to fetch a
19718 `float' from a place which actually contains the first part of a
19719 `double'. That would lead to really incorrect and confusing
19720 output at debug-time.
19721
19722 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19723 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19724 are a couple of exceptions however. On little-endian machines we can
19725 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19726 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19727 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19728 when (on a little-endian machine) a non-prototyped function has a
19729 parameter declared to be of type `short' or `char'. In such cases,
19730 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19731 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19732 passed `int' value. If the debugger then uses that address to fetch
19733 a `short' or a `char' (on a little-endian machine) the result will be
19734 the correct data, so we allow for such exceptional cases below.
19735
19736 Note that our goal here is to describe the place where the given formal
19737 parameter lives during most of the function's activation (i.e. between the
19738 end of the prologue and the start of the epilogue). We'll do that as best
19739 as we can. Note however that if the given formal parameter is modified
19740 sometime during the execution of the function, then a stack backtrace (at
19741 debug-time) will show the function as having been called with the *new*
19742 value rather than the value which was originally passed in. This happens
19743 rarely enough that it is not a major problem, but it *is* a problem, and
19744 I'd like to fix it.
19745
19746 A future version of dwarf2out.c may generate two additional attributes for
19747 any given DW_TAG_formal_parameter DIE which will describe the "passed
19748 type" and the "passed location" for the given formal parameter in addition
19749 to the attributes we now generate to indicate the "declared type" and the
19750 "active location" for each parameter. This additional set of attributes
19751 could be used by debuggers for stack backtraces. Separately, note that
19752 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19753 This happens (for example) for inlined-instances of inline function formal
19754 parameters which are never referenced. This really shouldn't be
19755 happening. All PARM_DECL nodes should get valid non-NULL
19756 DECL_INCOMING_RTL values. FIXME. */
19757
19758 /* Use DECL_RTL as the "location" unless we find something better. */
19759 rtl = DECL_RTL_IF_SET (decl);
19760
19761 /* When generating abstract instances, ignore everything except
19762 constants, symbols living in memory, and symbols living in
19763 fixed registers. */
19764 if (! reload_completed)
19765 {
19766 if (rtl
19767 && (CONSTANT_P (rtl)
19768 || (MEM_P (rtl)
19769 && CONSTANT_P (XEXP (rtl, 0)))
19770 || (REG_P (rtl)
19771 && VAR_P (decl)
19772 && TREE_STATIC (decl))))
19773 {
19774 rtl = targetm.delegitimize_address (rtl);
19775 return rtl;
19776 }
19777 rtl = NULL_RTX;
19778 }
19779 else if (TREE_CODE (decl) == PARM_DECL)
19780 {
19781 if (rtl == NULL_RTX
19782 || is_pseudo_reg (rtl)
19783 || (MEM_P (rtl)
19784 && is_pseudo_reg (XEXP (rtl, 0))
19785 && DECL_INCOMING_RTL (decl)
19786 && MEM_P (DECL_INCOMING_RTL (decl))
19787 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19788 {
19789 tree declared_type = TREE_TYPE (decl);
19790 tree passed_type = DECL_ARG_TYPE (decl);
19791 machine_mode dmode = TYPE_MODE (declared_type);
19792 machine_mode pmode = TYPE_MODE (passed_type);
19793
19794 /* This decl represents a formal parameter which was optimized out.
19795 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19796 all cases where (rtl == NULL_RTX) just below. */
19797 if (dmode == pmode)
19798 rtl = DECL_INCOMING_RTL (decl);
19799 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19800 && SCALAR_INT_MODE_P (dmode)
19801 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19802 && DECL_INCOMING_RTL (decl))
19803 {
19804 rtx inc = DECL_INCOMING_RTL (decl);
19805 if (REG_P (inc))
19806 rtl = inc;
19807 else if (MEM_P (inc))
19808 {
19809 if (BYTES_BIG_ENDIAN)
19810 rtl = adjust_address_nv (inc, dmode,
19811 GET_MODE_SIZE (pmode)
19812 - GET_MODE_SIZE (dmode));
19813 else
19814 rtl = inc;
19815 }
19816 }
19817 }
19818
19819 /* If the parm was passed in registers, but lives on the stack, then
19820 make a big endian correction if the mode of the type of the
19821 parameter is not the same as the mode of the rtl. */
19822 /* ??? This is the same series of checks that are made in dbxout.c before
19823 we reach the big endian correction code there. It isn't clear if all
19824 of these checks are necessary here, but keeping them all is the safe
19825 thing to do. */
19826 else if (MEM_P (rtl)
19827 && XEXP (rtl, 0) != const0_rtx
19828 && ! CONSTANT_P (XEXP (rtl, 0))
19829 /* Not passed in memory. */
19830 && !MEM_P (DECL_INCOMING_RTL (decl))
19831 /* Not passed by invisible reference. */
19832 && (!REG_P (XEXP (rtl, 0))
19833 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19834 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19835 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19836 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19837 #endif
19838 )
19839 /* Big endian correction check. */
19840 && BYTES_BIG_ENDIAN
19841 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19842 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
19843 UNITS_PER_WORD))
19844 {
19845 machine_mode addr_mode = get_address_mode (rtl);
19846 poly_int64 offset = (UNITS_PER_WORD
19847 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19848
19849 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19850 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19851 }
19852 }
19853 else if (VAR_P (decl)
19854 && rtl
19855 && MEM_P (rtl)
19856 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19857 {
19858 machine_mode addr_mode = get_address_mode (rtl);
19859 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19860 GET_MODE (rtl));
19861
19862 /* If a variable is declared "register" yet is smaller than
19863 a register, then if we store the variable to memory, it
19864 looks like we're storing a register-sized value, when in
19865 fact we are not. We need to adjust the offset of the
19866 storage location to reflect the actual value's bytes,
19867 else gdb will not be able to display it. */
19868 if (maybe_ne (offset, 0))
19869 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19870 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19871 }
19872
19873 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19874 and will have been substituted directly into all expressions that use it.
19875 C does not have such a concept, but C++ and other languages do. */
19876 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19877 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19878
19879 if (rtl)
19880 rtl = targetm.delegitimize_address (rtl);
19881
19882 /* If we don't look past the constant pool, we risk emitting a
19883 reference to a constant pool entry that isn't referenced from
19884 code, and thus is not emitted. */
19885 if (rtl)
19886 rtl = avoid_constant_pool_reference (rtl);
19887
19888 /* Try harder to get a rtl. If this symbol ends up not being emitted
19889 in the current CU, resolve_addr will remove the expression referencing
19890 it. */
19891 if (rtl == NULL_RTX
19892 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
19893 && VAR_P (decl)
19894 && !DECL_EXTERNAL (decl)
19895 && TREE_STATIC (decl)
19896 && DECL_NAME (decl)
19897 && !DECL_HARD_REGISTER (decl)
19898 && DECL_MODE (decl) != VOIDmode)
19899 {
19900 rtl = make_decl_rtl_for_debug (decl);
19901 if (!MEM_P (rtl)
19902 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19903 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19904 rtl = NULL_RTX;
19905 }
19906
19907 return rtl;
19908 }
19909
19910 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
19911 returned. If so, the decl for the COMMON block is returned, and the
19912 value is the offset into the common block for the symbol. */
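
/* Sketch of the intended case (hypothetical Fortran source):

     INTEGER I
     REAL R
     COMMON /BLK/ I, R

   The Fortran front end gives I and R DECL_VALUE_EXPRs that are
   COMPONENT_REFs into an artificial variable representing /BLK/; for R
   this function would return that variable and set *VALUE to R's byte
   offset within the block (4 with a 32-bit default INTEGER). */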
19913
19914 static tree
19915 fortran_common (tree decl, HOST_WIDE_INT *value)
19916 {
19917 tree val_expr, cvar;
19918 machine_mode mode;
19919 poly_int64 bitsize, bitpos;
19920 tree offset;
19921 HOST_WIDE_INT cbitpos;
19922 int unsignedp, reversep, volatilep = 0;
19923
19924 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
19925 it does not have a value (the offset into the common area), or if it
19926 is thread local (as opposed to global) then it isn't common, and shouldn't
19927 be handled as such. */
19928 if (!VAR_P (decl)
19929 || !TREE_STATIC (decl)
19930 || !DECL_HAS_VALUE_EXPR_P (decl)
19931 || !is_fortran ())
19932 return NULL_TREE;
19933
19934 val_expr = DECL_VALUE_EXPR (decl);
19935 if (TREE_CODE (val_expr) != COMPONENT_REF)
19936 return NULL_TREE;
19937
19938 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
19939 &unsignedp, &reversep, &volatilep);
19940
19941 if (cvar == NULL_TREE
19942 || !VAR_P (cvar)
19943 || DECL_ARTIFICIAL (cvar)
19944 || !TREE_PUBLIC (cvar)
19945 /* We don't expect to have to cope with variable offsets,
19946 since at present all static data must have a constant size. */
19947 || !bitpos.is_constant (&cbitpos))
19948 return NULL_TREE;
19949
19950 *value = 0;
19951 if (offset != NULL)
19952 {
19953 if (!tree_fits_shwi_p (offset))
19954 return NULL_TREE;
19955 *value = tree_to_shwi (offset);
19956 }
19957 if (cbitpos != 0)
19958 *value += cbitpos / BITS_PER_UNIT;
19959
19960 return cvar;
19961 }
19962
19963 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
19964 data attribute for a variable or a parameter. We generate the
19965 DW_AT_const_value attribute only in those cases where the given variable
19966 or parameter does not have a true "location" either in memory or in a
19967 register. This can happen (for example) when a constant is passed as an
19968 actual argument in a call to an inline function. (It's possible that
19969 these things can crop up in other ways also.) Note that one type of
19970 constant value which can be passed into an inlined function is a constant
19971 pointer. This can happen for example if an actual argument in an inlined
19972 function call evaluates to a compile-time constant address.
19973
19974 CACHE_P is true if it is worth caching the location list for DECL,
19975 so that future calls can reuse it rather than regenerate it from scratch.
19976 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
19977 since we will need to refer to them each time the function is inlined. */
19978
19979 static bool
19980 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
19981 {
19982 rtx rtl;
19983 dw_loc_list_ref list;
19984 var_loc_list *loc_list;
19985 cached_dw_loc_list *cache;
19986
19987 if (early_dwarf)
19988 return false;
19989
19990 if (TREE_CODE (decl) == ERROR_MARK)
19991 return false;
19992
19993 if (get_AT (die, DW_AT_location)
19994 || get_AT (die, DW_AT_const_value))
19995 return true;
19996
19997 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
19998 || TREE_CODE (decl) == RESULT_DECL);
19999
20000 /* Try to get some constant RTL for this decl, and use that as the value of
20001 the location. */
20002
20003 rtl = rtl_for_decl_location (decl);
20004 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20005 && add_const_value_attribute (die, rtl))
20006 return true;
20007
20008 /* See if we have a single-element location list that is equivalent to
20009 a constant value. In that case it is better to use add_const_value_attribute
20010 rather than expanding the constant value equivalent. */
20011 loc_list = lookup_decl_loc (decl);
20012 if (loc_list
20013 && loc_list->first
20014 && loc_list->first->next == NULL
20015 && NOTE_P (loc_list->first->loc)
20016 && NOTE_VAR_LOCATION (loc_list->first->loc)
20017 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20018 {
20019 struct var_loc_node *node;
20020
20021 node = loc_list->first;
20022 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20023 if (GET_CODE (rtl) == EXPR_LIST)
20024 rtl = XEXP (rtl, 0);
20025 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20026 && add_const_value_attribute (die, rtl))
20027 return true;
20028 }
20029 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20030 list several times. See if we've already cached the contents. */
20031 list = NULL;
20032 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20033 cache_p = false;
20034 if (cache_p)
20035 {
20036 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20037 if (cache)
20038 list = cache->loc_list;
20039 }
20040 if (list == NULL)
20041 {
20042 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20043 NULL);
20044 /* It is usually worth caching this result if the decl is from
20045 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20046 if (cache_p && list && list->dw_loc_next)
20047 {
20048 cached_dw_loc_list **slot
20049 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20050 DECL_UID (decl),
20051 INSERT);
20052 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20053 cache->decl_id = DECL_UID (decl);
20054 cache->loc_list = list;
20055 *slot = cache;
20056 }
20057 }
20058 if (list)
20059 {
20060 add_AT_location_description (die, DW_AT_location, list);
20061 return true;
20062 }
20063 /* None of that worked, so it must not really have a location;
20064 try adding a constant value attribute from the DECL_INITIAL. */
20065 return tree_add_const_value_attribute_for_decl (die, decl);
20066 }
20067
20068 /* Helper function for tree_add_const_value_attribute. Natively encode
20069 initializer INIT into an array. Return true if successful. */
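
/* A small sketch (little-endian target assumed; the byte image is
   target dependent): encoding

     static const int v[3] = { 1, 2, 258 };

   into a 12-byte array would yield

     01 00 00 00  02 00 00 00  02 01 00 00

   which then becomes the block form of DW_AT_const_value. */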
20070
20071 static bool
20072 native_encode_initializer (tree init, unsigned char *array, int size)
20073 {
20074 tree type;
20075
20076 if (init == NULL_TREE)
20077 return false;
20078
20079 STRIP_NOPS (init);
20080 switch (TREE_CODE (init))
20081 {
20082 case STRING_CST:
20083 type = TREE_TYPE (init);
20084 if (TREE_CODE (type) == ARRAY_TYPE)
20085 {
20086 tree enttype = TREE_TYPE (type);
20087 scalar_int_mode mode;
20088
20089 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20090 || GET_MODE_SIZE (mode) != 1)
20091 return false;
20092 if (int_size_in_bytes (type) != size)
20093 return false;
20094 if (size > TREE_STRING_LENGTH (init))
20095 {
20096 memcpy (array, TREE_STRING_POINTER (init),
20097 TREE_STRING_LENGTH (init));
20098 memset (array + TREE_STRING_LENGTH (init),
20099 '\0', size - TREE_STRING_LENGTH (init));
20100 }
20101 else
20102 memcpy (array, TREE_STRING_POINTER (init), size);
20103 return true;
20104 }
20105 return false;
20106 case CONSTRUCTOR:
20107 type = TREE_TYPE (init);
20108 if (int_size_in_bytes (type) != size)
20109 return false;
20110 if (TREE_CODE (type) == ARRAY_TYPE)
20111 {
20112 HOST_WIDE_INT min_index;
20113 unsigned HOST_WIDE_INT cnt;
20114 int curpos = 0, fieldsize;
20115 constructor_elt *ce;
20116
20117 if (TYPE_DOMAIN (type) == NULL_TREE
20118 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20119 return false;
20120
20121 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20122 if (fieldsize <= 0)
20123 return false;
20124
20125 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20126 memset (array, '\0', size);
20127 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20128 {
20129 tree val = ce->value;
20130 tree index = ce->index;
20131 int pos = curpos;
20132 if (index && TREE_CODE (index) == RANGE_EXPR)
20133 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20134 * fieldsize;
20135 else if (index)
20136 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20137
20138 if (val)
20139 {
20140 STRIP_NOPS (val);
20141 if (!native_encode_initializer (val, array + pos, fieldsize))
20142 return false;
20143 }
20144 curpos = pos + fieldsize;
20145 if (index && TREE_CODE (index) == RANGE_EXPR)
20146 {
20147 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20148 - tree_to_shwi (TREE_OPERAND (index, 0));
20149 while (count-- > 0)
20150 {
20151 if (val)
20152 memcpy (array + curpos, array + pos, fieldsize);
20153 curpos += fieldsize;
20154 }
20155 }
20156 gcc_assert (curpos <= size);
20157 }
20158 return true;
20159 }
20160 else if (TREE_CODE (type) == RECORD_TYPE
20161 || TREE_CODE (type) == UNION_TYPE)
20162 {
20163 tree field = NULL_TREE;
20164 unsigned HOST_WIDE_INT cnt;
20165 constructor_elt *ce;
20166
20167 if (int_size_in_bytes (type) != size)
20168 return false;
20169
20170 if (TREE_CODE (type) == RECORD_TYPE)
20171 field = TYPE_FIELDS (type);
20172
20173 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20174 {
20175 tree val = ce->value;
20176 int pos, fieldsize;
20177
20178 if (ce->index != 0)
20179 field = ce->index;
20180
20181 if (val)
20182 STRIP_NOPS (val);
20183
20184 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20185 return false;
20186
20187 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20188 && TYPE_DOMAIN (TREE_TYPE (field))
20189 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20190 return false;
20191 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20192 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20193 return false;
20194 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20195 pos = int_byte_position (field);
20196 gcc_assert (pos + fieldsize <= size);
20197 if (val && fieldsize != 0
20198 && !native_encode_initializer (val, array + pos, fieldsize))
20199 return false;
20200 }
20201 return true;
20202 }
20203 return false;
20204 case VIEW_CONVERT_EXPR:
20205 case NON_LVALUE_EXPR:
20206 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20207 default:
20208 return native_encode_expr (init, array, size) == size;
20209 }
20210 }
20211
20212 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20213 attribute is the const value T. */
20214
20215 static bool
20216 tree_add_const_value_attribute (dw_die_ref die, tree t)
20217 {
20218 tree init;
20219 tree type = TREE_TYPE (t);
20220 rtx rtl;
20221
20222 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20223 return false;
20224
20225 init = t;
20226 gcc_assert (!DECL_P (init));
20227
20228 if (TREE_CODE (init) == INTEGER_CST)
20229 {
20230 if (tree_fits_uhwi_p (init))
20231 {
20232 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20233 return true;
20234 }
20235 if (tree_fits_shwi_p (init))
20236 {
20237 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20238 return true;
20239 }
20240 }
20241 if (! early_dwarf)
20242 {
20243 rtl = rtl_for_decl_init (init, type);
20244 if (rtl)
20245 return add_const_value_attribute (die, rtl);
20246 }
20247 /* If the host and target are sane, try harder. */
20248 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20249 && initializer_constant_valid_p (init, type))
20250 {
20251 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20252 if (size > 0 && (int) size == size)
20253 {
20254 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20255
20256 if (native_encode_initializer (init, array, size))
20257 {
20258 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20259 return true;
20260 }
20261 ggc_free (array);
20262 }
20263 }
20264 return false;
20265 }
20266
20267 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20268 attribute is the const value of T, where T is an integral constant
20269 variable with static storage duration
20270 (so it can't be a PARM_DECL or a RESULT_DECL). */
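
/* A typical C++ case (sketch): for

     static const int answer = 42;

   whose storage has been optimized away, the variable's DIE can still
   carry DW_AT_const_value 42 taken from DECL_INITIAL, provided the
   abstract origin has not already supplied one. */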
20271
20272 static bool
20273 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20274 {
20275
20276 if (!decl
20277 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20278 || (VAR_P (decl) && !TREE_STATIC (decl)))
20279 return false;
20280
20281 if (TREE_READONLY (decl)
20282 && ! TREE_THIS_VOLATILE (decl)
20283 && DECL_INITIAL (decl))
20284 /* OK */;
20285 else
20286 return false;
20287
20288 /* Don't add DW_AT_const_value if abstract origin already has one. */
20289 if (get_AT (var_die, DW_AT_const_value))
20290 return false;
20291
20292 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20293 }
20294
20295 /* Convert the CFI instructions for the current function into a
20296 location list. This is used for DW_AT_frame_base when we are targeting
20297 a dwarf2 consumer that does not support the dwarf3
20298 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20299 expressions. */
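
/* A hypothetical x86-64 shape of the result, assuming a frame-pointer
   push in the prologue (labels, registers and offsets are only
   illustrative):

     [.Lbegin, .Lpush)   DW_OP_breg7 (rsp) + 8 + OFFSET
     [.Lpush,  .Lsetfp)  DW_OP_breg7 (rsp) + 16 + OFFSET
     [.Lsetfp, .Lend)    DW_OP_breg6 (rbp) + 16 + OFFSET

   i.e. one location-list entry per range over which the CFA is
   computed the same way. */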
20300
20301 static dw_loc_list_ref
20302 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20303 {
20304 int ix;
20305 dw_fde_ref fde;
20306 dw_loc_list_ref list, *list_tail;
20307 dw_cfi_ref cfi;
20308 dw_cfa_location last_cfa, next_cfa;
20309 const char *start_label, *last_label, *section;
20310 dw_cfa_location remember;
20311
20312 fde = cfun->fde;
20313 gcc_assert (fde != NULL);
20314
20315 section = secname_for_decl (current_function_decl);
20316 list_tail = &list;
20317 list = NULL;
20318
20319 memset (&next_cfa, 0, sizeof (next_cfa));
20320 next_cfa.reg = INVALID_REGNUM;
20321 remember = next_cfa;
20322
20323 start_label = fde->dw_fde_begin;
20324
20325 /* ??? Bald assumption that the CIE opcode list does not contain
20326 advance opcodes. */
20327 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20328 lookup_cfa_1 (cfi, &next_cfa, &remember);
20329
20330 last_cfa = next_cfa;
20331 last_label = start_label;
20332
20333 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20334 {
20335 /* If the first partition contained no CFI adjustments, the
20336 CIE opcodes apply to the whole first partition. */
20337 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20338 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20339 list_tail = &(*list_tail)->dw_loc_next;
20340 start_label = last_label = fde->dw_fde_second_begin;
20341 }
20342
20343 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20344 {
20345 switch (cfi->dw_cfi_opc)
20346 {
20347 case DW_CFA_set_loc:
20348 case DW_CFA_advance_loc1:
20349 case DW_CFA_advance_loc2:
20350 case DW_CFA_advance_loc4:
20351 if (!cfa_equal_p (&last_cfa, &next_cfa))
20352 {
20353 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20354 start_label, 0, last_label, 0, section);
20355
20356 list_tail = &(*list_tail)->dw_loc_next;
20357 last_cfa = next_cfa;
20358 start_label = last_label;
20359 }
20360 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20361 break;
20362
20363 case DW_CFA_advance_loc:
20364 /* The encoding is complex enough that we should never emit this. */
20365 gcc_unreachable ();
20366
20367 default:
20368 lookup_cfa_1 (cfi, &next_cfa, &remember);
20369 break;
20370 }
20371 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20372 {
20373 if (!cfa_equal_p (&last_cfa, &next_cfa))
20374 {
20375 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20376 start_label, 0, last_label, 0, section);
20377
20378 list_tail = &(*list_tail)->dw_loc_next;
20379 last_cfa = next_cfa;
20380 start_label = last_label;
20381 }
20382 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20383 start_label, 0, fde->dw_fde_end, 0, section);
20384 list_tail = &(*list_tail)->dw_loc_next;
20385 start_label = last_label = fde->dw_fde_second_begin;
20386 }
20387 }
20388
20389 if (!cfa_equal_p (&last_cfa, &next_cfa))
20390 {
20391 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20392 start_label, 0, last_label, 0, section);
20393 list_tail = &(*list_tail)->dw_loc_next;
20394 start_label = last_label;
20395 }
20396
20397 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20398 start_label, 0,
20399 fde->dw_fde_second_begin
20400 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20401 section);
20402
20403 maybe_gen_llsym (list);
20404
20405 return list;
20406 }
20407
20408 /* Compute a displacement from the "steady-state frame pointer" to the
20409 frame base (often the same as the CFA), and store it in
20410 frame_pointer_fb_offset. OFFSET is added to the displacement
20411 before the latter is negated. */
20412
20413 static void
20414 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20415 {
20416 rtx reg, elim;
20417
20418 #ifdef FRAME_POINTER_CFA_OFFSET
20419 reg = frame_pointer_rtx;
20420 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20421 #else
20422 reg = arg_pointer_rtx;
20423 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20424 #endif
20425
20426 elim = (ira_use_lra_p
20427 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20428 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20429 elim = strip_offset_and_add (elim, &offset);
20430
20431 frame_pointer_fb_offset = -offset;
20432
20433 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20434 in which to eliminate. This is because its stack pointer isn't
20435 directly accessible as a register within the ISA. To work around
20436 this, assume that while we cannot provide a proper value for
20437 frame_pointer_fb_offset, we won't need one either. We can use
20438 hard frame pointer in debug info even if frame pointer isn't used
20439 since hard frame pointer in debug info is encoded with DW_OP_fbreg
20440 which uses the DW_AT_frame_base attribute, not hard frame pointer
20441 directly. */
20442 frame_pointer_fb_offset_valid
20443 = (elim == hard_frame_pointer_rtx || elim == stack_pointer_rtx);
20444 }
20445
20446 /* Generate a DW_AT_name attribute given some string value to be included as
20447 the value of the attribute. */
20448
20449 static void
20450 add_name_attribute (dw_die_ref die, const char *name_string)
20451 {
20452 if (name_string != NULL && *name_string != 0)
20453 {
20454 if (demangle_name_func)
20455 name_string = (*demangle_name_func) (name_string);
20456
20457 add_AT_string (die, DW_AT_name, name_string);
20458 }
20459 }
20460
20461 /* Generate a DW_AT_description attribute given some string value to be included
20462 as the value of the attribute. */
20463
20464 static void
20465 add_desc_attribute (dw_die_ref die, const char *name_string)
20466 {
20467 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20468 return;
20469
20470 if (name_string == NULL || *name_string == 0)
20471 return;
20472
20473 if (demangle_name_func)
20474 name_string = (*demangle_name_func) (name_string);
20475
20476 add_AT_string (die, DW_AT_description, name_string);
20477 }
20478
20479 /* Generate a DW_AT_description attribute given some decl to be included
20480 as the value of the attribute. */
20481
20482 static void
20483 add_desc_attribute (dw_die_ref die, tree decl)
20484 {
20485 tree decl_name;
20486
20487 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20488 return;
20489
20490 if (decl == NULL_TREE || !DECL_P (decl))
20491 return;
20492 decl_name = DECL_NAME (decl);
20493
20494 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
20495 {
20496 const char *name = dwarf2_name (decl, 0);
20497 add_desc_attribute (die, name ? name : IDENTIFIER_POINTER (decl_name));
20498 }
20499 else
20500 {
20501 char *desc = print_generic_expr_to_str (decl);
20502 add_desc_attribute (die, desc);
20503 free (desc);
20504 }
20505 }
20506
20507 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20508 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20509 of TYPE accordingly.
20510
20511 ??? This is a temporary measure until after we're able to generate
20512 regular DWARF for the complex Ada type system. */
20513
20514 static void
20515 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20516 dw_die_ref context_die)
20517 {
20518 tree dtype;
20519 dw_die_ref dtype_die;
20520
20521 if (!lang_hooks.types.descriptive_type)
20522 return;
20523
20524 dtype = lang_hooks.types.descriptive_type (type);
20525 if (!dtype)
20526 return;
20527
20528 dtype_die = lookup_type_die (dtype);
20529 if (!dtype_die)
20530 {
20531 gen_type_die (dtype, context_die);
20532 dtype_die = lookup_type_die (dtype);
20533 gcc_assert (dtype_die);
20534 }
20535
20536 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20537 }
20538
20539 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20540
20541 static const char *
20542 comp_dir_string (void)
20543 {
20544 const char *wd;
20545 char *wd1;
20546 static const char *cached_wd = NULL;
20547
20548 if (cached_wd != NULL)
20549 return cached_wd;
20550
20551 wd = get_src_pwd ();
20552 if (wd == NULL)
20553 return NULL;
20554
20555 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20556 {
20557 int wdlen;
20558
20559 wdlen = strlen (wd);
20560 wd1 = ggc_vec_alloc<char> (wdlen + 2);
20561 strcpy (wd1, wd);
20562 wd1 [wdlen] = DIR_SEPARATOR;
20563 wd1 [wdlen + 1] = 0;
20564 wd = wd1;
20565 }
20566
20567 cached_wd = remap_debug_filename (wd);
20568 return cached_wd;
20569 }
20570
20571 /* Generate a DW_AT_comp_dir attribute for DIE. */
20572
20573 static void
20574 add_comp_dir_attribute (dw_die_ref die)
20575 {
20576 const char * wd = comp_dir_string ();
20577 if (wd != NULL)
20578 add_AT_string (die, DW_AT_comp_dir, wd);
20579 }
20580
20581 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20582 pointer computation, ...), output a representation for that bound according
20583 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20584 loc_list_from_tree for the meaning of CONTEXT. */
20585
20586 static void
20587 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20588 int forms, struct loc_descr_context *context)
20589 {
20590 dw_die_ref context_die, decl_die = NULL;
20591 dw_loc_list_ref list;
20592 bool strip_conversions = true;
20593 bool placeholder_seen = false;
20594
20595 while (strip_conversions)
20596 switch (TREE_CODE (value))
20597 {
20598 case ERROR_MARK:
20599 case SAVE_EXPR:
20600 return;
20601
20602 CASE_CONVERT:
20603 case VIEW_CONVERT_EXPR:
20604 value = TREE_OPERAND (value, 0);
20605 break;
20606
20607 default:
20608 strip_conversions = false;
20609 break;
20610 }
20611
20612 /* If possible and permitted, output the attribute as a constant. */
20613 if ((forms & dw_scalar_form_constant) != 0
20614 && TREE_CODE (value) == INTEGER_CST)
20615 {
20616 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20617
20618 /* If HOST_WIDE_INT is big enough then represent the bound as
20619 a constant value. We need to choose a form based on
20620 whether the type is signed or unsigned. We cannot just
20621 call add_AT_unsigned if the value itself is positive
20622 (add_AT_unsigned might add the unsigned value encoded as
20623 DW_FORM_data[1248]). Some DWARF consumers will lookup the
20624 bounds type and then sign extend any unsigned values found
20625 for signed types. This is needed only for
20626 DW_AT_{lower,upper}_bound, since for most other attributes,
20627 consumers will treat DW_FORM_data[1248] as unsigned values,
20628 regardless of the underlying type. */
20629 if (prec <= HOST_BITS_PER_WIDE_INT
20630 || tree_fits_uhwi_p (value))
20631 {
20632 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20633 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20634 else
20635 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20636 }
20637 else
20638 /* Otherwise represent the bound as an unsigned value with
20639 the precision of its type. The precision and signedness
20640 of the type will be necessary to re-interpret it
20641 unambiguously. */
20642 add_AT_wide (die, attr, wi::to_wide (value));
20643 return;
20644 }
20645
20646 /* Otherwise, if it's possible and permitted too, output a reference to
20647 another DIE. */
20648 if ((forms & dw_scalar_form_reference) != 0)
20649 {
20650 tree decl = NULL_TREE;
20651
20652 /* Some type attributes reference an outer type. For instance, the upper
20653 bound of an array may reference an embedding record (this happens in
20654 Ada). */
20655 if (TREE_CODE (value) == COMPONENT_REF
20656 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20657 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20658 decl = TREE_OPERAND (value, 1);
20659
20660 else if (VAR_P (value)
20661 || TREE_CODE (value) == PARM_DECL
20662 || TREE_CODE (value) == RESULT_DECL)
20663 decl = value;
20664
20665 if (decl != NULL_TREE)
20666 {
20667 decl_die = lookup_decl_die (decl);
20668
20669 /* ??? Can this happen, or should the variable have been bound
20670 first? Probably it can, since I imagine that we try to create
20671 the types of parameters in the order in which they exist in
20672 the list, and won't have created a forward reference to a
20673 later parameter. */
20674 if (decl_die != NULL)
20675 {
20676 if (get_AT (decl_die, DW_AT_location)
20677 || get_AT (decl_die, DW_AT_const_value))
20678 {
20679 add_AT_die_ref (die, attr, decl_die);
20680 return;
20681 }
20682 }
20683 }
20684 }
20685
20686 /* Last chance: try to create a stack operation procedure to evaluate the
20687 value. Do nothing if even that is not possible or permitted. */
20688 if ((forms & dw_scalar_form_exprloc) == 0)
20689 return;
20690
20691 list = loc_list_from_tree (value, 2, context);
20692 if (context && context->placeholder_arg)
20693 {
20694 placeholder_seen = context->placeholder_seen;
20695 context->placeholder_seen = false;
20696 }
20697 if (list == NULL || single_element_loc_list_p (list))
20698 {
20699 /* If this attribute is not a reference nor constant, it is
20700 a DWARF expression rather than location description. For that
20701 loc_list_from_tree (value, 0, context) is needed. */
20702 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20703 if (list2 && single_element_loc_list_p (list2))
20704 {
20705 if (placeholder_seen)
20706 {
20707 struct dwarf_procedure_info dpi;
20708 dpi.fndecl = NULL_TREE;
20709 dpi.args_count = 1;
20710 if (!resolve_args_picking (list2->expr, 1, &dpi))
20711 return;
20712 }
20713 add_AT_loc (die, attr, list2->expr);
20714 return;
20715 }
20716 }
20717
20718 /* If that failed to give a single element location list, fall back to
20719 outputting this as a reference... still if permitted. */
20720 if (list == NULL
20721 || (forms & dw_scalar_form_reference) == 0
20722 || placeholder_seen)
20723 return;
20724
20725 if (!decl_die)
20726 {
20727 if (current_function_decl == 0)
20728 context_die = comp_unit_die ();
20729 else
20730 context_die = lookup_decl_die (current_function_decl);
20731
20732 decl_die = new_die (DW_TAG_variable, context_die, value);
20733 add_AT_flag (decl_die, DW_AT_artificial, 1);
20734 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20735 context_die);
20736 }
20737
20738 add_AT_location_description (decl_die, DW_AT_location, list);
20739 add_AT_die_ref (die, attr, decl_die);
20740 }
20741
20742 /* Return the default for DW_AT_lower_bound, or -1 if there is no
20743 default. */
20744
20745 static int
20746 lower_bound_default (void)
20747 {
20748 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20749 {
20750 case DW_LANG_C:
20751 case DW_LANG_C89:
20752 case DW_LANG_C99:
20753 case DW_LANG_C11:
20754 case DW_LANG_C_plus_plus:
20755 case DW_LANG_C_plus_plus_11:
20756 case DW_LANG_C_plus_plus_14:
20757 case DW_LANG_ObjC:
20758 case DW_LANG_ObjC_plus_plus:
20759 return 0;
20760 case DW_LANG_Fortran77:
20761 case DW_LANG_Fortran90:
20762 case DW_LANG_Fortran95:
20763 case DW_LANG_Fortran03:
20764 case DW_LANG_Fortran08:
20765 return 1;
20766 case DW_LANG_UPC:
20767 case DW_LANG_D:
20768 case DW_LANG_Python:
20769 return dwarf_version >= 4 ? 0 : -1;
20770 case DW_LANG_Ada95:
20771 case DW_LANG_Ada83:
20772 case DW_LANG_Cobol74:
20773 case DW_LANG_Cobol85:
20774 case DW_LANG_Modula2:
20775 case DW_LANG_PLI:
20776 return dwarf_version >= 4 ? 1 : -1;
20777 default:
20778 return -1;
20779 }
20780 }
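
/* For example, with DW_AT_language DW_LANG_C99 the function above
   returns 0, so for a C array declared as "int a[4]" the
   DW_TAG_subrange_type can omit DW_AT_lower_bound and emit only
   DW_AT_upper_bound 3; a Fortran compilation unit would instead get a
   default lower bound of 1. */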
20781
20782 /* Given a tree node describing an array bound (either lower or upper) output
20783 a representation for that bound. */
20784
20785 static void
20786 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20787 tree bound, struct loc_descr_context *context)
20788 {
20789 int dflt;
20790
20791 while (1)
20792 switch (TREE_CODE (bound))
20793 {
20794 /* Strip all conversions. */
20795 CASE_CONVERT:
20796 case VIEW_CONVERT_EXPR:
20797 bound = TREE_OPERAND (bound, 0);
20798 break;
20799
20800 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20801 are even omitted when they are the default. */
20802 case INTEGER_CST:
20803 /* If the value for this bound is the default one, we can even omit the
20804 attribute. */
20805 if (bound_attr == DW_AT_lower_bound
20806 && tree_fits_shwi_p (bound)
20807 && (dflt = lower_bound_default ()) != -1
20808 && tree_to_shwi (bound) == dflt)
20809 return;
20810
20811 /* FALLTHRU */
20812
20813 default:
20814 /* Because of the complex interactions that can occur with other GNAT
20815 encodings, GDB isn't ready yet to handle a proper DWARF description
20816 of self-referential subrange bounds: let GNAT encodings do the
20817 magic in such a case. */
20818 if (is_ada ()
20819 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20820 && contains_placeholder_p (bound))
20821 return;
20822
20823 add_scalar_info (subrange_die, bound_attr, bound,
20824 dw_scalar_form_constant
20825 | dw_scalar_form_exprloc
20826 | dw_scalar_form_reference,
20827 context);
20828 return;
20829 }
20830 }
20831
20832 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20833 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20834 Note that the block of subscript information for an array type also
20835 includes information about the element type of the given array type.
20836
20837 This function reuses previously set type and bound information if
20838 available. */
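
/* For instance, a C declaration such as "int m[2][3]" is expected to
   produce (with COLLAPSE_P) a single DW_TAG_array_type whose element
   type is "int" and which has two DW_TAG_subrange_type children with
   DW_AT_upper_bound 1 and 2 respectively; lower bounds of 0 are left
   implicit per lower_bound_default. */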
20839
20840 static void
20841 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20842 {
20843 unsigned dimension_number;
20844 tree lower, upper;
20845 dw_die_ref child = type_die->die_child;
20846
20847 for (dimension_number = 0;
20848 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20849 type = TREE_TYPE (type), dimension_number++)
20850 {
20851 tree domain = TYPE_DOMAIN (type);
20852
20853 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20854 break;
20855
20856 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20857 and (in GNU C only) variable bounds. Handle all three forms
20858 here. */
20859
20860 /* Find and reuse a previously generated DW_TAG_subrange_type if
20861 available.
20862
20863 For multi-dimensional arrays, as we iterate through the
20864 various dimensions in the enclosing for loop above, we also
20865 	 iterate through the DIE children and pick up each
20866 DW_TAG_subrange_type previously generated (if available).
20867 Each child DW_TAG_subrange_type DIE describes the range of
20868 the current dimension. At this point we should have as many
20869 DW_TAG_subrange_type's as we have dimensions in the
20870 array. */
20871 dw_die_ref subrange_die = NULL;
20872 if (child)
20873 while (1)
20874 {
20875 child = child->die_sib;
20876 if (child->die_tag == DW_TAG_subrange_type)
20877 subrange_die = child;
20878 if (child == type_die->die_child)
20879 {
20880 /* If we wrapped around, stop looking next time. */
20881 child = NULL;
20882 break;
20883 }
20884 if (child->die_tag == DW_TAG_subrange_type)
20885 break;
20886 }
20887 if (!subrange_die)
20888 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20889
20890 if (domain)
20891 {
20892 /* We have an array type with specified bounds. */
20893 lower = TYPE_MIN_VALUE (domain);
20894 upper = TYPE_MAX_VALUE (domain);
20895
20896 /* Define the index type. */
20897 if (TREE_TYPE (domain)
20898 && !get_AT (subrange_die, DW_AT_type))
20899 {
20900 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20901 TREE_TYPE field. We can't emit debug info for this
20902 because it is an unnamed integral type. */
20903 if (TREE_CODE (domain) == INTEGER_TYPE
20904 && TYPE_NAME (domain) == NULL_TREE
20905 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20906 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20907 ;
20908 else
20909 add_type_attribute (subrange_die, TREE_TYPE (domain),
20910 TYPE_UNQUALIFIED, false, type_die);
20911 }
20912
20913 /* ??? If upper is NULL, the array has unspecified length,
20914 but it does have a lower bound. This happens with Fortran
20915 	     dimension arr(N:*).
20916 Since the debugger is definitely going to need to know N
20917 to produce useful results, go ahead and output the lower
20918 bound solo, and hope the debugger can cope. */
20919
20920 if (!get_AT (subrange_die, DW_AT_lower_bound))
20921 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20922 if (!get_AT (subrange_die, DW_AT_upper_bound)
20923 && !get_AT (subrange_die, DW_AT_count))
20924 {
20925 if (upper)
20926 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20927 else if ((is_c () || is_cxx ()) && COMPLETE_TYPE_P (type))
20928 /* Zero-length array. */
20929 add_bound_info (subrange_die, DW_AT_count,
20930 build_int_cst (TREE_TYPE (lower), 0), NULL);
20931 }
20932 }
20933
20934 /* Otherwise we have an array type with an unspecified length. The
20935 DWARF-2 spec does not say how to handle this; let's just leave out the
20936 bounds. */
20937 }
20938 }
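
 /* Illustrative sketch, not part of the original sources: with COLLAPSE_P
    true, a C declaration such as

      int m[3][5];

    produces a single DW_TAG_array_type whose children are two
    DW_TAG_subrange_type DIEs (upper bounds 2 and 4), rather than an array of
    arrays.  With COLLAPSE_P false (the Ada case), only the outermost
    dimension is handled here and each nested array type keeps its own
    DW_TAG_array_type.  */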
20939
20940 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
20941
20942 static void
20943 add_byte_size_attribute (dw_die_ref die, tree tree_node)
20944 {
20945 dw_die_ref decl_die;
20946 HOST_WIDE_INT size;
20947 dw_loc_descr_ref size_expr = NULL;
20948
20949 switch (TREE_CODE (tree_node))
20950 {
20951 case ERROR_MARK:
20952 size = 0;
20953 break;
20954 case ENUMERAL_TYPE:
20955 case RECORD_TYPE:
20956 case UNION_TYPE:
20957 case QUAL_UNION_TYPE:
20958 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
20959 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
20960 {
20961 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
20962 return;
20963 }
20964 size_expr = type_byte_size (tree_node, &size);
20965 break;
20966 case FIELD_DECL:
20967 /* For a data member of a struct or union, the DW_AT_byte_size is
20968 generally given as the number of bytes normally allocated for an
20969 object of the *declared* type of the member itself. This is true
20970 even for bit-fields. */
20971 size = int_size_in_bytes (field_type (tree_node));
20972 break;
20973 default:
20974 gcc_unreachable ();
20975 }
20976
20977 /* Support for dynamically-sized objects was introduced by DWARFv3.
20978 At the moment, GDB does not handle variable byte sizes very well,
20979 though. */
20980 if ((dwarf_version >= 3 || !dwarf_strict)
20981 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
20982 && size_expr != NULL)
20983 add_AT_loc (die, DW_AT_byte_size, size_expr);
20984
20985 /* Note that `size' might be -1 when we get to this point. If it is, that
20986 indicates that the byte size of the entity in question is variable and
20987 that we could not generate a DWARF expression that computes it. */
20988 if (size >= 0)
20989 add_AT_unsigned (die, DW_AT_byte_size, size);
20990 }
20991
20992 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
20993 alignment. */
20994
20995 static void
20996 add_alignment_attribute (dw_die_ref die, tree tree_node)
20997 {
20998 if (dwarf_version < 5 && dwarf_strict)
20999 return;
21000
21001 unsigned align;
21002
21003 if (DECL_P (tree_node))
21004 {
21005 if (!DECL_USER_ALIGN (tree_node))
21006 return;
21007
21008 align = DECL_ALIGN_UNIT (tree_node);
21009 }
21010 else if (TYPE_P (tree_node))
21011 {
21012 if (!TYPE_USER_ALIGN (tree_node))
21013 return;
21014
21015 align = TYPE_ALIGN_UNIT (tree_node);
21016 }
21017 else
21018 gcc_unreachable ();
21019
21020 add_AT_unsigned (die, DW_AT_alignment, align);
21021 }
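
 /* Illustrative sketch, not part of the original sources: the attribute is
    only emitted for alignments the user requested explicitly, e.g.

      _Alignas (16) int x;
      struct s { int i; } __attribute__ ((aligned (32)));

    which would yield DW_AT_alignment 16 and 32 on the respective DIEs, while
    a plain "int y;" gets no DW_AT_alignment at all.  */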
21022
21023 /* For a FIELD_DECL node which represents a bit-field, output an attribute
21024 which specifies the distance in bits from the highest order bit of the
21025 "containing object" for the bit-field to the highest order bit of the
21026 bit-field itself.
21027
21028 For any given bit-field, the "containing object" is a hypothetical object
21029 (of some integral or enum type) within which the given bit-field lives. The
21030 type of this hypothetical "containing object" is always the same as the
21031 declared type of the individual bit-field itself. The determination of the
21032 exact location of the "containing object" for a bit-field is rather
21033 complicated. It's handled by the `field_byte_offset' function (above).
21034
21035 CTX is required: see the comment for VLR_CONTEXT.
21036
21037 Note that it is the size (in bytes) of the hypothetical "containing object"
21038 which will be given in the DW_AT_byte_size attribute for this bit-field.
21039 (See `byte_size_attribute' above). */
21040
21041 static inline void
21042 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
21043 {
21044 HOST_WIDE_INT object_offset_in_bytes;
21045 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21046 HOST_WIDE_INT bitpos_int;
21047 HOST_WIDE_INT highest_order_object_bit_offset;
21048 HOST_WIDE_INT highest_order_field_bit_offset;
21049 HOST_WIDE_INT bit_offset;
21050
21051 field_byte_offset (decl, ctx, &object_offset_in_bytes);
21052
21053 /* Must be a field and a bit field. */
21054 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21055
21056 /* We can't yet handle bit-fields whose offsets are variable, so if we
21057 encounter such things, just return without generating any attribute
21058 whatsoever. Likewise for variable or too large size. */
21059 if (! tree_fits_shwi_p (bit_position (decl))
21060 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21061 return;
21062
21063 bitpos_int = int_bit_position (decl);
21064
21065 /* Note that the bit offset is always the distance (in bits) from the
21066 highest-order bit of the "containing object" to the highest-order bit of
21067 the bit-field itself. Since the "high-order end" of any object or field
21068 is different on big-endian and little-endian machines, the computation
21069 below must take account of these differences. */
21070 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21071 highest_order_field_bit_offset = bitpos_int;
21072
21073 if (! BYTES_BIG_ENDIAN)
21074 {
21075 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21076 highest_order_object_bit_offset +=
21077 simple_type_size_in_bits (original_type);
21078 }
21079
21080 bit_offset
21081 = (! BYTES_BIG_ENDIAN
21082 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21083 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21084
21085 if (bit_offset < 0)
21086 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21087 else
21088 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21089 }
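
 /* Worked sketch, not part of the original sources: for

      struct s { unsigned a : 3; unsigned b : 5; };

    on a little-endian target with 32-bit int and a containing object at byte
    offset 0, member "b" has bit position 3 and size 5, so the computation
    above yields (0 + 32) - (3 + 5) = 24 for DW_AT_bit_offset; on a big-endian
    target the same member would get 3 - 0 = 3.  The actual numbers depend on
    field_byte_offset and the target ABI.  */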
21090
21091 /* For a FIELD_DECL node which represents a bit field, output an attribute
21092 which specifies the length in bits of the given field. */
21093
21094 static inline void
21095 add_bit_size_attribute (dw_die_ref die, tree decl)
21096 {
21097 /* Must be a field and a bit field. */
21098 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21099 && DECL_BIT_FIELD_TYPE (decl));
21100
21101 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21102 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21103 }
21104
21105 /* If the compiled language is ANSI C, then add a 'prototyped'
21106    attribute if argument types are given for the parameters of a function.  */
21107
21108 static inline void
21109 add_prototyped_attribute (dw_die_ref die, tree func_type)
21110 {
21111 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21112 {
21113 case DW_LANG_C:
21114 case DW_LANG_C89:
21115 case DW_LANG_C99:
21116 case DW_LANG_C11:
21117 case DW_LANG_ObjC:
21118 if (prototype_p (func_type))
21119 add_AT_flag (die, DW_AT_prototyped, 1);
21120 break;
21121 default:
21122 break;
21123 }
21124 }
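
 /* Illustrative sketch, not part of the original sources: in C,

      int f (int);   -- prototype_p is true, so DW_AT_prototyped is added
      int g ();      -- old-style declaration, no DW_AT_prototyped

    For C++ (not listed in the switch above) the attribute is never emitted
    here, since every C++ function is prototyped by definition.  */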
21125
21126 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21127 by looking in the type declaration, the object declaration equate table or
21128 the block mapping. */
21129
21130 static inline void
21131 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21132 {
21133 dw_die_ref origin_die = NULL;
21134
21135 /* For late LTO debug output we want to refer directly to the abstract
21136      DIE in the early debug rather than to the possibly existing concrete
21137 instance and avoid creating that just for this purpose. */
21138 sym_off_pair *desc;
21139 if (in_lto_p
21140 && external_die_map
21141 && (desc = external_die_map->get (origin)))
21142 {
21143 add_AT_external_die_ref (die, DW_AT_abstract_origin,
21144 desc->sym, desc->off);
21145 return;
21146 }
21147
21148 if (DECL_P (origin))
21149 origin_die = lookup_decl_die (origin);
21150 else if (TYPE_P (origin))
21151 origin_die = lookup_type_die (origin);
21152 else if (TREE_CODE (origin) == BLOCK)
21153 origin_die = lookup_block_die (origin);
21154
21155 /* XXX: Functions that are never lowered don't always have correct block
21156      trees (in the case of Java, they simply have no block tree; the same can
21157      happen in some other languages).  For these functions, there is nothing we can really do to
21158 output correct debug info for inlined functions in all cases. Rather
21159 than die, we'll just produce deficient debug info now, in that we will
21160 have variables without a proper abstract origin. In the future, when all
21161 functions are lowered, we should re-add a gcc_assert (origin_die)
21162 here. */
21163
21164 if (origin_die)
21165 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21166 }
21167
21168 /* We do not currently support the pure_virtual attribute. */
21169
21170 static inline void
21171 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21172 {
21173 if (DECL_VINDEX (func_decl))
21174 {
21175 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21176
21177 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21178 add_AT_loc (die, DW_AT_vtable_elem_location,
21179 new_loc_descr (DW_OP_constu,
21180 tree_to_shwi (DECL_VINDEX (func_decl)),
21181 0));
21182
21183 /* GNU extension: Record what type this method came from originally. */
21184 if (debug_info_level > DINFO_LEVEL_TERSE
21185 && DECL_CONTEXT (func_decl))
21186 add_AT_die_ref (die, DW_AT_containing_type,
21187 lookup_type_die (DECL_CONTEXT (func_decl)));
21188 }
21189 }
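
 /* Illustrative sketch, not part of the original sources: a C++ method

      struct B { virtual void f (); };

    has a DECL_VINDEX, so its DIE would carry roughly

      DW_AT_virtuality            DW_VIRTUALITY_virtual
      DW_AT_vtable_elem_location  DW_OP_constu <vtable slot>
      DW_AT_containing_type       <reference to the DIE for B>

    Pure virtual functions are still marked DW_VIRTUALITY_virtual; see the
    comment above about the missing pure_virtual support.  */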
21190 \f
21191 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21192    given decl.  This used to be a vendor extension until DWARF 4
21193 standardized it. */
21194
21195 static void
21196 add_linkage_attr (dw_die_ref die, tree decl)
21197 {
21198 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21199
21200 /* Mimic what assemble_name_raw does with a leading '*'. */
21201 if (name[0] == '*')
21202 name = &name[1];
21203
21204 if (dwarf_version >= 4)
21205 add_AT_string (die, DW_AT_linkage_name, name);
21206 else
21207 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21208 }
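
 /* Illustrative sketch, not part of the original sources: for a C++ function
    such as

      namespace n { void f (int); }

    the assembler name differs from DECL_NAME, so the subprogram DIE gets
    DW_AT_linkage_name "_ZN1n1fEi" (or DW_AT_MIPS_linkage_name when the DWARF
    version is below 4).  The mangled string is only an example of the usual
    Itanium ABI mangling.  */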
21209
21210 /* Add source coordinate attributes for the given decl. */
21211
21212 static void
21213 add_src_coords_attributes (dw_die_ref die, tree decl)
21214 {
21215 expanded_location s;
21216
21217 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21218 return;
21219 s = expand_location (DECL_SOURCE_LOCATION (decl));
21220 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21221 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21222 if (debug_column_info && s.column)
21223 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21224 }
21225
21226 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21227
21228 static void
21229 add_linkage_name_raw (dw_die_ref die, tree decl)
21230 {
21231 /* Defer until we have an assembler name set. */
21232 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21233 {
21234 limbo_die_node *asm_name;
21235
21236 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21237 asm_name->die = die;
21238 asm_name->created_for = decl;
21239 asm_name->next = deferred_asm_name;
21240 deferred_asm_name = asm_name;
21241 }
21242 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21243 add_linkage_attr (die, decl);
21244 }
21245
21246 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21247
21248 static void
21249 add_linkage_name (dw_die_ref die, tree decl)
21250 {
21251 if (debug_info_level > DINFO_LEVEL_NONE
21252 && VAR_OR_FUNCTION_DECL_P (decl)
21253 && TREE_PUBLIC (decl)
21254 && !(VAR_P (decl) && DECL_REGISTER (decl))
21255 && die->die_tag != DW_TAG_member)
21256 add_linkage_name_raw (die, decl);
21257 }
21258
21259 /* Add a DW_AT_name attribute and source coordinate attribute for the
21260 given decl, but only if it actually has a name. */
21261
21262 static void
21263 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21264 bool no_linkage_name)
21265 {
21266 tree decl_name;
21267
21268 decl_name = DECL_NAME (decl);
21269 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21270 {
21271 const char *name = dwarf2_name (decl, 0);
21272 if (name)
21273 add_name_attribute (die, name);
21274 else
21275 add_desc_attribute (die, decl);
21276
21277 if (! DECL_ARTIFICIAL (decl))
21278 add_src_coords_attributes (die, decl);
21279
21280 if (!no_linkage_name)
21281 add_linkage_name (die, decl);
21282 }
21283 else
21284 add_desc_attribute (die, decl);
21285
21286 #ifdef VMS_DEBUGGING_INFO
21287 /* Get the function's name, as described by its RTL. This may be different
21288 from the DECL_NAME name used in the source file. */
21289 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21290 {
21291 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21292 XEXP (DECL_RTL (decl), 0), false);
21293 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21294 }
21295 #endif /* VMS_DEBUGGING_INFO */
21296 }
21297
21298 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21299
21300 static void
21301 add_discr_value (dw_die_ref die, dw_discr_value *value)
21302 {
21303 dw_attr_node attr;
21304
21305 attr.dw_attr = DW_AT_discr_value;
21306 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21307 attr.dw_attr_val.val_entry = NULL;
21308 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21309 if (value->pos)
21310 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21311 else
21312 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21313 add_dwarf_attr (die, &attr);
21314 }
21315
21316 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21317
21318 static void
21319 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21320 {
21321 dw_attr_node attr;
21322
21323 attr.dw_attr = DW_AT_discr_list;
21324 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21325 attr.dw_attr_val.val_entry = NULL;
21326 attr.dw_attr_val.v.val_discr_list = discr_list;
21327 add_dwarf_attr (die, &attr);
21328 }
21329
21330 static inline dw_discr_list_ref
21331 AT_discr_list (dw_attr_node *attr)
21332 {
21333 return attr->dw_attr_val.v.val_discr_list;
21334 }
21335
21336 #ifdef VMS_DEBUGGING_INFO
21337 /* Output the debug main pointer die for VMS.  */
21338
21339 void
21340 dwarf2out_vms_debug_main_pointer (void)
21341 {
21342 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21343 dw_die_ref die;
21344
21345 /* Allocate the VMS debug main subprogram die. */
21346 die = new_die_raw (DW_TAG_subprogram);
21347 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21348 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21349 current_function_funcdef_no);
21350 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21351
21352 /* Make it the first child of comp_unit_die (). */
21353 die->die_parent = comp_unit_die ();
21354 if (comp_unit_die ()->die_child)
21355 {
21356 die->die_sib = comp_unit_die ()->die_child->die_sib;
21357 comp_unit_die ()->die_child->die_sib = die;
21358 }
21359 else
21360 {
21361 die->die_sib = die;
21362 comp_unit_die ()->die_child = die;
21363 }
21364 }
21365 #endif /* VMS_DEBUGGING_INFO */
21366
21367 /* walk_tree helper function for uses_local_type, below. */
21368
21369 static tree
21370 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21371 {
21372 if (!TYPE_P (*tp))
21373 *walk_subtrees = 0;
21374 else
21375 {
21376 tree name = TYPE_NAME (*tp);
21377 if (name && DECL_P (name) && decl_function_context (name))
21378 return *tp;
21379 }
21380 return NULL_TREE;
21381 }
21382
21383 /* If TYPE involves a function-local type (including a local typedef to a
21384 non-local type), returns that type; otherwise returns NULL_TREE. */
21385
21386 static tree
21387 uses_local_type (tree type)
21388 {
21389 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21390 return used;
21391 }
21392
21393 /* Return the DIE for the scope that immediately contains this type.
21394 Non-named types that do not involve a function-local type get global
21395 scope. Named types nested in namespaces or other types get their
21396 containing scope. All other types (i.e. function-local named types) get
21397 the current active scope. */
21398
21399 static dw_die_ref
21400 scope_die_for (tree t, dw_die_ref context_die)
21401 {
21402 dw_die_ref scope_die = NULL;
21403 tree containing_scope;
21404
21405 /* Non-types always go in the current scope. */
21406 gcc_assert (TYPE_P (t));
21407
21408 /* Use the scope of the typedef, rather than the scope of the type
21409 it refers to. */
21410 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21411 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21412 else
21413 containing_scope = TYPE_CONTEXT (t);
21414
21415 /* Use the containing namespace if there is one. */
21416 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21417 {
21418 if (context_die == lookup_decl_die (containing_scope))
21419 /* OK */;
21420 else if (debug_info_level > DINFO_LEVEL_TERSE)
21421 context_die = get_context_die (containing_scope);
21422 else
21423 containing_scope = NULL_TREE;
21424 }
21425
21426 /* Ignore function type "scopes" from the C frontend. They mean that
21427 a tagged type is local to a parmlist of a function declarator, but
21428 that isn't useful to DWARF. */
21429 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21430 containing_scope = NULL_TREE;
21431
21432 if (SCOPE_FILE_SCOPE_P (containing_scope))
21433 {
21434 /* If T uses a local type keep it local as well, to avoid references
21435 to function-local DIEs from outside the function. */
21436 if (current_function_decl && uses_local_type (t))
21437 scope_die = context_die;
21438 else
21439 scope_die = comp_unit_die ();
21440 }
21441 else if (TYPE_P (containing_scope))
21442 {
21443 /* For types, we can just look up the appropriate DIE. */
21444 if (debug_info_level > DINFO_LEVEL_TERSE)
21445 scope_die = get_context_die (containing_scope);
21446 else
21447 {
21448 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21449 if (scope_die == NULL)
21450 scope_die = comp_unit_die ();
21451 }
21452 }
21453 else
21454 scope_die = context_die;
21455
21456 return scope_die;
21457 }
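
 /* Illustrative sketch, not part of the original sources: given

      namespace N { struct Outer { struct Inner { int i; }; }; }

    the DIE for Inner is parented by Outer's DIE and Outer's DIE by the DIE
    for namespace N, following the "containing scope" rule above, whereas a
    type named inside a function body stays under that function's DIE and
    most unnamed types end up directly under the compilation unit DIE.  */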
21458
21459 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21460
21461 static inline int
21462 local_scope_p (dw_die_ref context_die)
21463 {
21464 for (; context_die; context_die = context_die->die_parent)
21465 if (context_die->die_tag == DW_TAG_inlined_subroutine
21466 || context_die->die_tag == DW_TAG_subprogram)
21467 return 1;
21468
21469 return 0;
21470 }
21471
21472 /* Returns nonzero if CONTEXT_DIE is a class. */
21473
21474 static inline int
21475 class_scope_p (dw_die_ref context_die)
21476 {
21477 return (context_die
21478 && (context_die->die_tag == DW_TAG_structure_type
21479 || context_die->die_tag == DW_TAG_class_type
21480 || context_die->die_tag == DW_TAG_interface_type
21481 || context_die->die_tag == DW_TAG_union_type));
21482 }
21483
21484 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21485 whether or not to treat a DIE in this context as a declaration. */
21486
21487 static inline int
21488 class_or_namespace_scope_p (dw_die_ref context_die)
21489 {
21490 return (class_scope_p (context_die)
21491 || (context_die && context_die->die_tag == DW_TAG_namespace));
21492 }
21493
21494 /* Many forms of DIEs require a "type description" attribute. This
21495 routine locates the proper "type descriptor" die for the type given
21496 by 'type' plus any additional qualifiers given by 'cv_quals', and
21497 adds a DW_AT_type attribute below the given die. */
21498
21499 static void
21500 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21501 bool reverse, dw_die_ref context_die)
21502 {
21503 enum tree_code code = TREE_CODE (type);
21504 dw_die_ref type_die = NULL;
21505
21506 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21507 or fixed-point type, use the inner type. This is because we have no
21508 support for unnamed types in base_type_die. This can happen if this is
21509      an Ada subrange type.  The correct solution is to emit a subrange type DIE.  */
21510 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21511 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21512 type = TREE_TYPE (type), code = TREE_CODE (type);
21513
21514 if (code == ERROR_MARK
21515 /* Handle a special case. For functions whose return type is void, we
21516 generate *no* type attribute. (Note that no object may have type
21517 `void', so this only applies to function return types). */
21518 || code == VOID_TYPE)
21519 return;
21520
21521 type_die = modified_type_die (type,
21522 cv_quals | TYPE_QUALS (type),
21523 reverse,
21524 context_die);
21525
21526 if (type_die != NULL)
21527 add_AT_die_ref (object_die, DW_AT_type, type_die);
21528 }
21529
21530 /* Given an object die, add the calling convention attribute for the
21531 function call type. */
21532 static void
21533 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21534 {
21535 enum dwarf_calling_convention value = DW_CC_normal;
21536
21537 value = ((enum dwarf_calling_convention)
21538 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21539
21540 if (is_fortran ()
21541 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21542 {
21543 /* DWARF 2 doesn't provide a way to identify a program's source-level
21544 entry point. DW_AT_calling_convention attributes are only meant
21545 to describe functions' calling conventions. However, lacking a
21546 better way to signal the Fortran main program, we used this for
21547 a long time, following existing custom. Now, DWARF 4 has
21548 DW_AT_main_subprogram, which we add below, but some tools still
21549 rely on the old way, which we thus keep. */
21550 value = DW_CC_program;
21551
21552 if (dwarf_version >= 4 || !dwarf_strict)
21553 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21554 }
21555
21556 /* Only add the attribute if the backend requests it, and
21557      the value is not DW_CC_normal.  */
21558 if (value && (value != DW_CC_normal))
21559 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21560 }
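
 /* Illustrative sketch, not part of the original sources: for a Fortran main
    program

      program demo
      end program demo

    gfortran's entry point is named MAIN__, so the subprogram DIE would get
    DW_AT_calling_convention DW_CC_program and, for DWARF 4 or non-strict
    output, DW_AT_main_subprogram as well.  */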
21561
21562 /* Given a tree pointer to a struct, class, union, or enum type node, return
21563 a pointer to the (string) tag name for the given type, or zero if the type
21564 was declared without a tag. */
21565
21566 static const char *
21567 type_tag (const_tree type)
21568 {
21569 const char *name = 0;
21570
21571 if (TYPE_NAME (type) != 0)
21572 {
21573 tree t = 0;
21574
21575 /* Find the IDENTIFIER_NODE for the type name. */
21576 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21577 && !TYPE_NAMELESS (type))
21578 t = TYPE_NAME (type);
21579
21580 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21581 a TYPE_DECL node, regardless of whether or not a `typedef' was
21582 involved. */
21583 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21584 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21585 {
21586 /* We want to be extra verbose. Don't call dwarf_name if
21587 DECL_NAME isn't set. The default hook for decl_printable_name
21588 doesn't like that, and in this context it's correct to return
21589 0, instead of "<anonymous>" or the like. */
21590 if (DECL_NAME (TYPE_NAME (type))
21591 && !DECL_NAMELESS (TYPE_NAME (type)))
21592 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21593 }
21594
21595 /* Now get the name as a string, or invent one. */
21596 if (!name && t != 0)
21597 name = IDENTIFIER_POINTER (t);
21598 }
21599
21600 return (name == 0 || *name == '\0') ? 0 : name;
21601 }
21602
21603 /* Return the type associated with a data member, making a special check
21604 for bit field types. */
21605
21606 static inline tree
21607 member_declared_type (const_tree member)
21608 {
21609 return (DECL_BIT_FIELD_TYPE (member)
21610 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21611 }
21612
21613 /* Get the decl's label, as described by its RTL. This may be different
21614 from the DECL_NAME name used in the source file. */
21615
21616 #if 0
21617 static const char *
21618 decl_start_label (tree decl)
21619 {
21620 rtx x;
21621 const char *fnname;
21622
21623 x = DECL_RTL (decl);
21624 gcc_assert (MEM_P (x));
21625
21626 x = XEXP (x, 0);
21627 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21628
21629 fnname = XSTR (x, 0);
21630 return fnname;
21631 }
21632 #endif
21633 \f
21634 /* For variable-length arrays that have been previously generated, but
21635 may be incomplete due to missing subscript info, fill the subscript
21636 info. Return TRUE if this is one of those cases. */
21637 static bool
21638 fill_variable_array_bounds (tree type)
21639 {
21640 if (TREE_ASM_WRITTEN (type)
21641 && TREE_CODE (type) == ARRAY_TYPE
21642 && variably_modified_type_p (type, NULL))
21643 {
21644 dw_die_ref array_die = lookup_type_die (type);
21645 if (!array_die)
21646 return false;
21647 add_subscript_info (array_die, type, !is_ada ());
21648 return true;
21649 }
21650 return false;
21651 }
21652
21653 /* These routines generate the internal representation of the DIE's for
21654 the compilation unit. Debugging information is collected by walking
21655 the declaration trees passed in from dwarf2out_decl(). */
21656
21657 static void
21658 gen_array_type_die (tree type, dw_die_ref context_die)
21659 {
21660 dw_die_ref array_die;
21661
21662 /* GNU compilers represent multidimensional array types as sequences of one
21663 dimensional array types whose element types are themselves array types.
21664 We sometimes squish that down to a single array_type DIE with multiple
21665 subscripts in the Dwarf debugging info. The draft Dwarf specification
21666      says that we are allowed to do this kind of compression in C, because
21667      there is no difference between an array of arrays and a multidimensional
21668      array.  We don't do this for Ada to remain as close as possible to the
21669      actual representation, which is especially important given the language's
21670      flexibility with respect to arrays of variable size.  */
21671
21672 bool collapse_nested_arrays = !is_ada ();
21673
21674 if (fill_variable_array_bounds (type))
21675 return;
21676
21677 dw_die_ref scope_die = scope_die_for (type, context_die);
21678 tree element_type;
21679
21680 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21681      DW_TAG_string_type doesn't have a DW_AT_type attribute).  */
21682 if (TYPE_STRING_FLAG (type)
21683 && TREE_CODE (type) == ARRAY_TYPE
21684 && is_fortran ()
21685 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21686 {
21687 HOST_WIDE_INT size;
21688
21689 array_die = new_die (DW_TAG_string_type, scope_die, type);
21690 add_name_attribute (array_die, type_tag (type));
21691 equate_type_number_to_die (type, array_die);
21692 size = int_size_in_bytes (type);
21693 if (size >= 0)
21694 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21695 /* ??? We can't annotate types late, but for LTO we may not
21696 generate a location early either (gfortran.dg/save_6.f90). */
21697 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21698 && TYPE_DOMAIN (type) != NULL_TREE
21699 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21700 {
21701 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21702 tree rszdecl = szdecl;
21703
21704 size = int_size_in_bytes (TREE_TYPE (szdecl));
21705 if (!DECL_P (szdecl))
21706 {
21707 if (TREE_CODE (szdecl) == INDIRECT_REF
21708 && DECL_P (TREE_OPERAND (szdecl, 0)))
21709 {
21710 rszdecl = TREE_OPERAND (szdecl, 0);
21711 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21712 != DWARF2_ADDR_SIZE)
21713 size = 0;
21714 }
21715 else
21716 size = 0;
21717 }
21718 if (size > 0)
21719 {
21720 dw_loc_list_ref loc
21721 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21722 NULL);
21723 if (loc)
21724 {
21725 add_AT_location_description (array_die, DW_AT_string_length,
21726 loc);
21727 if (size != DWARF2_ADDR_SIZE)
21728 add_AT_unsigned (array_die, dwarf_version >= 5
21729 ? DW_AT_string_length_byte_size
21730 : DW_AT_byte_size, size);
21731 }
21732 }
21733 }
21734 return;
21735 }
21736
21737 array_die = new_die (DW_TAG_array_type, scope_die, type);
21738 add_name_attribute (array_die, type_tag (type));
21739 equate_type_number_to_die (type, array_die);
21740
21741 if (TREE_CODE (type) == VECTOR_TYPE)
21742 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21743
21744 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21745 if (is_fortran ()
21746 && TREE_CODE (type) == ARRAY_TYPE
21747 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21748 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21749 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21750
21751 #if 0
21752 /* We default the array ordering. Debuggers will probably do the right
21753 things even if DW_AT_ordering is not present. It's not even an issue
21754 until we start to get into multidimensional arrays anyway. If a debugger
21755 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21756 then we'll have to put the DW_AT_ordering attribute back in. (But if
21757 and when we find out that we need to put these in, we will only do so
21758      for multidimensional arrays.)  */
21759 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21760 #endif
21761
21762 if (TREE_CODE (type) == VECTOR_TYPE)
21763 {
21764 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21765 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21766 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21767 add_bound_info (subrange_die, DW_AT_upper_bound,
21768 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21769 }
21770 else
21771 add_subscript_info (array_die, type, collapse_nested_arrays);
21772
21773 /* Add representation of the type of the elements of this array type and
21774 emit the corresponding DIE if we haven't done it already. */
21775 element_type = TREE_TYPE (type);
21776 if (collapse_nested_arrays)
21777 while (TREE_CODE (element_type) == ARRAY_TYPE)
21778 {
21779 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21780 break;
21781 element_type = TREE_TYPE (element_type);
21782 }
21783
21784 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21785 TREE_CODE (type) == ARRAY_TYPE
21786 && TYPE_REVERSE_STORAGE_ORDER (type),
21787 context_die);
21788
21789 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21790 if (TYPE_ARTIFICIAL (type))
21791 add_AT_flag (array_die, DW_AT_artificial, 1);
21792
21793 if (get_AT (array_die, DW_AT_name))
21794 add_pubtype (type, array_die);
21795
21796 add_alignment_attribute (array_die, type);
21797 }
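
 /* Illustrative sketch, not part of the original sources: a GNU vector type
    such as

      typedef int v4si __attribute__ ((vector_size (16)));

    is emitted as a DW_TAG_array_type carrying the DW_AT_GNU_vector flag and
    a single DW_TAG_subrange_type with bounds 0 .. 3, as set up in the
    VECTOR_TYPE branch above.  */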
21798
21799 /* This routine generates a DIE for an array with a hidden descriptor;
21800    the details are filled into *info by a langhook.  */
21801
21802 static void
21803 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21804 dw_die_ref context_die)
21805 {
21806 const dw_die_ref scope_die = scope_die_for (type, context_die);
21807 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21808 struct loc_descr_context context = { type, info->base_decl, NULL,
21809 false, false };
21810 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21811 int dim;
21812
21813 add_name_attribute (array_die, type_tag (type));
21814 equate_type_number_to_die (type, array_die);
21815
21816 if (info->ndimensions > 1)
21817 switch (info->ordering)
21818 {
21819 case array_descr_ordering_row_major:
21820 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21821 break;
21822 case array_descr_ordering_column_major:
21823 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21824 break;
21825 default:
21826 break;
21827 }
21828
21829 if (dwarf_version >= 3 || !dwarf_strict)
21830 {
21831 if (info->data_location)
21832 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21833 dw_scalar_form_exprloc, &context);
21834 if (info->associated)
21835 add_scalar_info (array_die, DW_AT_associated, info->associated,
21836 dw_scalar_form_constant
21837 | dw_scalar_form_exprloc
21838 | dw_scalar_form_reference, &context);
21839 if (info->allocated)
21840 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21841 dw_scalar_form_constant
21842 | dw_scalar_form_exprloc
21843 | dw_scalar_form_reference, &context);
21844 if (info->stride)
21845 {
21846 const enum dwarf_attribute attr
21847 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21848 const int forms
21849 = (info->stride_in_bits)
21850 ? dw_scalar_form_constant
21851 : (dw_scalar_form_constant
21852 | dw_scalar_form_exprloc
21853 | dw_scalar_form_reference);
21854
21855 add_scalar_info (array_die, attr, info->stride, forms, &context);
21856 }
21857 }
21858 if (dwarf_version >= 5)
21859 {
21860 if (info->rank)
21861 {
21862 add_scalar_info (array_die, DW_AT_rank, info->rank,
21863 dw_scalar_form_constant
21864 | dw_scalar_form_exprloc, &context);
21865 subrange_tag = DW_TAG_generic_subrange;
21866 context.placeholder_arg = true;
21867 }
21868 }
21869
21870 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21871
21872 for (dim = 0; dim < info->ndimensions; dim++)
21873 {
21874 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21875
21876 if (info->dimen[dim].bounds_type)
21877 add_type_attribute (subrange_die,
21878 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21879 false, context_die);
21880 if (info->dimen[dim].lower_bound)
21881 add_bound_info (subrange_die, DW_AT_lower_bound,
21882 info->dimen[dim].lower_bound, &context);
21883 if (info->dimen[dim].upper_bound)
21884 add_bound_info (subrange_die, DW_AT_upper_bound,
21885 info->dimen[dim].upper_bound, &context);
21886 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21887 add_scalar_info (subrange_die, DW_AT_byte_stride,
21888 info->dimen[dim].stride,
21889 dw_scalar_form_constant
21890 | dw_scalar_form_exprloc
21891 | dw_scalar_form_reference,
21892 &context);
21893 }
21894
21895 gen_type_die (info->element_type, context_die);
21896 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21897 TREE_CODE (type) == ARRAY_TYPE
21898 && TYPE_REVERSE_STORAGE_ORDER (type),
21899 context_die);
21900
21901 if (get_AT (array_die, DW_AT_name))
21902 add_pubtype (type, array_die);
21903
21904 add_alignment_attribute (array_die, type);
21905 }
21906
21907 #if 0
21908 static void
21909 gen_entry_point_die (tree decl, dw_die_ref context_die)
21910 {
21911 tree origin = decl_ultimate_origin (decl);
21912 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21913
21914 if (origin != NULL)
21915 add_abstract_origin_attribute (decl_die, origin);
21916 else
21917 {
21918 add_name_and_src_coords_attributes (decl_die, decl);
21919 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21920 TYPE_UNQUALIFIED, false, context_die);
21921 }
21922
21923 if (DECL_ABSTRACT_P (decl))
21924 equate_decl_number_to_die (decl, decl_die);
21925 else
21926 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21927 }
21928 #endif
21929
21930 /* Walk through the list of incomplete types again, trying once more to
21931 emit full debugging info for them. */
21932
21933 static void
21934 retry_incomplete_types (void)
21935 {
21936 set_early_dwarf s;
21937 int i;
21938
21939 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
21940 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
21941 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
21942 vec_safe_truncate (incomplete_types, 0);
21943 }
21944
21945 /* Determine what tag to use for a record type. */
21946
21947 static enum dwarf_tag
21948 record_type_tag (tree type)
21949 {
21950 if (! lang_hooks.types.classify_record)
21951 return DW_TAG_structure_type;
21952
21953 switch (lang_hooks.types.classify_record (type))
21954 {
21955 case RECORD_IS_STRUCT:
21956 return DW_TAG_structure_type;
21957
21958 case RECORD_IS_CLASS:
21959 return DW_TAG_class_type;
21960
21961 case RECORD_IS_INTERFACE:
21962 if (dwarf_version >= 3 || !dwarf_strict)
21963 return DW_TAG_interface_type;
21964 return DW_TAG_structure_type;
21965
21966 default:
21967 gcc_unreachable ();
21968 }
21969 }
21970
21971 /* Generate a DIE to represent an enumeration type. Note that these DIEs
21972 include all of the information about the enumeration values also. Each
21973 enumerated type name/value is listed as a child of the enumerated type
21974 DIE. */
21975
21976 static dw_die_ref
21977 gen_enumeration_type_die (tree type, dw_die_ref context_die)
21978 {
21979 dw_die_ref type_die = lookup_type_die (type);
21980 dw_die_ref orig_type_die = type_die;
21981
21982 if (type_die == NULL)
21983 {
21984 type_die = new_die (DW_TAG_enumeration_type,
21985 scope_die_for (type, context_die), type);
21986 equate_type_number_to_die (type, type_die);
21987 add_name_attribute (type_die, type_tag (type));
21988 if ((dwarf_version >= 4 || !dwarf_strict)
21989 && ENUM_IS_SCOPED (type))
21990 add_AT_flag (type_die, DW_AT_enum_class, 1);
21991 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
21992 add_AT_flag (type_die, DW_AT_declaration, 1);
21993 if (!dwarf_strict)
21994 add_AT_unsigned (type_die, DW_AT_encoding,
21995 TYPE_UNSIGNED (type)
21996 ? DW_ATE_unsigned
21997 : DW_ATE_signed);
21998 }
21999 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
22000 return type_die;
22001 else
22002 remove_AT (type_die, DW_AT_declaration);
22003
22004 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
22005 given enum type is incomplete, do not generate the DW_AT_byte_size
22006 attribute or the DW_AT_element_list attribute. */
22007 if (TYPE_SIZE (type))
22008 {
22009 tree link;
22010
22011 if (!ENUM_IS_OPAQUE (type))
22012 TREE_ASM_WRITTEN (type) = 1;
22013 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
22014 add_byte_size_attribute (type_die, type);
22015 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
22016 add_alignment_attribute (type_die, type);
22017 if ((dwarf_version >= 3 || !dwarf_strict)
22018 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
22019 {
22020 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
22021 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
22022 context_die);
22023 }
22024 if (TYPE_STUB_DECL (type) != NULL_TREE)
22025 {
22026 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22027 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22028 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22029 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22030 }
22031
22032 /* If the first reference to this type was as the return type of an
22033 inline function, then it may not have a parent. Fix this now. */
22034 if (type_die->die_parent == NULL)
22035 add_child_die (scope_die_for (type, context_die), type_die);
22036
22037 for (link = TYPE_VALUES (type);
22038 link != NULL; link = TREE_CHAIN (link))
22039 {
22040 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22041 tree value = TREE_VALUE (link);
22042
22043 gcc_assert (!ENUM_IS_OPAQUE (type));
22044 add_name_attribute (enum_die,
22045 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22046
22047 if (TREE_CODE (value) == CONST_DECL)
22048 value = DECL_INITIAL (value);
22049
22050 if (simple_type_size_in_bits (TREE_TYPE (value))
22051 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22052 {
22053 /* For constant forms created by add_AT_unsigned DWARF
22054 consumers (GDB, elfutils, etc.) always zero extend
22055 the value. Only when the actual value is negative
22056 do we need to use add_AT_int to generate a constant
22057 form that can represent negative values. */
22058 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22059 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22060 add_AT_unsigned (enum_die, DW_AT_const_value,
22061 (unsigned HOST_WIDE_INT) val);
22062 else
22063 add_AT_int (enum_die, DW_AT_const_value, val);
22064 }
22065 else
22066 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22067 that here. TODO: This should be re-worked to use correct
22068 signed/unsigned double tags for all cases. */
22069 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22070 }
22071
22072 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22073 if (TYPE_ARTIFICIAL (type)
22074 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22075 add_AT_flag (type_die, DW_AT_artificial, 1);
22076 }
22077 else
22078 add_AT_flag (type_die, DW_AT_declaration, 1);
22079
22080 add_pubtype (type, type_die);
22081
22082 return type_die;
22083 }
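
 /* Illustrative sketch, not part of the original sources: for

      enum color { RED, GREEN = 5, BLUE };

    the output would look roughly like

      DW_TAG_enumeration_type  DW_AT_name "color", DW_AT_byte_size ...
        DW_TAG_enumerator  DW_AT_name "RED",   DW_AT_const_value 0
        DW_TAG_enumerator  DW_AT_name "GREEN", DW_AT_const_value 5
        DW_TAG_enumerator  DW_AT_name "BLUE",  DW_AT_const_value 6

    with DW_AT_enum_class added for C++11 scoped enums when DWARF 4+ or
    non-strict DWARF is requested.  */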
22084
22085 /* Generate a DIE to represent either a real live formal parameter decl or to
22086 represent just the type of some formal parameter position in some function
22087 type.
22088
22089 Note that this routine is a bit unusual because its argument may be a
22090 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22091 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22092 node. If it's the former then this function is being called to output a
22093 DIE to represent a formal parameter object (or some inlining thereof). If
22094 it's the latter, then this function is only being called to output a
22095 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22096 argument type of some subprogram type.
22097 If EMIT_NAME_P is true, name and source coordinate attributes
22098 are emitted. */
22099
22100 static dw_die_ref
22101 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22102 dw_die_ref context_die)
22103 {
22104 tree node_or_origin = node ? node : origin;
22105 tree ultimate_origin;
22106 dw_die_ref parm_die = NULL;
22107
22108 if (DECL_P (node_or_origin))
22109 {
22110 parm_die = lookup_decl_die (node);
22111
22112 /* If the contexts differ, we may not be talking about the same
22113 thing.
22114 ??? When in LTO the DIE parent is the "abstract" copy and the
22115 context_die is the specification "copy". But this whole block
22116 should eventually be no longer needed. */
22117 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
22118 {
22119 if (!DECL_ABSTRACT_P (node))
22120 {
22121 /* This can happen when creating an inlined instance, in
22122 which case we need to create a new DIE that will get
22123 annotated with DW_AT_abstract_origin. */
22124 parm_die = NULL;
22125 }
22126 else
22127 gcc_unreachable ();
22128 }
22129
22130 if (parm_die && parm_die->die_parent == NULL)
22131 {
22132 /* Check that parm_die already has the right attributes that
22133 we would have added below. If any attributes are
22134 missing, fall through to add them. */
22135 if (! DECL_ABSTRACT_P (node_or_origin)
22136 && !get_AT (parm_die, DW_AT_location)
22137 && !get_AT (parm_die, DW_AT_const_value))
22138 /* We are missing location info, and are about to add it. */
22139 ;
22140 else
22141 {
22142 add_child_die (context_die, parm_die);
22143 return parm_die;
22144 }
22145 }
22146 }
22147
22148   /* If we have a previously generated DIE, use it, unless this is a
22149 concrete instance (origin != NULL), in which case we need a new
22150 DIE with a corresponding DW_AT_abstract_origin. */
22151 bool reusing_die;
22152 if (parm_die && origin == NULL)
22153 reusing_die = true;
22154 else
22155 {
22156 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22157 reusing_die = false;
22158 }
22159
22160 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22161 {
22162 case tcc_declaration:
22163 ultimate_origin = decl_ultimate_origin (node_or_origin);
22164 if (node || ultimate_origin)
22165 origin = ultimate_origin;
22166
22167 if (reusing_die)
22168 goto add_location;
22169
22170 if (origin != NULL)
22171 add_abstract_origin_attribute (parm_die, origin);
22172 else if (emit_name_p)
22173 add_name_and_src_coords_attributes (parm_die, node);
22174 if (origin == NULL
22175 || (! DECL_ABSTRACT_P (node_or_origin)
22176 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22177 decl_function_context
22178 (node_or_origin))))
22179 {
22180 tree type = TREE_TYPE (node_or_origin);
22181 if (decl_by_reference_p (node_or_origin))
22182 add_type_attribute (parm_die, TREE_TYPE (type),
22183 TYPE_UNQUALIFIED,
22184 false, context_die);
22185 else
22186 add_type_attribute (parm_die, type,
22187 decl_quals (node_or_origin),
22188 false, context_die);
22189 }
22190 if (origin == NULL && DECL_ARTIFICIAL (node))
22191 add_AT_flag (parm_die, DW_AT_artificial, 1);
22192 add_location:
22193 if (node && node != origin)
22194 equate_decl_number_to_die (node, parm_die);
22195 if (! DECL_ABSTRACT_P (node_or_origin))
22196 add_location_or_const_value_attribute (parm_die, node_or_origin,
22197 node == NULL);
22198
22199 break;
22200
22201 case tcc_type:
22202 /* We were called with some kind of a ..._TYPE node. */
22203 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22204 context_die);
22205 break;
22206
22207 default:
22208 gcc_unreachable ();
22209 }
22210
22211 return parm_die;
22212 }
22213
22214 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22215 children DW_TAG_formal_parameter DIEs representing the arguments of the
22216 parameter pack.
22217
22218 PARM_PACK must be a function parameter pack.
22219 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22220 must point to the subsequent arguments of the function PACK_ARG belongs to.
22221 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22222 If NEXT_ARG is non NULL, *NEXT_ARG is set to the function argument
22223 following the last one for which a DIE was generated. */
22224
22225 static dw_die_ref
22226 gen_formal_parameter_pack_die (tree parm_pack,
22227 tree pack_arg,
22228 dw_die_ref subr_die,
22229 tree *next_arg)
22230 {
22231 tree arg;
22232 dw_die_ref parm_pack_die;
22233
22234 gcc_assert (parm_pack
22235 && lang_hooks.function_parameter_pack_p (parm_pack)
22236 && subr_die);
22237
22238 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22239 add_src_coords_attributes (parm_pack_die, parm_pack);
22240
22241 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22242 {
22243 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22244 parm_pack))
22245 break;
22246 gen_formal_parameter_die (arg, NULL,
22247 false /* Don't emit name attribute. */,
22248 parm_pack_die);
22249 }
22250 if (next_arg)
22251 *next_arg = arg;
22252 return parm_pack_die;
22253 }
22254
22255 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22256 at the end of an (ANSI prototyped) formal parameters list. */
22257
22258 static void
22259 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22260 {
22261 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22262 }
22263
22264 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22265 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22266 parameters as specified in some function type specification (except for
22267 those which appear as part of a function *definition*). */
22268
22269 static void
22270 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22271 {
22272 tree link;
22273 tree formal_type = NULL;
22274 tree first_parm_type;
22275 tree arg;
22276
22277 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22278 {
22279 arg = DECL_ARGUMENTS (function_or_method_type);
22280 function_or_method_type = TREE_TYPE (function_or_method_type);
22281 }
22282 else
22283 arg = NULL_TREE;
22284
22285 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22286
22287 /* Make our first pass over the list of formal parameter types and output a
22288 DW_TAG_formal_parameter DIE for each one. */
22289 for (link = first_parm_type; link; )
22290 {
22291 dw_die_ref parm_die;
22292
22293 formal_type = TREE_VALUE (link);
22294 if (formal_type == void_type_node)
22295 break;
22296
22297 /* Output a (nameless) DIE to represent the formal parameter itself. */
22298 parm_die = gen_formal_parameter_die (formal_type, NULL,
22299 true /* Emit name attribute. */,
22300 context_die);
22301 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22302 && link == first_parm_type)
22303 {
22304 add_AT_flag (parm_die, DW_AT_artificial, 1);
22305 if (dwarf_version >= 3 || !dwarf_strict)
22306 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22307 }
22308 else if (arg && DECL_ARTIFICIAL (arg))
22309 add_AT_flag (parm_die, DW_AT_artificial, 1);
22310
22311 link = TREE_CHAIN (link);
22312 if (arg)
22313 arg = DECL_CHAIN (arg);
22314 }
22315
22316 /* If this function type has an ellipsis, add a
22317 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22318 if (formal_type != void_type_node)
22319 gen_unspecified_parameters_die (function_or_method_type, context_die);
22320
22321 /* Make our second (and final) pass over the list of formal parameter types
22322 and output DIEs to represent those types (as necessary). */
22323 for (link = TYPE_ARG_TYPES (function_or_method_type);
22324 link && TREE_VALUE (link);
22325 link = TREE_CHAIN (link))
22326 gen_type_die (TREE_VALUE (link), context_die);
22327 }
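
 /* Illustrative sketch, not part of the original sources: for a function
    type like

      int printf (const char *, ...);

    the first pass above creates one nameless DW_TAG_formal_parameter for the
    "const char *" argument and, because the parameter list does not end in
    void_type_node, a trailing DW_TAG_unspecified_parameters DIE is added to
    stand for the ellipsis.  */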
22328
22329 /* We want to generate the DIE for TYPE so that we can generate the
22330 die for MEMBER, which has been defined; we will need to refer back
22331 to the member declaration nested within TYPE. If we're trying to
22332 generate minimal debug info for TYPE, processing TYPE won't do the
22333 trick; we need to attach the member declaration by hand. */
22334
22335 static void
22336 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22337 {
22338 gen_type_die (type, context_die);
22339
22340 /* If we're trying to avoid duplicate debug info, we may not have
22341 emitted the member decl for this function. Emit it now. */
22342 if (TYPE_STUB_DECL (type)
22343 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22344 && ! lookup_decl_die (member))
22345 {
22346 dw_die_ref type_die;
22347 gcc_assert (!decl_ultimate_origin (member));
22348
22349 type_die = lookup_type_die_strip_naming_typedef (type);
22350 if (TREE_CODE (member) == FUNCTION_DECL)
22351 gen_subprogram_die (member, type_die);
22352 else if (TREE_CODE (member) == FIELD_DECL)
22353 {
22354 /* Ignore the nameless fields that are used to skip bits but handle
22355 C++ anonymous unions and structs. */
22356 if (DECL_NAME (member) != NULL_TREE
22357 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22358 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22359 {
22360 struct vlr_context vlr_ctx = {
22361 DECL_CONTEXT (member), /* struct_type */
22362 NULL_TREE /* variant_part_offset */
22363 };
22364 gen_type_die (member_declared_type (member), type_die);
22365 gen_field_die (member, &vlr_ctx, type_die);
22366 }
22367 }
22368 else
22369 gen_variable_die (member, NULL_TREE, type_die);
22370 }
22371 }
22372 \f
22373 /* Forward declare these functions, because they are mutually recursive
22374 with their set_block_* pairing functions. */
22375 static void set_decl_origin_self (tree);
22376
22377 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22378 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22379 that it points to the node itself, thus indicating that the node is its
22380 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22381    the given node is NULL, recursively descend the decl/block tree of which
22382    it is the root, and for each other ..._DECL or BLOCK node contained
22383 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22384 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22385 values to point to themselves. */
22386
22387 static void
22388 set_block_origin_self (tree stmt)
22389 {
22390 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22391 {
22392 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22393
22394 {
22395 tree local_decl;
22396
22397 for (local_decl = BLOCK_VARS (stmt);
22398 local_decl != NULL_TREE;
22399 local_decl = DECL_CHAIN (local_decl))
22400 /* Do not recurse on nested functions since the inlining status
22401 of parent and child can be different as per the DWARF spec. */
22402 if (TREE_CODE (local_decl) != FUNCTION_DECL
22403 && !DECL_EXTERNAL (local_decl))
22404 set_decl_origin_self (local_decl);
22405 }
22406
22407 {
22408 tree subblock;
22409
22410 for (subblock = BLOCK_SUBBLOCKS (stmt);
22411 subblock != NULL_TREE;
22412 subblock = BLOCK_CHAIN (subblock))
22413 set_block_origin_self (subblock); /* Recurse. */
22414 }
22415 }
22416 }
22417
22418 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22419 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22420    node so that it points to the node itself, thus indicating that the
22421 node represents its own (abstract) origin. Additionally, if the
22422 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22423    the decl/block tree of which the given node is the root, and for
22424 each other ..._DECL or BLOCK node contained therein whose
22425 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22426 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22427 point to themselves. */
22428
22429 static void
22430 set_decl_origin_self (tree decl)
22431 {
22432 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22433 {
22434 DECL_ABSTRACT_ORIGIN (decl) = decl;
22435 if (TREE_CODE (decl) == FUNCTION_DECL)
22436 {
22437 tree arg;
22438
22439 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22440 DECL_ABSTRACT_ORIGIN (arg) = arg;
22441 if (DECL_INITIAL (decl) != NULL_TREE
22442 && DECL_INITIAL (decl) != error_mark_node)
22443 set_block_origin_self (DECL_INITIAL (decl));
22444 }
22445 }
22446 }
22447 \f
22448 /* Mark the early DIE for DECL as the abstract instance. */
22449
22450 static void
22451 dwarf2out_abstract_function (tree decl)
22452 {
22453 dw_die_ref old_die;
22454
22455 /* Make sure we have the actual abstract inline, not a clone. */
22456 decl = DECL_ORIGIN (decl);
22457
22458 if (DECL_IGNORED_P (decl))
22459 return;
22460
22461 /* In LTO we're all set. We already created abstract instances
22462 early and we want to avoid creating a concrete instance of that
22463 if we don't output it. */
22464 if (in_lto_p)
22465 return;
22466
22467 old_die = lookup_decl_die (decl);
22468 gcc_assert (old_die != NULL);
22469 if (get_AT (old_die, DW_AT_inline))
22470 /* We've already generated the abstract instance. */
22471 return;
22472
22473 /* Go ahead and put DW_AT_inline on the DIE. */
22474 if (DECL_DECLARED_INLINE_P (decl))
22475 {
22476 if (cgraph_function_possibly_inlined_p (decl))
22477 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22478 else
22479 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22480 }
22481 else
22482 {
22483 if (cgraph_function_possibly_inlined_p (decl))
22484 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22485 else
22486 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22487 }
22488
22489 if (DECL_DECLARED_INLINE_P (decl)
22490 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22491 add_AT_flag (old_die, DW_AT_artificial, 1);
22492
22493 set_decl_origin_self (decl);
22494 }
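/* Illustrative sketch only (not part of this file's logic): for

     inline int twice (int x) { return 2 * x; }

   which cgraph reports as possibly inlined, the abstract instance DIE
   annotated above gets DW_AT_inline = DW_INL_declared_inlined; a function
   not declared inline but still possibly inlined gets DW_INL_inlined, and
   the two *_not_inlined values cover the remaining combinations.  */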
22495
22496 /* Helper function of premark_used_types() which gets called through
22497 hash_set::traverse.
22498
22499 Marks the DIE of the given TYPE as perennial, so it never gets
22500 marked as unused by prune_unused_types. */
22501
22502 bool
22503 premark_used_types_helper (tree const &type, void *)
22504 {
22505 dw_die_ref die;
22506
22507 die = lookup_type_die (type);
22508 if (die != NULL)
22509 die->die_perennial_p = 1;
22510 return true;
22511 }
22512
22513 /* Helper function of premark_types_used_by_global_vars which gets called
22514 through htab_traverse.
22515
22516 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22517 marked as unused by prune_unused_types. The DIE of the type is marked
22518 only if the global variable using the type will actually be emitted. */
22519
22520 int
22521 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22522 void *)
22523 {
22524 struct types_used_by_vars_entry *entry;
22525 dw_die_ref die;
22526
22527 entry = (struct types_used_by_vars_entry *) *slot;
22528 gcc_assert (entry->type != NULL
22529 && entry->var_decl != NULL);
22530 die = lookup_type_die (entry->type);
22531 if (die)
22532 {
22533 /* Ask cgraph if the global variable really is to be emitted.
22534 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22535 varpool_node *node = varpool_node::get (entry->var_decl);
22536 if (node && node->definition)
22537 {
22538 die->die_perennial_p = 1;
22539 /* Keep the parent DIEs as well. */
22540 while ((die = die->die_parent) && die->die_perennial_p == 0)
22541 die->die_perennial_p = 1;
22542 }
22543 }
22544 return 1;
22545 }
22546
22547 /* Mark all members of used_types_hash as perennial. */
22548
22549 static void
22550 premark_used_types (struct function *fun)
22551 {
22552 if (fun && fun->used_types_hash)
22553 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22554 }
22555
22556 /* Mark all members of types_used_by_vars_entry as perennial. */
22557
22558 static void
22559 premark_types_used_by_global_vars (void)
22560 {
22561 if (types_used_by_vars_hash)
22562 types_used_by_vars_hash
22563 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22564 }
22565
22566 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22567 for CA_LOC call arg loc node. */
22568
22569 static dw_die_ref
22570 gen_call_site_die (tree decl, dw_die_ref subr_die,
22571 struct call_arg_loc_node *ca_loc)
22572 {
22573 dw_die_ref stmt_die = NULL, die;
22574 tree block = ca_loc->block;
22575
22576 while (block
22577 && block != DECL_INITIAL (decl)
22578 && TREE_CODE (block) == BLOCK)
22579 {
22580 stmt_die = lookup_block_die (block);
22581 if (stmt_die)
22582 break;
22583 block = BLOCK_SUPERCONTEXT (block);
22584 }
22585 if (stmt_die == NULL)
22586 stmt_die = subr_die;
22587 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22588 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22589 if (ca_loc->tail_call_p)
22590 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22591 if (ca_loc->symbol_ref)
22592 {
22593 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22594 if (tdie)
22595 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22596 else
22597 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22598 false);
22599 }
22600 return die;
22601 }
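/* Illustrative sketch of the DIE built above for a plain direct call
   such as "foo ();" whose callee DIE is already known (names here are
   examples only):

     DW_TAG_call_site            (or the GNU extension tag before DWARF 5)
       DW_AT_call_return_pc   label emitted just after the call insn
       DW_AT_call_origin      reference to the DIE of foo

   plus DW_AT_call_tail_call when CA_LOC marks a tail call.  The DIE is
   parented in the innermost enclosing block that already has a DIE, or
   in SUBR_DIE otherwise.  */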
22602
22603 /* Generate a DIE to represent a declared function (either file-scope or
22604 block-local). */
22605
22606 static void
22607 gen_subprogram_die (tree decl, dw_die_ref context_die)
22608 {
22609 tree origin = decl_ultimate_origin (decl);
22610 dw_die_ref subr_die;
22611 dw_die_ref old_die = lookup_decl_die (decl);
22612
22613 /* This function gets called multiple times for different stages of
22614 the debug process. For example, for func() in this code:
22615
22616 namespace S
22617 {
22618 void func() { ... }
22619 }
22620
22621 ...we get called 4 times. Twice in early debug and twice in
22622 late debug:
22623
22624 Early debug
22625 -----------
22626
22627 1. Once while generating func() within the namespace. This is
22628 the declaration. The declaration bit below is set, as the
22629 context is the namespace.
22630
22631 A new DIE will be generated with DW_AT_declaration set.
22632
22633 2. Once for func() itself. This is the specification. The
22634 declaration bit below is clear as the context is the CU.
22635
22636 We will use the cached DIE from (1) to create a new DIE with
22637 DW_AT_specification pointing to the declaration in (1).
22638
22639 Late debug via rest_of_handle_final()
22640 -------------------------------------
22641
22642 3. Once while generating func() within the namespace. This is also the
22643 declaration, as in (1), but this time we will early exit below
22644 as we have a cached DIE and a declaration needs no additional
22645 annotations (no locations), as the source declaration line
22646 info is enough.
22647
22648 4. Once for func() itself. As in (2), this is the specification,
22649 but this time we will re-use the cached DIE, and just annotate
22650 it with the location information that should now be available.
22651
22652 For something without namespaces, but with abstract instances, we
22653 are also called multiple times:
22654
22655 class Base
22656 {
22657 public:
22658 Base (); // constructor declaration (1)
22659 };
22660
22661 Base::Base () { } // constructor specification (2)
22662
22663 Early debug
22664 -----------
22665
22666 1. Once for the Base() constructor by virtue of it being a
22667 member of the Base class. This is done via
22668 rest_of_type_compilation.
22669
22670 This is a declaration, so a new DIE will be created with
22671 DW_AT_declaration.
22672
22673 2. Once for the Base() constructor definition, but this time
22674 while generating the abstract instance of the base
22675 constructor (__base_ctor) which is being generated via early
22676 debug of reachable functions.
22677
22678 Even though we have a cached version of the declaration (1),
22679 we will create a DW_AT_specification of the declaration DIE
22680 in (1).
22681
22682 3. Once for the __base_ctor itself, but this time, we generate
22683 a DW_AT_abstract_origin version of the DW_AT_specification in
22684 (2).
22685
22686 Late debug via rest_of_handle_final
22687 -----------------------------------
22688
22689 4. One final time for the __base_ctor (which will have a cached
22690 DIE with DW_AT_abstract_origin created in (3)). This time,
22691 we will just annotate the location information now
22692 available.
22693 */
22694 int declaration = (current_function_decl != decl
22695 || class_or_namespace_scope_p (context_die));
22696
22697 /* A declaration that has been previously dumped needs no
22698 additional information. */
22699 if (old_die && declaration)
22700 return;
22701
22702 /* Now that the C++ front end lazily declares artificial member fns, we
22703 might need to retrofit the declaration into its class. */
22704 if (!declaration && !origin && !old_die
22705 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22706 && !class_or_namespace_scope_p (context_die)
22707 && debug_info_level > DINFO_LEVEL_TERSE)
22708 old_die = force_decl_die (decl);
22709
22710 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22711 if (origin != NULL)
22712 {
22713 gcc_assert (!declaration || local_scope_p (context_die));
22714
22715 /* Fixup die_parent for the abstract instance of a nested
22716 inline function. */
22717 if (old_die && old_die->die_parent == NULL)
22718 add_child_die (context_die, old_die);
22719
22720 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22721 {
22722 /* If we have a DW_AT_abstract_origin we have a working
22723 cached version. */
22724 subr_die = old_die;
22725 }
22726 else
22727 {
22728 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22729 add_abstract_origin_attribute (subr_die, origin);
22730 /* This is where the actual code for a cloned function is.
22731 Let's emit linkage name attribute for it. This helps
22732 debuggers to, e.g., set breakpoints into
22733 constructors/destructors when the user asks "break
22734 K::K". */
22735 add_linkage_name (subr_die, decl);
22736 }
22737 }
22738 /* A cached copy, possibly from early dwarf generation. Reuse as
22739 much as possible. */
22740 else if (old_die)
22741 {
22742 if (!get_AT_flag (old_die, DW_AT_declaration)
22743 /* We can have a normal definition following an inline one in the
22744 case of redefinition of GNU C extern inlines.
22745 It seems reasonable to use AT_specification in this case. */
22746 && !get_AT (old_die, DW_AT_inline))
22747 {
22748 /* Detect and ignore this case, where we are trying to output
22749 something we have already output. */
22750 if (get_AT (old_die, DW_AT_low_pc)
22751 || get_AT (old_die, DW_AT_ranges))
22752 return;
22753
22754 /* If we have no location information, this must be a
22755 partially generated DIE from early dwarf generation.
22756 Fall through and generate it. */
22757 }
22758
22759 /* If the definition comes from the same place as the declaration,
22760 maybe use the old DIE. We always want the DIE for this function
22761 that has the *_pc attributes to be under comp_unit_die so the
22762 debugger can find it. We also need to do this for abstract
22763 instances of inlines, since the spec requires the out-of-line copy
22764 to have the same parent. For local class methods, this doesn't
22765 apply; we just use the old DIE. */
22766 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22767 struct dwarf_file_data * file_index = lookup_filename (s.file);
22768 if (((is_unit_die (old_die->die_parent)
22769 /* This condition fixes the inconsistency/ICE with the
22770 following Fortran test (or some derivative thereof) while
22771 building libgfortran:
22772
22773 module some_m
22774 contains
22775 logical function funky (FLAG)
22776 funky = .true.
22777 end function
22778 end module
22779 */
22780 || (old_die->die_parent
22781 && old_die->die_parent->die_tag == DW_TAG_module)
22782 || local_scope_p (old_die->die_parent)
22783 || context_die == NULL)
22784 && (DECL_ARTIFICIAL (decl)
22785 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22786 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22787 == (unsigned) s.line)
22788 && (!debug_column_info
22789 || s.column == 0
22790 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22791 == (unsigned) s.column)))))
22792 /* With LTO if there's an abstract instance for
22793 the old DIE, this is a concrete instance and
22794 thus re-use the DIE. */
22795 || get_AT (old_die, DW_AT_abstract_origin))
22796 {
22797 subr_die = old_die;
22798
22799 /* Clear out the declaration attribute, but leave the
22800 parameters so they can be augmented with location
22801 information later. Unless this was a declaration, in
22802 which case, wipe out the nameless parameters and recreate
22803 them further down. */
22804 if (remove_AT (subr_die, DW_AT_declaration))
22805 {
22806
22807 remove_AT (subr_die, DW_AT_object_pointer);
22808 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22809 }
22810 }
22811 /* Make a specification pointing to the previously built
22812 declaration. */
22813 else
22814 {
22815 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22816 add_AT_specification (subr_die, old_die);
22817 add_pubname (decl, subr_die);
22818 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22819 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22820 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22821 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22822 if (debug_column_info
22823 && s.column
22824 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22825 != (unsigned) s.column))
22826 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22827
22828 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22829 emit the real type on the definition die. */
22830 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22831 {
22832 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22833 if (die == auto_die || die == decltype_auto_die)
22834 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22835 TYPE_UNQUALIFIED, false, context_die);
22836 }
22837
22838 /* When we process the method declaration, we haven't seen
22839 the out-of-class defaulted definition yet, so we have to
22840 recheck now. */
22841 if ((dwarf_version >= 5 || ! dwarf_strict)
22842 && !get_AT (subr_die, DW_AT_defaulted))
22843 {
22844 int defaulted
22845 = lang_hooks.decls.decl_dwarf_attribute (decl,
22846 DW_AT_defaulted);
22847 if (defaulted != -1)
22848 {
22849 /* Other values must have been handled before. */
22850 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22851 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22852 }
22853 }
22854 }
22855 }
22856 /* Create a fresh DIE for anything else. */
22857 else
22858 {
22859 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22860
22861 if (TREE_PUBLIC (decl))
22862 add_AT_flag (subr_die, DW_AT_external, 1);
22863
22864 add_name_and_src_coords_attributes (subr_die, decl);
22865 add_pubname (decl, subr_die);
22866 if (debug_info_level > DINFO_LEVEL_TERSE)
22867 {
22868 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22869 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22870 TYPE_UNQUALIFIED, false, context_die);
22871 }
22872
22873 add_pure_or_virtual_attribute (subr_die, decl);
22874 if (DECL_ARTIFICIAL (decl))
22875 add_AT_flag (subr_die, DW_AT_artificial, 1);
22876
22877 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22878 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22879
22880 add_alignment_attribute (subr_die, decl);
22881
22882 add_accessibility_attribute (subr_die, decl);
22883 }
22884
22885 /* Unless we have an existing non-declaration DIE, equate the new
22886 DIE. */
22887 if (!old_die || is_declaration_die (old_die))
22888 equate_decl_number_to_die (decl, subr_die);
22889
22890 if (declaration)
22891 {
22892 if (!old_die || !get_AT (old_die, DW_AT_inline))
22893 {
22894 add_AT_flag (subr_die, DW_AT_declaration, 1);
22895
22896 /* If this is an explicit function declaration then generate
22897 a DW_AT_explicit attribute. */
22898 if ((dwarf_version >= 3 || !dwarf_strict)
22899 && lang_hooks.decls.decl_dwarf_attribute (decl,
22900 DW_AT_explicit) == 1)
22901 add_AT_flag (subr_die, DW_AT_explicit, 1);
22902
22903 /* If this is a C++11 deleted special function member then generate
22904 a DW_AT_deleted attribute. */
22905 if ((dwarf_version >= 5 || !dwarf_strict)
22906 && lang_hooks.decls.decl_dwarf_attribute (decl,
22907 DW_AT_deleted) == 1)
22908 add_AT_flag (subr_die, DW_AT_deleted, 1);
22909
22910 /* If this is a C++11 defaulted special function member then
22911 generate a DW_AT_defaulted attribute. */
22912 if (dwarf_version >= 5 || !dwarf_strict)
22913 {
22914 int defaulted
22915 = lang_hooks.decls.decl_dwarf_attribute (decl,
22916 DW_AT_defaulted);
22917 if (defaulted != -1)
22918 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22919 }
22920
22921 /* If this is a C++11 non-static member function with & ref-qualifier
22922 then generate a DW_AT_reference attribute. */
22923 if ((dwarf_version >= 5 || !dwarf_strict)
22924 && lang_hooks.decls.decl_dwarf_attribute (decl,
22925 DW_AT_reference) == 1)
22926 add_AT_flag (subr_die, DW_AT_reference, 1);
22927
22928 /* If this is a C++11 non-static member function with &&
22929 ref-qualifier then generate a DW_AT_reference attribute. */
22930 if ((dwarf_version >= 5 || !dwarf_strict)
22931 && lang_hooks.decls.decl_dwarf_attribute (decl,
22932 DW_AT_rvalue_reference)
22933 == 1)
22934 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
22935 }
22936 }
22937 /* For non DECL_EXTERNALs, if range information is available, fill
22938 the DIE with it. */
22939 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
22940 {
22941 HOST_WIDE_INT cfa_fb_offset;
22942
22943 struct function *fun = DECL_STRUCT_FUNCTION (decl);
22944
22945 if (!crtl->has_bb_partition)
22946 {
22947 dw_fde_ref fde = fun->fde;
22948 if (fde->dw_fde_begin)
22949 {
22950 /* We have already generated the labels. */
22951 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22952 fde->dw_fde_end, false);
22953 }
22954 else
22955 {
22956 /* Create start/end labels and add the range. */
22957 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
22958 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
22959 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
22960 current_function_funcdef_no);
22961 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
22962 current_function_funcdef_no);
22963 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
22964 false);
22965 }
22966
22967 #if VMS_DEBUGGING_INFO
22968 /* HP OpenVMS Industry Standard 64: DWARF Extensions
22969 Section 2.3 Prologue and Epilogue Attributes:
22970 When a breakpoint is set on entry to a function, it is generally
22971 desirable for execution to be suspended, not on the very first
22972 instruction of the function, but rather at a point after the
22973 function's frame has been set up, after any language defined local
22974 declaration processing has been completed, and before execution of
22975 the first statement of the function begins. Debuggers generally
22976 cannot properly determine where this point is. Similarly for a
22977 breakpoint set on exit from a function. The prologue and epilogue
22978 attributes allow a compiler to communicate the location(s) to use. */
22979
22980 {
22981 if (fde->dw_fde_vms_end_prologue)
22982 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
22983 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
22984
22985 if (fde->dw_fde_vms_begin_epilogue)
22986 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
22987 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
22988 }
22989 #endif
22990
22991 }
22992 else
22993 {
22994 /* Generate pubnames entries for the split function code ranges. */
22995 dw_fde_ref fde = fun->fde;
22996
22997 if (fde->dw_fde_second_begin)
22998 {
22999 if (dwarf_version >= 3 || !dwarf_strict)
23000 {
23001 /* We should use ranges for non-contiguous code section
23002 addresses. Use the actual code range for the initial
23003 section, since the HOT/COLD labels might precede an
23004 alignment offset. */
23005 bool range_list_added = false;
23006 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
23007 fde->dw_fde_end, &range_list_added,
23008 false);
23009 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
23010 fde->dw_fde_second_end,
23011 &range_list_added, false);
23012 if (range_list_added)
23013 add_ranges (NULL);
23014 }
23015 else
23016 {
23017 /* There is no real support in DWARF 2 for this, so we make
23018 a work-around. First, emit the pub name for the segment
23019 containing the function label. Then make and emit a
23020 simplified subprogram DIE for the second segment with the
23021 name prefixed by __second_sect_of_. We use the same
23022 linkage name for the second DIE so that gdb will find both
23023 sections when given "b foo". */
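/* Shape of the resulting output, as an illustrative sketch only, for a
   function foo split into two sections under strict DWARF 2:

     DW_TAG_subprogram "foo"                   low/high pc of the primary part
     DW_TAG_subprogram "__second_sect_of_foo"  low/high pc of the second part,
                                               same linkage name as foo

   so that a breakpoint on "foo" resolves in both code sections.  */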
23024 const char *name = NULL;
23025 tree decl_name = DECL_NAME (decl);
23026 dw_die_ref seg_die;
23027
23028 /* Do the 'primary' section. */
23029 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23030 fde->dw_fde_end, false);
23031
23032 /* Build a minimal DIE for the secondary section. */
23033 seg_die = new_die (DW_TAG_subprogram,
23034 subr_die->die_parent, decl);
23035
23036 if (TREE_PUBLIC (decl))
23037 add_AT_flag (seg_die, DW_AT_external, 1);
23038
23039 if (decl_name != NULL
23040 && IDENTIFIER_POINTER (decl_name) != NULL)
23041 {
23042 name = dwarf2_name (decl, 1);
23043 if (! DECL_ARTIFICIAL (decl))
23044 add_src_coords_attributes (seg_die, decl);
23045
23046 add_linkage_name (seg_die, decl);
23047 }
23048 gcc_assert (name != NULL);
23049 add_pure_or_virtual_attribute (seg_die, decl);
23050 if (DECL_ARTIFICIAL (decl))
23051 add_AT_flag (seg_die, DW_AT_artificial, 1);
23052
23053 name = concat ("__second_sect_of_", name, NULL);
23054 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23055 fde->dw_fde_second_end, false);
23056 add_name_attribute (seg_die, name);
23057 if (want_pubnames ())
23058 add_pubname_string (name, seg_die);
23059 }
23060 }
23061 else
23062 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23063 false);
23064 }
23065
23066 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23067
23068 /* We define the "frame base" as the function's CFA. This is more
23069 convenient for several reasons: (1) It's stable across the prologue
23070 and epilogue, which makes it better than just a frame pointer,
23071 (2) With dwarf3, there exists a one-byte encoding that allows us
23072 to reference the .debug_frame data by proxy, but failing that,
23073 (3) We can at least reuse the code inspection and interpretation
23074 code that determines the CFA position at various points in the
23075 function. */
23076 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23077 {
23078 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23079 add_AT_loc (subr_die, DW_AT_frame_base, op);
23080 }
23081 else
23082 {
23083 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23084 if (list->dw_loc_next)
23085 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23086 else
23087 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23088 }
23089
23090 /* Compute a displacement from the "steady-state frame pointer" to
23091 the CFA. The former is what all stack slots and argument slots
23092 will reference in the rtl; the latter is what we've told the
23093 debugger about. We'll need to adjust all frame_base references
23094 by this displacement. */
23095 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23096
23097 if (fun->static_chain_decl)
23098 {
23099 /* DWARF requires here a location expression that computes the
23100 address of the enclosing subprogram's frame base. The machinery
23101 in tree-nested.c is supposed to store this specific address in the
23102 last field of the FRAME record. */
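/* As an illustration only (a sketch, not emitted by this code): for the
   GNU C nested function

     void outer (void)
     {
       int i;
       void nested (void) { i++; }
       nested ();
     }

   the DIE for "nested" gets a DW_AT_static_link whose location expression
   dereferences the static chain pointer and loads that last FRAME field,
   yielding outer's frame base at run time.  */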
23103 const tree frame_type
23104 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23105 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23106
23107 tree fb_expr
23108 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23109 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23110 fb_expr, fb_decl, NULL_TREE);
23111
23112 add_AT_location_description (subr_die, DW_AT_static_link,
23113 loc_list_from_tree (fb_expr, 0, NULL));
23114 }
23115
23116 resolve_variable_values ();
23117 }
23118
23119 /* Generate child DIEs for template parameters. */
23120 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23121 gen_generic_params_dies (decl);
23122
23123 /* Now output descriptions of the arguments for this function. This gets
23124 (unnecessarily?) complex because the DECL_ARGUMENTS list
23125 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23126 `...' at the end of the formal parameter list. In order to find out if
23127 there was a trailing ellipsis or not, we must instead look at the type
23128 associated with the FUNCTION_DECL. This will be a node of type
23129 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23130 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23131 an ellipsis at the end. */
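/* A small illustration of that check (hypothetical declarations, used
   here only as an example):

     int f (int a);       TYPE_ARG_TYPES ends in void_type_node: no `...'
     int g (int a, ...);  chain does not end in void_type_node: trailing `...'
     int h ();            unprototyped: no parameter information at all

   prototype_p and stdarg_p, used further down, encapsulate these tests.  */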
23132
23133 /* In the case where we are describing a mere function declaration, all we
23134 need to do here (and all we *can* do here) is to describe the *types* of
23135 its formal parameters. */
23136 if (debug_info_level <= DINFO_LEVEL_TERSE)
23137 ;
23138 else if (declaration)
23139 gen_formal_types_die (decl, subr_die);
23140 else
23141 {
23142 /* Generate DIEs to represent all known formal parameters. */
23143 tree parm = DECL_ARGUMENTS (decl);
23144 tree generic_decl = early_dwarf
23145 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23146 tree generic_decl_parm = generic_decl
23147 ? DECL_ARGUMENTS (generic_decl)
23148 : NULL;
23149
23150 /* Now we want to walk the list of parameters of the function and
23151 emit their relevant DIEs.
23152
23153 We consider the case of DECL being an instance of a generic function
23154 as well as it being a normal function.
23155
23156 If DECL is an instance of a generic function we walk the
23157 parameters of the generic function declaration _and_ the parameters of
23158 DECL itself. This is useful because we want to emit specific DIEs for
23159 function parameter packs and those are declared as part of the
23160 generic function declaration. In that particular case,
23161 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23162 That DIE has children DIEs representing the set of arguments
23163 of the pack. Note that the set of pack arguments can be empty.
23164 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23165 child DIEs.
23166
23167 Otherwise, we just consider the parameters of DECL. */
23168 while (generic_decl_parm || parm)
23169 {
23170 if (generic_decl_parm
23171 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23172 gen_formal_parameter_pack_die (generic_decl_parm,
23173 parm, subr_die,
23174 &parm);
23175 else if (parm)
23176 {
23177 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23178
23179 if (early_dwarf
23180 && parm == DECL_ARGUMENTS (decl)
23181 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23182 && parm_die
23183 && (dwarf_version >= 3 || !dwarf_strict))
23184 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23185
23186 parm = DECL_CHAIN (parm);
23187 }
23188 else if (parm)
23189 parm = DECL_CHAIN (parm);
23190
23191 if (generic_decl_parm)
23192 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23193 }
23194
23195 /* Decide whether we need an unspecified_parameters DIE at the end.
23196 There are two cases to do this for: 1) the ANSI `...' declaration,
23197 which is detectable when the end of the arg list is not a
23198 void_type_node; 2) an unprototyped function declaration (not a
23199 definition), which just means that we have no info about the
23200 parameters at all. */
23201 if (early_dwarf)
23202 {
23203 if (prototype_p (TREE_TYPE (decl)))
23204 {
23205 /* This is the prototyped case: check for a trailing ellipsis. */
23206 if (stdarg_p (TREE_TYPE (decl)))
23207 gen_unspecified_parameters_die (decl, subr_die);
23208 }
23209 else if (DECL_INITIAL (decl) == NULL_TREE)
23210 gen_unspecified_parameters_die (decl, subr_die);
23211 }
23212 }
23213
23214 if (subr_die != old_die)
23215 /* Add the calling convention attribute if requested. */
23216 add_calling_convention_attribute (subr_die, decl);
23217
23218 /* Output Dwarf info for all of the stuff within the body of the function
23219 (if it has one - it may be just a declaration).
23220
23221 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23222 a function. This BLOCK actually represents the outermost binding contour
23223 for the function, i.e. the contour in which the function's formal
23224 parameters and labels get declared. Curiously, it appears that the front
23225 end doesn't actually put the PARM_DECL nodes for the current function onto
23226 the BLOCK_VARS list for this outer scope; instead they are strung off of
23227 the DECL_ARGUMENTS list for the function.
23228
23229 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23230 the LABEL_DECL nodes for the function however, and we output DWARF info
23231 for those in decls_for_scope. Just within the `outer_scope' there will be
23232 a BLOCK node representing the function's outermost pair of curly braces,
23233 and any blocks used for the base and member initializers of a C++
23234 constructor function. */
23235 tree outer_scope = DECL_INITIAL (decl);
23236 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23237 {
23238 int call_site_note_count = 0;
23239 int tail_call_site_note_count = 0;
23240
23241 /* Emit a DW_TAG_variable DIE for a named return value. */
23242 if (DECL_NAME (DECL_RESULT (decl)))
23243 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23244
23245 /* The first time through decls_for_scope we will generate the
23246 DIEs for the locals. The second time, we fill in the
23247 location info. */
23248 decls_for_scope (outer_scope, subr_die);
23249
23250 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23251 {
23252 struct call_arg_loc_node *ca_loc;
23253 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23254 {
23255 dw_die_ref die = NULL;
23256 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23257 rtx arg, next_arg;
23258 tree arg_decl = NULL_TREE;
23259
23260 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23261 ? XEXP (ca_loc->call_arg_loc_note, 0)
23262 : NULL_RTX);
23263 arg; arg = next_arg)
23264 {
23265 dw_loc_descr_ref reg, val;
23266 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23267 dw_die_ref cdie, tdie = NULL;
23268
23269 next_arg = XEXP (arg, 1);
23270 if (REG_P (XEXP (XEXP (arg, 0), 0))
23271 && next_arg
23272 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23273 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23274 && REGNO (XEXP (XEXP (arg, 0), 0))
23275 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23276 next_arg = XEXP (next_arg, 1);
23277 if (mode == VOIDmode)
23278 {
23279 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23280 if (mode == VOIDmode)
23281 mode = GET_MODE (XEXP (arg, 0));
23282 }
23283 if (mode == VOIDmode || mode == BLKmode)
23284 continue;
23285 /* Get dynamic information about call target only if we
23286 have no static information: we cannot generate both
23287 DW_AT_call_origin and DW_AT_call_target
23288 attributes. */
23289 if (ca_loc->symbol_ref == NULL_RTX)
23290 {
23291 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23292 {
23293 tloc = XEXP (XEXP (arg, 0), 1);
23294 continue;
23295 }
23296 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23297 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23298 {
23299 tlocc = XEXP (XEXP (arg, 0), 1);
23300 continue;
23301 }
23302 }
23303 reg = NULL;
23304 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23305 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23306 VAR_INIT_STATUS_INITIALIZED);
23307 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23308 {
23309 rtx mem = XEXP (XEXP (arg, 0), 0);
23310 reg = mem_loc_descriptor (XEXP (mem, 0),
23311 get_address_mode (mem),
23312 GET_MODE (mem),
23313 VAR_INIT_STATUS_INITIALIZED);
23314 }
23315 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23316 == DEBUG_PARAMETER_REF)
23317 {
23318 tree tdecl
23319 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23320 tdie = lookup_decl_die (tdecl);
23321 if (tdie == NULL)
23322 continue;
23323 arg_decl = tdecl;
23324 }
23325 else
23326 continue;
23327 if (reg == NULL
23328 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23329 != DEBUG_PARAMETER_REF)
23330 continue;
23331 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23332 VOIDmode,
23333 VAR_INIT_STATUS_INITIALIZED);
23334 if (val == NULL)
23335 continue;
23336 if (die == NULL)
23337 die = gen_call_site_die (decl, subr_die, ca_loc);
23338 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23339 NULL_TREE);
23340 add_desc_attribute (cdie, arg_decl);
23341 if (reg != NULL)
23342 add_AT_loc (cdie, DW_AT_location, reg);
23343 else if (tdie != NULL)
23344 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23345 tdie);
23346 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23347 if (next_arg != XEXP (arg, 1))
23348 {
23349 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23350 if (mode == VOIDmode)
23351 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23352 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23353 0), 1),
23354 mode, VOIDmode,
23355 VAR_INIT_STATUS_INITIALIZED);
23356 if (val != NULL)
23357 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23358 val);
23359 }
23360 }
23361 if (die == NULL
23362 && (ca_loc->symbol_ref || tloc))
23363 die = gen_call_site_die (decl, subr_die, ca_loc);
23364 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23365 {
23366 dw_loc_descr_ref tval = NULL;
23367
23368 if (tloc != NULL_RTX)
23369 tval = mem_loc_descriptor (tloc,
23370 GET_MODE (tloc) == VOIDmode
23371 ? Pmode : GET_MODE (tloc),
23372 VOIDmode,
23373 VAR_INIT_STATUS_INITIALIZED);
23374 if (tval)
23375 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23376 else if (tlocc != NULL_RTX)
23377 {
23378 tval = mem_loc_descriptor (tlocc,
23379 GET_MODE (tlocc) == VOIDmode
23380 ? Pmode : GET_MODE (tlocc),
23381 VOIDmode,
23382 VAR_INIT_STATUS_INITIALIZED);
23383 if (tval)
23384 add_AT_loc (die,
23385 dwarf_AT (DW_AT_call_target_clobbered),
23386 tval);
23387 }
23388 }
23389 if (die != NULL)
23390 {
23391 call_site_note_count++;
23392 if (ca_loc->tail_call_p)
23393 tail_call_site_note_count++;
23394 }
23395 }
23396 }
23397 call_arg_locations = NULL;
23398 call_arg_loc_last = NULL;
23399 if (tail_call_site_count >= 0
23400 && tail_call_site_count == tail_call_site_note_count
23401 && (!dwarf_strict || dwarf_version >= 5))
23402 {
23403 if (call_site_count >= 0
23404 && call_site_count == call_site_note_count)
23405 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23406 else
23407 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23408 }
23409 call_site_count = -1;
23410 tail_call_site_count = -1;
23411 }
23412
23413 /* Mark used types after we have created DIEs for the function's scopes. */
23414 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23415 }
23416
23417 /* Returns a hash value for X (which really is a die_struct). */
23418
23419 hashval_t
23420 block_die_hasher::hash (die_struct *d)
23421 {
23422 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23423 }
23424
23425 /* Return true if the decl_id and die_parent of die_struct X are the same
23426 as the decl_id and die_parent of die_struct Y. */
23427
23428 bool
23429 block_die_hasher::equal (die_struct *x, die_struct *y)
23430 {
23431 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23432 }
23433
23434 /* Hold information about markers for inlined entry points. */
23435 struct GTY ((for_user)) inline_entry_data
23436 {
23437 /* The block that's the inlined_function_outer_scope for an inlined
23438 function. */
23439 tree block;
23440
23441 /* The label at the inlined entry point. */
23442 const char *label_pfx;
23443 unsigned int label_num;
23444
23445 /* The view number to be used as the inlined entry point. */
23446 var_loc_view view;
23447 };
23448
23449 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23450 {
23451 typedef tree compare_type;
23452 static inline hashval_t hash (const inline_entry_data *);
23453 static inline bool equal (const inline_entry_data *, const_tree);
23454 };
23455
23456 /* Hash table routines for inline_entry_data. */
23457
23458 inline hashval_t
23459 inline_entry_data_hasher::hash (const inline_entry_data *data)
23460 {
23461 return htab_hash_pointer (data->block);
23462 }
23463
23464 inline bool
23465 inline_entry_data_hasher::equal (const inline_entry_data *data,
23466 const_tree block)
23467 {
23468 return data->block == block;
23469 }
23470
23471 /* Inlined entry points pending DIE creation in this compilation unit. */
23472
23473 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23474
23475
23476 /* Return TRUE if DECL, which may have been previously generated as
23477 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23478 true if decl (or its origin) is either an extern declaration or a
23479 class/namespace scoped declaration.
23480
23481 The declare_in_namespace support causes us to get two DIEs for one
23482 variable, both of which are declarations. We want to avoid
23483 considering one to be a specification, so we must test for
23484 DECLARATION and DW_AT_declaration. */
23485 static inline bool
23486 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23487 {
23488 return (old_die && TREE_STATIC (decl) && !declaration
23489 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23490 }
23491
23492 /* Return true if DECL is a local static. */
23493
23494 static inline bool
23495 local_function_static (tree decl)
23496 {
23497 gcc_assert (VAR_P (decl));
23498 return TREE_STATIC (decl)
23499 && DECL_CONTEXT (decl)
23500 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23501 }
23502
23503 /* Generate a DIE to represent a declared data object.
23504 Either DECL or ORIGIN must be non-null. */
23505
23506 static void
23507 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23508 {
23509 HOST_WIDE_INT off = 0;
23510 tree com_decl;
23511 tree decl_or_origin = decl ? decl : origin;
23512 tree ultimate_origin;
23513 dw_die_ref var_die;
23514 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23515 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23516 || class_or_namespace_scope_p (context_die));
23517 bool specialization_p = false;
23518 bool no_linkage_name = false;
23519
23520 /* While C++ inline static data members have definitions inside of the
23521 class, force the first DIE to be a declaration, then let gen_member_die
23522 reparent it to the class context and call gen_variable_die again
23523 to create the outside of the class DIE for the definition. */
23524 if (!declaration
23525 && old_die == NULL
23526 && decl
23527 && DECL_CONTEXT (decl)
23528 && TYPE_P (DECL_CONTEXT (decl))
23529 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23530 {
23531 declaration = true;
23532 if (dwarf_version < 5)
23533 no_linkage_name = true;
23534 }
23535
23536 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23537 if (decl || ultimate_origin)
23538 origin = ultimate_origin;
23539 com_decl = fortran_common (decl_or_origin, &off);
23540
23541 /* Symbol in common gets emitted as a child of the common block, in the form
23542 of a data member. */
23543 if (com_decl)
23544 {
23545 dw_die_ref com_die;
23546 dw_loc_list_ref loc = NULL;
23547 die_node com_die_arg;
23548
23549 var_die = lookup_decl_die (decl_or_origin);
23550 if (var_die)
23551 {
23552 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23553 {
23554 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23555 if (loc)
23556 {
23557 if (off)
23558 {
23559 /* Optimize the common case. */
23560 if (single_element_loc_list_p (loc)
23561 && loc->expr->dw_loc_opc == DW_OP_addr
23562 && loc->expr->dw_loc_next == NULL
23563 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23564 == SYMBOL_REF)
23565 {
23566 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23567 loc->expr->dw_loc_oprnd1.v.val_addr
23568 = plus_constant (GET_MODE (x), x , off);
23569 }
23570 else
23571 loc_list_plus_const (loc, off);
23572 }
23573 add_AT_location_description (var_die, DW_AT_location, loc);
23574 remove_AT (var_die, DW_AT_declaration);
23575 }
23576 }
23577 return;
23578 }
23579
23580 if (common_block_die_table == NULL)
23581 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23582
23583 com_die_arg.decl_id = DECL_UID (com_decl);
23584 com_die_arg.die_parent = context_die;
23585 com_die = common_block_die_table->find (&com_die_arg);
23586 if (! early_dwarf)
23587 loc = loc_list_from_tree (com_decl, 2, NULL);
23588 if (com_die == NULL)
23589 {
23590 const char *cnam
23591 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23592 die_node **slot;
23593
23594 com_die = new_die (DW_TAG_common_block, context_die, decl);
23595 add_name_and_src_coords_attributes (com_die, com_decl);
23596 if (loc)
23597 {
23598 add_AT_location_description (com_die, DW_AT_location, loc);
23599 /* Avoid sharing the same loc descriptor between
23600 DW_TAG_common_block and DW_TAG_variable. */
23601 loc = loc_list_from_tree (com_decl, 2, NULL);
23602 }
23603 else if (DECL_EXTERNAL (decl_or_origin))
23604 add_AT_flag (com_die, DW_AT_declaration, 1);
23605 if (want_pubnames ())
23606 add_pubname_string (cnam, com_die); /* ??? needed? */
23607 com_die->decl_id = DECL_UID (com_decl);
23608 slot = common_block_die_table->find_slot (com_die, INSERT);
23609 *slot = com_die;
23610 }
23611 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23612 {
23613 add_AT_location_description (com_die, DW_AT_location, loc);
23614 loc = loc_list_from_tree (com_decl, 2, NULL);
23615 remove_AT (com_die, DW_AT_declaration);
23616 }
23617 var_die = new_die (DW_TAG_variable, com_die, decl);
23618 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23619 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23620 decl_quals (decl_or_origin), false,
23621 context_die);
23622 add_alignment_attribute (var_die, decl);
23623 add_AT_flag (var_die, DW_AT_external, 1);
23624 if (loc)
23625 {
23626 if (off)
23627 {
23628 /* Optimize the common case. */
23629 if (single_element_loc_list_p (loc)
23630 && loc->expr->dw_loc_opc == DW_OP_addr
23631 && loc->expr->dw_loc_next == NULL
23632 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23633 {
23634 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23635 loc->expr->dw_loc_oprnd1.v.val_addr
23636 = plus_constant (GET_MODE (x), x, off);
23637 }
23638 else
23639 loc_list_plus_const (loc, off);
23640 }
23641 add_AT_location_description (var_die, DW_AT_location, loc);
23642 }
23643 else if (DECL_EXTERNAL (decl_or_origin))
23644 add_AT_flag (var_die, DW_AT_declaration, 1);
23645 if (decl)
23646 equate_decl_number_to_die (decl, var_die);
23647 return;
23648 }
23649
23650 if (old_die)
23651 {
23652 if (declaration)
23653 {
23654 /* A declaration that has been previously dumped needs no
23655 further annotations, since it doesn't need location on
23656 the second pass. */
23657 return;
23658 }
23659 else if (decl_will_get_specification_p (old_die, decl, declaration)
23660 && !get_AT (old_die, DW_AT_specification))
23661 {
23662 /* Fall-thru so we can make a new variable die along with a
23663 DW_AT_specification. */
23664 }
23665 else if (origin && old_die->die_parent != context_die)
23666 {
23667 /* If we will be creating an inlined instance, we need a
23668 new DIE that will get annotated with
23669 DW_AT_abstract_origin. */
23670 gcc_assert (!DECL_ABSTRACT_P (decl));
23671 }
23672 else
23673 {
23674 /* If a DIE was dumped early, it still needs location info.
23675 Skip to where we fill the location bits. */
23676 var_die = old_die;
23677
23678 /* ??? In LTRANS we cannot annotate early created variably
23679 modified type DIEs without copying them and adjusting all
23680 references to them. Thus we dump them again. Also add a
23681 reference to them, but beware of a -g0 compile and -g link,
23682 in which case the reference will already be present. */
23683 tree type = TREE_TYPE (decl_or_origin);
23684 if (in_lto_p
23685 && ! get_AT (var_die, DW_AT_type)
23686 && variably_modified_type_p
23687 (type, decl_function_context (decl_or_origin)))
23688 {
23689 if (decl_by_reference_p (decl_or_origin))
23690 add_type_attribute (var_die, TREE_TYPE (type),
23691 TYPE_UNQUALIFIED, false, context_die);
23692 else
23693 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23694 false, context_die);
23695 }
23696
23697 goto gen_variable_die_location;
23698 }
23699 }
23700
23701 /* For static data members, the declaration in the class is supposed
23702 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
23703 also in DWARF2; the specification should still be DW_TAG_variable
23704 referencing the DW_TAG_member DIE. */
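/* Illustrative sketch of that layout (example names only):

     struct S { static int m; };
     int S::m;

   yields, for DWARF versions before 5, roughly

     DW_TAG_structure_type "S"
       DW_TAG_member "m"    with DW_AT_declaration
     DW_TAG_variable        with DW_AT_specification -> the DW_TAG_member

   whereas with DWARF 5 the in-class declaration is itself a
   DW_TAG_variable.  */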
23705 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23706 var_die = new_die (DW_TAG_member, context_die, decl);
23707 else
23708 var_die = new_die (DW_TAG_variable, context_die, decl);
23709
23710 if (origin != NULL)
23711 add_abstract_origin_attribute (var_die, origin);
23712
23713 /* Loop unrolling can create multiple blocks that refer to the same
23714 static variable, so we must test for the DW_AT_declaration flag.
23715
23716 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23717 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23718 sharing them.
23719
23720 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23721 else if (decl_will_get_specification_p (old_die, decl, declaration))
23722 {
23723 /* This is a definition of a C++ class level static. */
23724 add_AT_specification (var_die, old_die);
23725 specialization_p = true;
23726 if (DECL_NAME (decl))
23727 {
23728 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23729 struct dwarf_file_data * file_index = lookup_filename (s.file);
23730
23731 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23732 add_AT_file (var_die, DW_AT_decl_file, file_index);
23733
23734 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23735 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23736
23737 if (debug_column_info
23738 && s.column
23739 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23740 != (unsigned) s.column))
23741 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23742
23743 if (old_die->die_tag == DW_TAG_member)
23744 add_linkage_name (var_die, decl);
23745 }
23746 }
23747 else
23748 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23749
23750 if ((origin == NULL && !specialization_p)
23751 || (origin != NULL
23752 && !DECL_ABSTRACT_P (decl_or_origin)
23753 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23754 decl_function_context
23755 (decl_or_origin))))
23756 {
23757 tree type = TREE_TYPE (decl_or_origin);
23758
23759 if (decl_by_reference_p (decl_or_origin))
23760 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23761 context_die);
23762 else
23763 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23764 context_die);
23765 }
23766
23767 if (origin == NULL && !specialization_p)
23768 {
23769 if (TREE_PUBLIC (decl))
23770 add_AT_flag (var_die, DW_AT_external, 1);
23771
23772 if (DECL_ARTIFICIAL (decl))
23773 add_AT_flag (var_die, DW_AT_artificial, 1);
23774
23775 add_alignment_attribute (var_die, decl);
23776
23777 add_accessibility_attribute (var_die, decl);
23778 }
23779
23780 if (declaration)
23781 add_AT_flag (var_die, DW_AT_declaration, 1);
23782
23783 if (decl && (DECL_ABSTRACT_P (decl)
23784 || !old_die || is_declaration_die (old_die)))
23785 equate_decl_number_to_die (decl, var_die);
23786
23787 gen_variable_die_location:
23788 if (! declaration
23789 && (! DECL_ABSTRACT_P (decl_or_origin)
23790 /* Local static vars are shared between all clones/inlines,
23791 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23792 already set. */
23793 || (VAR_P (decl_or_origin)
23794 && TREE_STATIC (decl_or_origin)
23795 && DECL_RTL_SET_P (decl_or_origin))))
23796 {
23797 if (early_dwarf)
23798 add_pubname (decl_or_origin, var_die);
23799 else
23800 add_location_or_const_value_attribute (var_die, decl_or_origin,
23801 decl == NULL);
23802 }
23803 else
23804 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23805
23806 if ((dwarf_version >= 4 || !dwarf_strict)
23807 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23808 DW_AT_const_expr) == 1
23809 && !get_AT (var_die, DW_AT_const_expr)
23810 && !specialization_p)
23811 add_AT_flag (var_die, DW_AT_const_expr, 1);
23812
23813 if (!dwarf_strict)
23814 {
23815 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23816 DW_AT_inline);
23817 if (inl != -1
23818 && !get_AT (var_die, DW_AT_inline)
23819 && !specialization_p)
23820 add_AT_unsigned (var_die, DW_AT_inline, inl);
23821 }
23822 }
23823
23824 /* Generate a DIE to represent a named constant. */
23825
23826 static void
23827 gen_const_die (tree decl, dw_die_ref context_die)
23828 {
23829 dw_die_ref const_die;
23830 tree type = TREE_TYPE (decl);
23831
23832 const_die = lookup_decl_die (decl);
23833 if (const_die)
23834 return;
23835
23836 const_die = new_die (DW_TAG_constant, context_die, decl);
23837 equate_decl_number_to_die (decl, const_die);
23838 add_name_and_src_coords_attributes (const_die, decl);
23839 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23840 if (TREE_PUBLIC (decl))
23841 add_AT_flag (const_die, DW_AT_external, 1);
23842 if (DECL_ARTIFICIAL (decl))
23843 add_AT_flag (const_die, DW_AT_artificial, 1);
23844 tree_add_const_value_attribute_for_decl (const_die, decl);
23845 }
23846
23847 /* Generate a DIE to represent a label identifier. */
23848
23849 static void
23850 gen_label_die (tree decl, dw_die_ref context_die)
23851 {
23852 tree origin = decl_ultimate_origin (decl);
23853 dw_die_ref lbl_die = lookup_decl_die (decl);
23854 rtx insn;
23855 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23856
23857 if (!lbl_die)
23858 {
23859 lbl_die = new_die (DW_TAG_label, context_die, decl);
23860 equate_decl_number_to_die (decl, lbl_die);
23861
23862 if (origin != NULL)
23863 add_abstract_origin_attribute (lbl_die, origin);
23864 else
23865 add_name_and_src_coords_attributes (lbl_die, decl);
23866 }
23867
23868 if (DECL_ABSTRACT_P (decl))
23869 equate_decl_number_to_die (decl, lbl_die);
23870 else if (! early_dwarf)
23871 {
23872 insn = DECL_RTL_IF_SET (decl);
23873
23874 /* Deleted labels are programmer specified labels which have been
23875 eliminated because of various optimizations. We still emit them
23876 here so that it is possible to put breakpoints on them. */
23877 if (insn
23878 && (LABEL_P (insn)
23879 || ((NOTE_P (insn)
23880 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23881 {
23882 /* When optimization is enabled (via -O) some parts of the compiler
23883 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23884 represent source-level labels which were explicitly declared by
23885 the user. This really shouldn't be happening though, so catch
23886 it if it ever does happen. */
23887 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23888
23889 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23890 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23891 }
23892 else if (insn
23893 && NOTE_P (insn)
23894 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23895 && CODE_LABEL_NUMBER (insn) != -1)
23896 {
23897 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23898 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23899 }
23900 }
23901 }
23902
23903 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23904 attributes to the DIE for a block STMT, to describe where the inlined
23905 function was called from. This is similar to add_src_coords_attributes. */
23906
23907 static inline void
23908 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23909 {
23910 /* We can end up with BUILTINS_LOCATION here. */
23911 if (RESERVED_LOCATION_P (BLOCK_SOURCE_LOCATION (stmt)))
23912 return;
23913
23914 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23915
23916 if (dwarf_version >= 3 || !dwarf_strict)
23917 {
23918 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23919 add_AT_unsigned (die, DW_AT_call_line, s.line);
23920 if (debug_column_info && s.column)
23921 add_AT_unsigned (die, DW_AT_call_column, s.column);
23922 }
23923 }
23924
23925
23926 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23927 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23928
23929 static inline void
23930 add_high_low_attributes (tree stmt, dw_die_ref die)
23931 {
23932 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23933
23934 if (inline_entry_data **iedp
23935 = !inline_entry_data_table ? NULL
23936 : inline_entry_data_table->find_slot_with_hash (stmt,
23937 htab_hash_pointer (stmt),
23938 NO_INSERT))
23939 {
23940 inline_entry_data *ied = *iedp;
23941 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
23942 gcc_assert (debug_inline_points);
23943 gcc_assert (inlined_function_outer_scope_p (stmt));
23944
23945 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
23946 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23947
23948 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
23949 && !dwarf_strict)
23950 {
23951 if (!output_asm_line_debug_info ())
23952 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
23953 else
23954 {
23955 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
23956 /* FIXME: this will resolve to a small number. Could we
23957 possibly emit smaller data? Ideally we'd emit a
23958 uleb128, but that would make the size of DIEs
23959 impossible for the compiler to compute, since it's
23960 the assembler that computes the value of the view
23961 label in this case. Ideally, we'd have a single form
23962 encompassing both the address and the view, and
23963 indirecting them through a table might make things
23964 easier, but even that would be more wasteful,
23965 space-wise, than what we have now. */
23966 add_AT_symview (die, DW_AT_GNU_entry_view, label);
23967 }
23968 }
23969
23970 inline_entry_data_table->clear_slot (iedp);
23971 }
23972
23973 if (BLOCK_FRAGMENT_CHAIN (stmt)
23974 && (dwarf_version >= 3 || !dwarf_strict))
23975 {
23976 tree chain, superblock = NULL_TREE;
23977 dw_die_ref pdie;
23978 dw_attr_node *attr = NULL;
23979
23980 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
23981 {
23982 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23983 BLOCK_NUMBER (stmt));
23984 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23985 }
23986
23987 /* Optimize duplicate .debug_ranges lists or even tails of
23988 lists. If this BLOCK has the same ranges as its supercontext,
23989 look up the DW_AT_ranges attribute in the supercontext (and
23990 recursively so), verify that the ranges_table contains the
23991 right values and use it instead of adding a new .debug_ranges entry. */
23992 for (chain = stmt, pdie = die;
23993 BLOCK_SAME_RANGE (chain);
23994 chain = BLOCK_SUPERCONTEXT (chain))
23995 {
23996 dw_attr_node *new_attr;
23997
23998 pdie = pdie->die_parent;
23999 if (pdie == NULL)
24000 break;
24001 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
24002 break;
24003 new_attr = get_AT (pdie, DW_AT_ranges);
24004 if (new_attr == NULL
24005 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
24006 break;
24007 attr = new_attr;
24008 superblock = BLOCK_SUPERCONTEXT (chain);
24009 }
24010 if (attr != NULL
24011 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
24012 == (int)BLOCK_NUMBER (superblock))
24013 && BLOCK_FRAGMENT_CHAIN (superblock))
24014 {
24015 unsigned long off = attr->dw_attr_val.v.val_offset;
24016 unsigned long supercnt = 0, thiscnt = 0;
24017 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
24018 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24019 {
24020 ++supercnt;
24021 gcc_checking_assert ((*ranges_table)[off + supercnt].num
24022 == (int)BLOCK_NUMBER (chain));
24023 }
24024 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
24025 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
24026 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24027 ++thiscnt;
24028 gcc_assert (supercnt >= thiscnt);
24029 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24030 false);
24031 note_rnglist_head (off + supercnt - thiscnt);
24032 return;
24033 }
24034
24035 unsigned int offset = add_ranges (stmt, true);
24036 add_AT_range_list (die, DW_AT_ranges, offset, false);
24037 note_rnglist_head (offset);
24038
24039 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24040 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24041 do
24042 {
24043 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24044 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24045 chain = BLOCK_FRAGMENT_CHAIN (chain);
24046 }
24047 while (chain);
24048 add_ranges (NULL);
24049 }
24050 else
24051 {
24052 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24053 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24054 BLOCK_NUMBER (stmt));
24055 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24056 BLOCK_NUMBER (stmt));
24057 add_AT_low_high_pc (die, label, label_high, false);
24058 }
24059 }
24060
24061 /* Generate a DIE for a lexical block. */
24062
24063 static void
24064 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24065 {
24066 dw_die_ref old_die = lookup_block_die (stmt);
24067 dw_die_ref stmt_die = NULL;
24068 if (!old_die)
24069 {
24070 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24071 equate_block_to_die (stmt, stmt_die);
24072 }
24073
24074 if (BLOCK_ABSTRACT_ORIGIN (stmt))
24075 {
24076 /* If this is an inlined or concrete instance, create a new lexical
24077 DIE for anything below to attach DW_AT_abstract_origin to. */
24078 if (old_die)
24079 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24080
24081 tree origin = block_ultimate_origin (stmt);
24082 if (origin != NULL_TREE && (origin != stmt || old_die))
24083 add_abstract_origin_attribute (stmt_die, origin);
24084
24085 old_die = NULL;
24086 }
24087
24088 if (old_die)
24089 stmt_die = old_die;
24090
24091 /* A non-abstract block whose blocks have already been reordered
24092 should have the instruction range for this block. If so, set the
24093 high/low attributes. */
24094 if (!early_dwarf && TREE_ASM_WRITTEN (stmt))
24095 {
24096 gcc_assert (stmt_die);
24097 add_high_low_attributes (stmt, stmt_die);
24098 }
24099
24100 decls_for_scope (stmt, stmt_die);
24101 }
24102
24103 /* Generate a DIE for an inlined subprogram. */
24104
24105 static void
24106 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24107 {
24108 tree decl = block_ultimate_origin (stmt);
24109
24110 /* Make sure any inlined functions are known to be inlineable. */
24111 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24112 || cgraph_function_possibly_inlined_p (decl));
24113
24114 dw_die_ref subr_die = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24115
24116 if (call_arg_locations || debug_inline_points)
24117 equate_block_to_die (stmt, subr_die);
24118 add_abstract_origin_attribute (subr_die, decl);
24119 if (TREE_ASM_WRITTEN (stmt))
24120 add_high_low_attributes (stmt, subr_die);
24121 add_call_src_coords_attributes (stmt, subr_die);
24122
24123 /* The inliner creates an extra BLOCK for the parameter setup,
24124 we want to merge that with the actual outermost BLOCK of the
24125 inlined function to avoid duplicate locals in consumers.
24126 Do that by doing the recursion to subblocks on the single subblock
24127 of STMT. */
24128 bool unwrap_one = false;
24129 if (BLOCK_SUBBLOCKS (stmt) && !BLOCK_CHAIN (BLOCK_SUBBLOCKS (stmt)))
24130 {
24131 tree origin = block_ultimate_origin (BLOCK_SUBBLOCKS (stmt));
24132 if (origin
24133 && TREE_CODE (origin) == BLOCK
24134 && BLOCK_SUPERCONTEXT (origin) == decl)
24135 unwrap_one = true;
24136 }
24137 decls_for_scope (stmt, subr_die, !unwrap_one);
24138 if (unwrap_one)
24139 decls_for_scope (BLOCK_SUBBLOCKS (stmt), subr_die);
24140 }
24141
24142 /* Generate a DIE for a field in a record or structure. CTX is required: see
24143 the comment for VLR_CONTEXT. */
24144
24145 static void
24146 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24147 {
24148 dw_die_ref decl_die;
24149
24150 if (TREE_TYPE (decl) == error_mark_node)
24151 return;
24152
24153 decl_die = new_die (DW_TAG_member, context_die, decl);
24154 add_name_and_src_coords_attributes (decl_die, decl);
24155 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24156 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24157 context_die);
24158
24159 if (DECL_BIT_FIELD_TYPE (decl))
24160 {
24161 add_byte_size_attribute (decl_die, decl);
24162 add_bit_size_attribute (decl_die, decl);
24163 add_bit_offset_attribute (decl_die, decl, ctx);
24164 }
24165
24166 add_alignment_attribute (decl_die, decl);
24167
24168 /* If we have a variant part offset, then we are supposed to process a member
24169 of a QUAL_UNION_TYPE, which is how we represent variant parts in
24170 trees. */
24171 gcc_assert (ctx->variant_part_offset == NULL_TREE
24172 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24173 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24174 add_data_member_location_attribute (decl_die, decl, ctx);
24175
24176 if (DECL_ARTIFICIAL (decl))
24177 add_AT_flag (decl_die, DW_AT_artificial, 1);
24178
24179 add_accessibility_attribute (decl_die, decl);
24180
24181 /* Equate decl number to die, so that we can look up this decl later on. */
24182 equate_decl_number_to_die (decl, decl_die);
24183 }
24184
24185 /* Generate a DIE for a pointer to a member type. TYPE can be an
24186 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24187 pointer to member function. */
24188
24189 static void
24190 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24191 {
24192 if (lookup_type_die (type))
24193 return;
24194
24195 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24196 scope_die_for (type, context_die), type);
24197
24198 equate_type_number_to_die (type, ptr_die);
24199 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24200 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24201 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24202 context_die);
24203 add_alignment_attribute (ptr_die, type);
24204
24205 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24206 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24207 {
24208 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24209 add_AT_loc (ptr_die, DW_AT_use_location, op);
24210 }
24211 }
24212
24213 static char *producer_string;
24214
24215 /* Return a heap-allocated producer string including command-line options
24216 if -grecord-gcc-switches is in effect. */
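/* For illustration only: with -grecord-gcc-switches in effect the result
   typically looks like
   "GNU C17 8.2.0 -mtune=generic -march=x86-64 -g -O2",
   i.e. the language name, the compiler version string, and the recorded
   options, with driver and diagnostic options such as -o or -quiet
   filtered out below.  */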
24217
24218 static char *
24219 gen_producer_string (void)
24220 {
24221 size_t j;
24222 auto_vec<const char *> switches;
24223 const char *language_string = lang_hooks.name;
24224 char *producer, *tail;
24225 const char *p;
24226 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24227 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24228
24229 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24230 switch (save_decoded_options[j].opt_index)
24231 {
24232 case OPT_o:
24233 case OPT_d:
24234 case OPT_dumpbase:
24235 case OPT_dumpdir:
24236 case OPT_auxbase:
24237 case OPT_auxbase_strip:
24238 case OPT_quiet:
24239 case OPT_version:
24240 case OPT_v:
24241 case OPT_w:
24242 case OPT_L:
24243 case OPT_D:
24244 case OPT_I:
24245 case OPT_U:
24246 case OPT_SPECIAL_unknown:
24247 case OPT_SPECIAL_ignore:
24248 case OPT_SPECIAL_deprecated:
24249 case OPT_SPECIAL_program_name:
24250 case OPT_SPECIAL_input_file:
24251 case OPT_grecord_gcc_switches:
24252 case OPT__output_pch_:
24253 case OPT_fdiagnostics_show_location_:
24254 case OPT_fdiagnostics_show_option:
24255 case OPT_fdiagnostics_show_caret:
24256 case OPT_fdiagnostics_show_labels:
24257 case OPT_fdiagnostics_show_line_numbers:
24258 case OPT_fdiagnostics_color_:
24259 case OPT_fverbose_asm:
24260 case OPT____:
24261 case OPT__sysroot_:
24262 case OPT_nostdinc:
24263 case OPT_nostdinc__:
24264 case OPT_fpreprocessed:
24265 case OPT_fltrans_output_list_:
24266 case OPT_fresolution_:
24267 case OPT_fdebug_prefix_map_:
24268 case OPT_fmacro_prefix_map_:
24269 case OPT_ffile_prefix_map_:
24270 case OPT_fcompare_debug:
24271 case OPT_fchecking:
24272 case OPT_fchecking_:
24273 /* Ignore these. */
24274 continue;
24275 default:
24276 if (cl_options[save_decoded_options[j].opt_index].flags
24277 & CL_NO_DWARF_RECORD)
24278 continue;
24279 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24280 == '-');
24281 switch (save_decoded_options[j].canonical_option[0][1])
24282 {
24283 case 'M':
24284 case 'i':
24285 case 'W':
24286 continue;
24287 case 'f':
24288 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24289 "dump", 4) == 0)
24290 continue;
24291 break;
24292 default:
24293 break;
24294 }
24295 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24296 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24297 break;
24298 }
24299
24300 producer = XNEWVEC (char, plen + 1 + len + 1);
24301 tail = producer;
24302 sprintf (tail, "%s %s", language_string, version_string);
24303 tail += plen;
24304
24305 FOR_EACH_VEC_ELT (switches, j, p)
24306 {
24307 len = strlen (p);
24308 *tail = ' ';
24309 memcpy (tail + 1, p, len);
24310 tail += len + 1;
24311 }
24312
24313 *tail = '\0';
24314 return producer;
24315 }
24316
24317 /* Given a C and/or C++ language/version string return the "highest".
24318 C++ is assumed to be "higher" than C in this case. Used for merging
24319 LTO translation unit languages. */
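/* For example (illustrative): highest_c_language ("GNU C11", "GNU C++14")
   returns "GNU C++14", and highest_c_language ("GNU C89", "GNU C99")
   returns "GNU C99".  */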
24320 static const char *
24321 highest_c_language (const char *lang1, const char *lang2)
24322 {
24323 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24324 return "GNU C++17";
24325 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24326 return "GNU C++14";
24327 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24328 return "GNU C++11";
24329 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24330 return "GNU C++98";
24331
24332 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24333 return "GNU C17";
24334 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24335 return "GNU C11";
24336 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24337 return "GNU C99";
24338 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24339 return "GNU C89";
24340
24341 gcc_unreachable ();
24342 }
24343
24344
24345 /* Generate the DIE for the compilation unit. */
24346
24347 static dw_die_ref
24348 gen_compile_unit_die (const char *filename)
24349 {
24350 dw_die_ref die;
24351 const char *language_string = lang_hooks.name;
24352 int language;
24353
24354 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24355
24356 if (filename)
24357 {
24358 add_name_attribute (die, filename);
24359 /* Don't add cwd for <built-in>. */
24360 if (filename[0] != '<')
24361 add_comp_dir_attribute (die);
24362 }
24363
24364 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24365
24366 /* If our producer is LTO, try to figure out a common language to use
24367 from the global list of translation units. */
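/* For instance (illustrative), merging a "GNU C99" unit with a
   "GNU C++14" unit yields "GNU C++14"; merging C with a non-C language
   gives up and falls back to plain DW_LANG_C below.  */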
24368 if (strcmp (language_string, "GNU GIMPLE") == 0)
24369 {
24370 unsigned i;
24371 tree t;
24372 const char *common_lang = NULL;
24373
24374 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24375 {
24376 if (!TRANSLATION_UNIT_LANGUAGE (t))
24377 continue;
24378 if (!common_lang)
24379 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24380 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24381 ;
24382 else if (strncmp (common_lang, "GNU C", 5) == 0
24383 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24384 /* Mixing C and C++ is ok, use C++ in that case. */
24385 common_lang = highest_c_language (common_lang,
24386 TRANSLATION_UNIT_LANGUAGE (t));
24387 else
24388 {
24389 /* Fall back to C. */
24390 common_lang = NULL;
24391 break;
24392 }
24393 }
24394
24395 if (common_lang)
24396 language_string = common_lang;
24397 }
24398
24399 language = DW_LANG_C;
24400 if (strncmp (language_string, "GNU C", 5) == 0
24401 && ISDIGIT (language_string[5]))
24402 {
24403 language = DW_LANG_C89;
24404 if (dwarf_version >= 3 || !dwarf_strict)
24405 {
24406 if (strcmp (language_string, "GNU C89") != 0)
24407 language = DW_LANG_C99;
24408
24409 if (dwarf_version >= 5 /* || !dwarf_strict */)
24410 if (strcmp (language_string, "GNU C11") == 0
24411 || strcmp (language_string, "GNU C17") == 0)
24412 language = DW_LANG_C11;
24413 }
24414 }
24415 else if (strncmp (language_string, "GNU C++", 7) == 0)
24416 {
24417 language = DW_LANG_C_plus_plus;
24418 if (dwarf_version >= 5 /* || !dwarf_strict */)
24419 {
24420 if (strcmp (language_string, "GNU C++11") == 0)
24421 language = DW_LANG_C_plus_plus_11;
24422 else if (strcmp (language_string, "GNU C++14") == 0)
24423 language = DW_LANG_C_plus_plus_14;
24424 else if (strcmp (language_string, "GNU C++17") == 0)
24425 /* For now. */
24426 language = DW_LANG_C_plus_plus_14;
24427 }
24428 }
24429 else if (strcmp (language_string, "GNU F77") == 0)
24430 language = DW_LANG_Fortran77;
24431 else if (dwarf_version >= 3 || !dwarf_strict)
24432 {
24433 if (strcmp (language_string, "GNU Ada") == 0)
24434 language = DW_LANG_Ada95;
24435 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24436 {
24437 language = DW_LANG_Fortran95;
24438 if (dwarf_version >= 5 /* || !dwarf_strict */)
24439 {
24440 if (strcmp (language_string, "GNU Fortran2003") == 0)
24441 language = DW_LANG_Fortran03;
24442 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24443 language = DW_LANG_Fortran08;
24444 }
24445 }
24446 else if (strcmp (language_string, "GNU Objective-C") == 0)
24447 language = DW_LANG_ObjC;
24448 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24449 language = DW_LANG_ObjC_plus_plus;
24450 else if (dwarf_version >= 5 || !dwarf_strict)
24451 {
24452 if (strcmp (language_string, "GNU Go") == 0)
24453 language = DW_LANG_Go;
24454 }
24455 }
24456 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24457 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24458 language = DW_LANG_Fortran90;
24459 /* Likewise for Ada. */
24460 else if (strcmp (language_string, "GNU Ada") == 0)
24461 language = DW_LANG_Ada83;
24462
24463 add_AT_unsigned (die, DW_AT_language, language);
24464
24465 switch (language)
24466 {
24467 case DW_LANG_Fortran77:
24468 case DW_LANG_Fortran90:
24469 case DW_LANG_Fortran95:
24470 case DW_LANG_Fortran03:
24471 case DW_LANG_Fortran08:
24472 /* Fortran has case-insensitive identifiers and the front-end
24473 lowercases everything. */
24474 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24475 break;
24476 default:
24477 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24478 break;
24479 }
24480 return die;
24481 }
24482
24483 /* Generate the DIE for a base class. */
24484
24485 static void
24486 gen_inheritance_die (tree binfo, tree access, tree type,
24487 dw_die_ref context_die)
24488 {
24489 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24490 struct vlr_context ctx = { type, NULL };
24491
24492 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24493 context_die);
24494 add_data_member_location_attribute (die, binfo, &ctx);
24495
24496 if (BINFO_VIRTUAL_P (binfo))
24497 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24498
24499 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24500 children, otherwise the default is DW_ACCESS_public. In DWARF2
24501 the default has always been DW_ACCESS_private. */
24502 if (access == access_public_node)
24503 {
24504 if (dwarf_version == 2
24505 || context_die->die_tag == DW_TAG_class_type)
24506 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24507 }
24508 else if (access == access_protected_node)
24509 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24510 else if (dwarf_version > 2
24511 && context_die->die_tag != DW_TAG_class_type)
24512 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24513 }
24514
24515 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24516 structure. */
24517 static bool
24518 is_variant_part (tree decl)
24519 {
24520 return (TREE_CODE (decl) == FIELD_DECL
24521 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24522 }
24523
24524 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24525 return the FIELD_DECL. Return NULL_TREE otherwise. */
24526
24527 static tree
24528 analyze_discr_in_predicate (tree operand, tree struct_type)
24529 {
24530 bool continue_stripping = true;
24531 while (continue_stripping)
24532 switch (TREE_CODE (operand))
24533 {
24534 CASE_CONVERT:
24535 operand = TREE_OPERAND (operand, 0);
24536 break;
24537 default:
24538 continue_stripping = false;
24539 break;
24540 }
24541
24542 /* Match field access to members of struct_type only. */
24543 if (TREE_CODE (operand) == COMPONENT_REF
24544 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24545 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24546 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24547 return TREE_OPERAND (operand, 1);
24548 else
24549 return NULL_TREE;
24550 }
24551
24552 /* Check that SRC is a constant integer that can be represented as a native
24553 integer constant (either signed or unsigned). If so, store it into DEST and
24554 return true. Return false otherwise. */
24555
24556 static bool
24557 get_discr_value (tree src, dw_discr_value *dest)
24558 {
24559 tree discr_type = TREE_TYPE (src);
24560
24561 if (lang_hooks.types.get_debug_type)
24562 {
24563 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24564 if (debug_type != NULL)
24565 discr_type = debug_type;
24566 }
24567
24568 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24569 return false;
24570
24571 /* Signedness can vary between the original type and the debug type. This
24572 can happen for character types in Ada for instance: the character type
24573 used for code generation can be signed, to be compatible with the C one,
24574 but from a debugger point of view, it must be unsigned. */
24575 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24576 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24577
24578 if (is_orig_unsigned != is_debug_unsigned)
24579 src = fold_convert (discr_type, src);
24580
24581 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24582 return false;
24583
24584 dest->pos = is_debug_unsigned;
24585 if (is_debug_unsigned)
24586 dest->v.uval = tree_to_uhwi (src);
24587 else
24588 dest->v.sval = tree_to_shwi (src);
24589
24590 return true;
24591 }
24592
24593 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24594 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24595 store NULL_TREE in DISCR_DECL. Otherwise:
24596
24597 - store the discriminant field in STRUCT_TYPE that controls the variant
24598 part to *DISCR_DECL
24599
24600 - put in *DISCR_LISTS_P an array in which, for each variant, the item
24601 represents the corresponding list of matching discriminant values.
24602
24603 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24604 the above array.
24605
24606 Note that when the array is allocated (i.e. when the analysis is
24607 successful), it is up to the caller to free the array. */
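/* Sketch of the analysis (illustrative, not tied to any particular
   front-end): for a variant whose DECL_QUALIFIER is

     (PLACEHOLDER_EXPR).kind == 1
     || ((PLACEHOLDER_EXPR).kind >= 5 && (PLACEHOLDER_EXPR).kind <= 9)

   the code below sets *DISCR_DECL to the FIELD_DECL for "kind" and builds
   a two-node list for that variant: the single value 1 and the range
   5 .. 9.  A DECL_QUALIFIER of boolean_true_node (the default variant)
   contributes an empty list.  */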
24608
24609 static void
24610 analyze_variants_discr (tree variant_part_decl,
24611 tree struct_type,
24612 tree *discr_decl,
24613 dw_discr_list_ref **discr_lists_p,
24614 unsigned *discr_lists_length)
24615 {
24616 tree variant_part_type = TREE_TYPE (variant_part_decl);
24617 tree variant;
24618 dw_discr_list_ref *discr_lists;
24619 unsigned i;
24620
24621 /* Compute how many variants there are in this variant part. */
24622 *discr_lists_length = 0;
24623 for (variant = TYPE_FIELDS (variant_part_type);
24624 variant != NULL_TREE;
24625 variant = DECL_CHAIN (variant))
24626 ++*discr_lists_length;
24627
24628 *discr_decl = NULL_TREE;
24629 *discr_lists_p
24630 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24631 sizeof (**discr_lists_p));
24632 discr_lists = *discr_lists_p;
24633
24634 /* And then analyze all variants to extract discriminant information for all
24635 of them. This analysis is conservative: as soon as we detect something we
24636 do not support, abort everything and pretend we found nothing. */
24637 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24638 variant != NULL_TREE;
24639 variant = DECL_CHAIN (variant), ++i)
24640 {
24641 tree match_expr = DECL_QUALIFIER (variant);
24642
24643 /* Now, try to analyze the predicate and deduce a discriminant for
24644 it. */
24645 if (match_expr == boolean_true_node)
24646 /* Typically happens for the default variant: it matches all cases that
24647 previous variants rejected. Don't output any matching value for
24648 this one. */
24649 continue;
24650
24651 /* The following loop tries to iterate over each discriminant
24652 possibility: single values or ranges. */
24653 while (match_expr != NULL_TREE)
24654 {
24655 tree next_round_match_expr;
24656 tree candidate_discr = NULL_TREE;
24657 dw_discr_list_ref new_node = NULL;
24658
24659 /* Possibilities are matched one after the other by nested
24660 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24661 continue with the rest at next iteration. */
24662 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24663 {
24664 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24665 match_expr = TREE_OPERAND (match_expr, 1);
24666 }
24667 else
24668 next_round_match_expr = NULL_TREE;
24669
24670 if (match_expr == boolean_false_node)
24671 /* This sub-expression matches nothing: just wait for the next
24672 one. */
24673 ;
24674
24675 else if (TREE_CODE (match_expr) == EQ_EXPR)
24676 {
24677 /* We are matching: <discr_field> == <integer_cst>
24678 This sub-expression matches a single value. */
24679 tree integer_cst = TREE_OPERAND (match_expr, 1);
24680
24681 candidate_discr
24682 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24683 struct_type);
24684
24685 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24686 if (!get_discr_value (integer_cst,
24687 &new_node->dw_discr_lower_bound))
24688 goto abort;
24689 new_node->dw_discr_range = false;
24690 }
24691
24692 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24693 {
24694 /* We are matching:
24695 <discr_field> > <integer_cst>
24696 && <discr_field> < <integer_cst>.
24697 This sub-expression matches the range of values between the
24698 two matched integer constants. Note that comparisons can be
24699 inclusive or exclusive. */
24700 tree candidate_discr_1, candidate_discr_2;
24701 tree lower_cst, upper_cst;
24702 bool lower_cst_included, upper_cst_included;
24703 tree lower_op = TREE_OPERAND (match_expr, 0);
24704 tree upper_op = TREE_OPERAND (match_expr, 1);
24705
24706 /* When the comparison is exclusive, the integer constant is not
24707 the discriminant range bound we are looking for: we will have
24708 to increment or decrement it. */
24709 if (TREE_CODE (lower_op) == GE_EXPR)
24710 lower_cst_included = true;
24711 else if (TREE_CODE (lower_op) == GT_EXPR)
24712 lower_cst_included = false;
24713 else
24714 goto abort;
24715
24716 if (TREE_CODE (upper_op) == LE_EXPR)
24717 upper_cst_included = true;
24718 else if (TREE_CODE (upper_op) == LT_EXPR)
24719 upper_cst_included = false;
24720 else
24721 goto abort;
24722
24723 /* Extract the discriminant from the first operand and check it
24724 is consistent with the same analysis in the second
24725 operand. */
24726 candidate_discr_1
24727 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24728 struct_type);
24729 candidate_discr_2
24730 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24731 struct_type);
24732 if (candidate_discr_1 == candidate_discr_2)
24733 candidate_discr = candidate_discr_1;
24734 else
24735 goto abort;
24736
24737 /* Extract bounds from both. */
24738 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24739 lower_cst = TREE_OPERAND (lower_op, 1);
24740 upper_cst = TREE_OPERAND (upper_op, 1);
24741
24742 if (!lower_cst_included)
24743 lower_cst
24744 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24745 build_int_cst (TREE_TYPE (lower_cst), 1));
24746 if (!upper_cst_included)
24747 upper_cst
24748 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24749 build_int_cst (TREE_TYPE (upper_cst), 1));
24750
24751 if (!get_discr_value (lower_cst,
24752 &new_node->dw_discr_lower_bound)
24753 || !get_discr_value (upper_cst,
24754 &new_node->dw_discr_upper_bound))
24755 goto abort;
24756
24757 new_node->dw_discr_range = true;
24758 }
24759
24760 else
24761 /* Unsupported sub-expression: we cannot determine the set of
24762 matching discriminant values. Abort everything. */
24763 goto abort;
24764
24765 /* If the discriminant info is not consistent with what we saw so
24766 far, consider the analysis failed and abort everything. */
24767 if (candidate_discr == NULL_TREE
24768 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24769 goto abort;
24770 else
24771 *discr_decl = candidate_discr;
24772
24773 if (new_node != NULL)
24774 {
24775 new_node->dw_discr_next = discr_lists[i];
24776 discr_lists[i] = new_node;
24777 }
24778 match_expr = next_round_match_expr;
24779 }
24780 }
24781
24782 /* If we reach this point, we could match everything we were interested
24783 in. */
24784 return;
24785
24786 abort:
24787 /* Free all data structures and return no result. */
24788 free (*discr_lists_p);
24789 *discr_lists_p = NULL;
24790 *discr_decl = NULL_TREE;
24791 }
24792
24793 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24794 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24795 under CONTEXT_DIE.
24796
24797 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24798 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24799 this type, which are record types, represent the available variants and each
24800 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24801 values are inferred from these attributes.
24802
24803 In trees, the offsets for the fields inside these sub-records are relative
24804 to the variant part itself, whereas the corresponding DIEs should have
24805 offset attributes that are relative to the embedding record base address.
24806 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24807 must be an expression that computes the offset of the variant part to
24808 describe in DWARF. */
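/* Sketch of the DIE subtree this function emits (illustrative only):

     DW_TAG_variant_part
       DW_AT_discr           reference to the discriminant member's DIE
       DW_TAG_variant
         DW_AT_discr_value   (or DW_AT_discr_list for several values/ranges)
         DW_TAG_member ...   one DIE per field of that variant
       DW_TAG_variant        default variant: no discr attribute at all
         DW_TAG_member ...

   Nested variant parts recurse into further DW_TAG_variant_part DIEs.  */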
24809
24810 static void
24811 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24812 dw_die_ref context_die)
24813 {
24814 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24815 tree variant_part_offset = vlr_ctx->variant_part_offset;
24816 struct loc_descr_context ctx = {
24817 vlr_ctx->struct_type, /* context_type */
24818 NULL_TREE, /* base_decl */
24819 NULL, /* dpi */
24820 false, /* placeholder_arg */
24821 false /* placeholder_seen */
24822 };
24823
24824 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24825 NULL_TREE if there is no such field. */
24826 tree discr_decl = NULL_TREE;
24827 dw_discr_list_ref *discr_lists;
24828 unsigned discr_lists_length = 0;
24829 unsigned i;
24830
24831 dw_die_ref dwarf_proc_die = NULL;
24832 dw_die_ref variant_part_die
24833 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24834
24835 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24836
24837 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24838 &discr_decl, &discr_lists, &discr_lists_length);
24839
24840 if (discr_decl != NULL_TREE)
24841 {
24842 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24843
24844 if (discr_die)
24845 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24846 else
24847 /* We have no DIE for the discriminant, so just discard all
24848 discriminant information in the output. */
24849 discr_decl = NULL_TREE;
24850 }
24851
24852 /* If the offset for this variant part is more complex than a constant,
24853 create a DWARF procedure for it so that we will not have to generate DWARF
24854 expressions for it for each member. */
24855 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24856 && (dwarf_version >= 3 || !dwarf_strict))
24857 {
24858 const tree dwarf_proc_fndecl
24859 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24860 build_function_type (TREE_TYPE (variant_part_offset),
24861 NULL_TREE));
24862 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24863 const dw_loc_descr_ref dwarf_proc_body
24864 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24865
24866 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24867 dwarf_proc_fndecl, context_die);
24868 if (dwarf_proc_die != NULL)
24869 variant_part_offset = dwarf_proc_call;
24870 }
24871
24872 /* Output DIEs for all variants. */
24873 i = 0;
24874 for (tree variant = TYPE_FIELDS (variant_part_type);
24875 variant != NULL_TREE;
24876 variant = DECL_CHAIN (variant), ++i)
24877 {
24878 tree variant_type = TREE_TYPE (variant);
24879 dw_die_ref variant_die;
24880
24881 /* All variants (i.e. members of a variant part) are supposed to be
24882 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24883 under these records. */
24884 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24885
24886 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24887 equate_decl_number_to_die (variant, variant_die);
24888
24889 /* Output discriminant values this variant matches, if any. */
24890 if (discr_decl == NULL || discr_lists[i] == NULL)
24891 /* If we have no discriminant information at all, this is
24892 probably the default variant: as the standard says, don't
24893 output any discriminant value/list attribute. */
24894 ;
24895 else if (discr_lists[i]->dw_discr_next == NULL
24896 && !discr_lists[i]->dw_discr_range)
24897 /* If there is only one accepted value, don't bother outputting a
24898 list. */
24899 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24900 else
24901 add_discr_list (variant_die, discr_lists[i]);
24902
24903 for (tree member = TYPE_FIELDS (variant_type);
24904 member != NULL_TREE;
24905 member = DECL_CHAIN (member))
24906 {
24907 struct vlr_context vlr_sub_ctx = {
24908 vlr_ctx->struct_type, /* struct_type */
24909 NULL /* variant_part_offset */
24910 };
24911 if (is_variant_part (member))
24912 {
24913 /* All offsets for fields inside variant parts are relative to
24914 the top-level embedding RECORD_TYPE's base address. On the
24915 other hand, offsets in GCC's types are relative to the
24916 nested-most variant part. So we have to sum offsets each time
24917 we recurse. */
24918
24919 vlr_sub_ctx.variant_part_offset
24920 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
24921 variant_part_offset, byte_position (member));
24922 gen_variant_part (member, &vlr_sub_ctx, variant_die);
24923 }
24924 else
24925 {
24926 vlr_sub_ctx.variant_part_offset = variant_part_offset;
24927 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
24928 }
24929 }
24930 }
24931
24932 free (discr_lists);
24933 }
24934
24935 /* Generate a DIE for a class member. */
24936
24937 static void
24938 gen_member_die (tree type, dw_die_ref context_die)
24939 {
24940 tree member;
24941 tree binfo = TYPE_BINFO (type);
24942
24943 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
24944
24945 /* If this is not an incomplete type, output descriptions of each of its
24946 members. Note that as we output the DIEs necessary to represent the
24947 members of this record or union type, we will also be trying to output
24948 DIEs to represent the *types* of those members. However the `type'
24949 function (above) will specifically avoid generating type DIEs for member
24950 types *within* the list of member DIEs for this (containing) type except
24951 for those types (of members) which are explicitly marked as also being
24952 members of this (containing) type themselves. The g++ front-end can
24953 force any given type to be treated as a member of some other (containing)
24954 type by setting the TYPE_CONTEXT of the given (member) type to point to
24955 the TREE node representing the appropriate (containing) type. */
24956
24957 /* First output info about the base classes. */
24958 if (binfo)
24959 {
24960 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
24961 int i;
24962 tree base;
24963
24964 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
24965 gen_inheritance_die (base,
24966 (accesses ? (*accesses)[i] : access_public_node),
24967 type,
24968 context_die);
24969 }
24970
24971 /* Now output info about the data members and type members. */
24972 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
24973 {
24974 struct vlr_context vlr_ctx = { type, NULL_TREE };
24975 bool static_inline_p
24976 = (TREE_STATIC (member)
24977 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
24978 != -1));
24979
24980 /* Ignore clones. */
24981 if (DECL_ABSTRACT_ORIGIN (member))
24982 continue;
24983
24984 /* If we thought we were generating minimal debug info for TYPE
24985 and then changed our minds, some of the member declarations
24986 may have already been defined. Don't define them again, but
24987 do put them in the right order. */
24988
24989 if (dw_die_ref child = lookup_decl_die (member))
24990 {
24991 /* Handle inline static data members, which only have in-class
24992 declarations. */
24993 dw_die_ref ref = NULL;
24994 if (child->die_tag == DW_TAG_variable
24995 && child->die_parent == comp_unit_die ())
24996 {
24997 ref = get_AT_ref (child, DW_AT_specification);
24998 /* For C++17 inline static data members followed by redundant
24999 out of class redeclaration, we might get here with
25000 child being the DIE created for the out of class
25001 redeclaration and with its DW_AT_specification being
25002 the DIE created for in-class definition. We want to
25003 reparent the latter, and don't want to create another
25004 DIE with DW_AT_specification in that case, because
25005 we already have one. */
25006 if (ref
25007 && static_inline_p
25008 && ref->die_tag == DW_TAG_variable
25009 && ref->die_parent == comp_unit_die ()
25010 && get_AT (ref, DW_AT_specification) == NULL)
25011 {
25012 child = ref;
25013 ref = NULL;
25014 static_inline_p = false;
25015 }
25016 }
25017
25018 if (child->die_tag == DW_TAG_variable
25019 && child->die_parent == comp_unit_die ()
25020 && ref == NULL)
25021 {
25022 reparent_child (child, context_die);
25023 if (dwarf_version < 5)
25024 child->die_tag = DW_TAG_member;
25025 }
25026 else
25027 splice_child_die (context_die, child);
25028 }
25029
25030 /* Do not generate standard DWARF for variant parts if we are generating
25031 the corresponding GNAT encodings: DIEs generated for both would
25032 conflict in our mappings. */
25033 else if (is_variant_part (member)
25034 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25035 {
25036 vlr_ctx.variant_part_offset = byte_position (member);
25037 gen_variant_part (member, &vlr_ctx, context_die);
25038 }
25039 else
25040 {
25041 vlr_ctx.variant_part_offset = NULL_TREE;
25042 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25043 }
25044
25045 /* For C++ inline static data members, immediately emit a DW_TAG_variable
25046 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25047 DW_AT_specification. */
25048 if (static_inline_p)
25049 {
25050 int old_extern = DECL_EXTERNAL (member);
25051 DECL_EXTERNAL (member) = 0;
25052 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25053 DECL_EXTERNAL (member) = old_extern;
25054 }
25055 }
25056 }
25057
25058 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25059 is set, we pretend that the type was never defined, so we only get the
25060 member DIEs needed by later specification DIEs. */
25061
25062 static void
25063 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25064 enum debug_info_usage usage)
25065 {
25066 if (TREE_ASM_WRITTEN (type))
25067 {
25068 /* Fill in the bounds of variable-length fields in late DWARF if
25069 still incomplete. */
25070 if (!early_dwarf && variably_modified_type_p (type, NULL))
25071 for (tree member = TYPE_FIELDS (type);
25072 member;
25073 member = DECL_CHAIN (member))
25074 fill_variable_array_bounds (TREE_TYPE (member));
25075 return;
25076 }
25077
25078 dw_die_ref type_die = lookup_type_die (type);
25079 dw_die_ref scope_die = 0;
25080 int nested = 0;
25081 int complete = (TYPE_SIZE (type)
25082 && (! TYPE_STUB_DECL (type)
25083 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25084 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25085 complete = complete && should_emit_struct_debug (type, usage);
25086
25087 if (type_die && ! complete)
25088 return;
25089
25090 if (TYPE_CONTEXT (type) != NULL_TREE
25091 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25092 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25093 nested = 1;
25094
25095 scope_die = scope_die_for (type, context_die);
25096
25097 /* Generate child DIEs for template parameters. */
25098 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25099 schedule_generic_params_dies_gen (type);
25100
25101 if (! type_die || (nested && is_cu_die (scope_die)))
25102 /* First occurrence of type or toplevel definition of nested class. */
25103 {
25104 dw_die_ref old_die = type_die;
25105
25106 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25107 ? record_type_tag (type) : DW_TAG_union_type,
25108 scope_die, type);
25109 equate_type_number_to_die (type, type_die);
25110 if (old_die)
25111 add_AT_specification (type_die, old_die);
25112 else
25113 add_name_attribute (type_die, type_tag (type));
25114 }
25115 else
25116 remove_AT (type_die, DW_AT_declaration);
25117
25118 /* If this type has been completed, then give it a byte_size attribute and
25119 then give a list of members. */
25120 if (complete && !ns_decl)
25121 {
25122 /* Prevent infinite recursion in cases where the type of some member of
25123 this type is expressed in terms of this type itself. */
25124 TREE_ASM_WRITTEN (type) = 1;
25125 add_byte_size_attribute (type_die, type);
25126 add_alignment_attribute (type_die, type);
25127 if (TYPE_STUB_DECL (type) != NULL_TREE)
25128 {
25129 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25130 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25131 }
25132
25133 /* If the first reference to this type was as the return type of an
25134 inline function, then it may not have a parent. Fix this now. */
25135 if (type_die->die_parent == NULL)
25136 add_child_die (scope_die, type_die);
25137
25138 gen_member_die (type, type_die);
25139
25140 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25141 if (TYPE_ARTIFICIAL (type))
25142 add_AT_flag (type_die, DW_AT_artificial, 1);
25143
25144 /* GNU extension: Record what type our vtable lives in. */
25145 if (TYPE_VFIELD (type))
25146 {
25147 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25148
25149 gen_type_die (vtype, context_die);
25150 add_AT_die_ref (type_die, DW_AT_containing_type,
25151 lookup_type_die (vtype));
25152 }
25153 }
25154 else
25155 {
25156 add_AT_flag (type_die, DW_AT_declaration, 1);
25157
25158 /* We don't need to do this for function-local types. */
25159 if (TYPE_STUB_DECL (type)
25160 && ! decl_function_context (TYPE_STUB_DECL (type)))
25161 vec_safe_push (incomplete_types, type);
25162 }
25163
25164 if (get_AT (type_die, DW_AT_name))
25165 add_pubtype (type, type_die);
25166 }
25167
25168 /* Generate a DIE for a subroutine _type_. */
25169
25170 static void
25171 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25172 {
25173 tree return_type = TREE_TYPE (type);
25174 dw_die_ref subr_die
25175 = new_die (DW_TAG_subroutine_type,
25176 scope_die_for (type, context_die), type);
25177
25178 equate_type_number_to_die (type, subr_die);
25179 add_prototyped_attribute (subr_die, type);
25180 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25181 context_die);
25182 add_alignment_attribute (subr_die, type);
25183 gen_formal_types_die (type, subr_die);
25184
25185 if (get_AT (subr_die, DW_AT_name))
25186 add_pubtype (type, subr_die);
25187 if ((dwarf_version >= 5 || !dwarf_strict)
25188 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25189 add_AT_flag (subr_die, DW_AT_reference, 1);
25190 if ((dwarf_version >= 5 || !dwarf_strict)
25191 && lang_hooks.types.type_dwarf_attribute (type,
25192 DW_AT_rvalue_reference) != -1)
25193 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25194 }
25195
25196 /* Generate a DIE for a type definition. */
25197
25198 static void
25199 gen_typedef_die (tree decl, dw_die_ref context_die)
25200 {
25201 dw_die_ref type_die;
25202 tree type;
25203
25204 if (TREE_ASM_WRITTEN (decl))
25205 {
25206 if (DECL_ORIGINAL_TYPE (decl))
25207 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25208 return;
25209 }
25210
25211 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25212 checks in process_scope_var and modified_type_die), this should be called
25213 only for original types. */
25214 gcc_assert (decl_ultimate_origin (decl) == NULL
25215 || decl_ultimate_origin (decl) == decl);
25216
25217 TREE_ASM_WRITTEN (decl) = 1;
25218 type_die = new_die (DW_TAG_typedef, context_die, decl);
25219
25220 add_name_and_src_coords_attributes (type_die, decl);
25221 if (DECL_ORIGINAL_TYPE (decl))
25222 {
25223 type = DECL_ORIGINAL_TYPE (decl);
25224 if (type == error_mark_node)
25225 return;
25226
25227 gcc_assert (type != TREE_TYPE (decl));
25228 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25229 }
25230 else
25231 {
25232 type = TREE_TYPE (decl);
25233 if (type == error_mark_node)
25234 return;
25235
25236 if (is_naming_typedef_decl (TYPE_NAME (type)))
25237 {
25238 /* Here, we are in the case of decl being a typedef naming
25239 an anonymous type, e.g.:
25240 typedef struct {...} foo;
25241 In that case TREE_TYPE (decl) is not a typedef variant
25242 type and TYPE_NAME of the anonymous type is set to the
25243 TYPE_DECL of the typedef. This construct is emitted by
25244 the C++ FE.
25245
25246 TYPE is the anonymous struct named by the typedef
25247 DECL. As we need the DW_AT_type attribute of the
25248 DW_TAG_typedef to point to the DIE of TYPE, let's
25249 generate that DIE right away. add_type_attribute
25250 called below will then pick (via lookup_type_die) that
25251 anonymous struct DIE. */
25252 if (!TREE_ASM_WRITTEN (type))
25253 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25254
25255 /* This is a GNU Extension. We are adding a
25256 DW_AT_linkage_name attribute to the DIE of the
25257 anonymous struct TYPE. The value of that attribute
25258 is the name of the typedef decl naming the anonymous
25259 struct. This greatly eases the work of consumers of
25260 this debug info. */
25261 add_linkage_name_raw (lookup_type_die (type), decl);
25262 }
25263 }
25264
25265 add_type_attribute (type_die, type, decl_quals (decl), false,
25266 context_die);
25267
25268 if (is_naming_typedef_decl (decl))
25269 /* We want that all subsequent calls to lookup_type_die with
25270 TYPE in argument yield the DW_TAG_typedef we have just
25271 created. */
25272 equate_type_number_to_die (type, type_die);
25273
25274 add_alignment_attribute (type_die, TREE_TYPE (decl));
25275
25276 add_accessibility_attribute (type_die, decl);
25277
25278 if (DECL_ABSTRACT_P (decl))
25279 equate_decl_number_to_die (decl, type_die);
25280
25281 if (get_AT (type_die, DW_AT_name))
25282 add_pubtype (decl, type_die);
25283 }
25284
25285 /* Generate a DIE for a struct, class, enum or union type. */
25286
25287 static void
25288 gen_tagged_type_die (tree type,
25289 dw_die_ref context_die,
25290 enum debug_info_usage usage)
25291 {
25292 if (type == NULL_TREE
25293 || !is_tagged_type (type))
25294 return;
25295
25296 if (TREE_ASM_WRITTEN (type))
25297 ;
25298 /* If this is a nested type whose containing class hasn't been written
25299 out yet, writing it out will cover this one, too. This does not apply
25300 to instantiations of member class templates; they need to be added to
25301 the containing class as they are generated. FIXME: This hurts the
25302 idea of combining type decls from multiple TUs, since we can't predict
25303 what set of template instantiations we'll get. */
25304 else if (TYPE_CONTEXT (type)
25305 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25306 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25307 {
25308 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25309
25310 if (TREE_ASM_WRITTEN (type))
25311 return;
25312
25313 /* If that failed, attach ourselves to the stub. */
25314 context_die = lookup_type_die (TYPE_CONTEXT (type));
25315 }
25316 else if (TYPE_CONTEXT (type) != NULL_TREE
25317 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25318 {
25319 /* If this type is local to a function that hasn't been written
25320 out yet, use a NULL context for now; it will be fixed up in
25321 decls_for_scope. */
25322 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25323 /* A declaration DIE doesn't count; nested types need to go in the
25324 specification. */
25325 if (context_die && is_declaration_die (context_die))
25326 context_die = NULL;
25327 }
25328 else
25329 context_die = declare_in_namespace (type, context_die);
25330
25331 if (TREE_CODE (type) == ENUMERAL_TYPE)
25332 {
25333 /* This might have been written out by the call to
25334 declare_in_namespace. */
25335 if (!TREE_ASM_WRITTEN (type))
25336 gen_enumeration_type_die (type, context_die);
25337 }
25338 else
25339 gen_struct_or_union_type_die (type, context_die, usage);
25340
25341 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25342 it up if it is ever completed. gen_*_type_die will set it for us
25343 when appropriate. */
25344 }
25345
25346 /* Generate a type description DIE. */
25347
25348 static void
25349 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25350 enum debug_info_usage usage)
25351 {
25352 struct array_descr_info info;
25353
25354 if (type == NULL_TREE || type == error_mark_node)
25355 return;
25356
25357 if (flag_checking && type)
25358 verify_type (type);
25359
25360 if (TYPE_NAME (type) != NULL_TREE
25361 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25362 && is_redundant_typedef (TYPE_NAME (type))
25363 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25364 /* The DECL of this type is a typedef we don't want to emit debug
25365 info for but we want debug info for its underlying typedef.
25366 This can happen for, e.g., the injected-class-name of a C++
25367 type. */
25368 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25369
25370 /* If TYPE is a typedef type variant, let's generate debug info
25371 for the parent typedef which TYPE is a type of. */
25372 if (typedef_variant_p (type))
25373 {
25374 if (TREE_ASM_WRITTEN (type))
25375 return;
25376
25377 tree name = TYPE_NAME (type);
25378 tree origin = decl_ultimate_origin (name);
25379 if (origin != NULL && origin != name)
25380 {
25381 gen_decl_die (origin, NULL, NULL, context_die);
25382 return;
25383 }
25384
25385 /* Prevent broken recursion; we can't hand off to the same type. */
25386 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25387
25388 /* Give typedefs the right scope. */
25389 context_die = scope_die_for (type, context_die);
25390
25391 TREE_ASM_WRITTEN (type) = 1;
25392
25393 gen_decl_die (name, NULL, NULL, context_die);
25394 return;
25395 }
25396
25397 /* If type is an anonymous tagged type named by a typedef, let's
25398 generate debug info for the typedef. */
25399 if (is_naming_typedef_decl (TYPE_NAME (type)))
25400 {
25401 /* Give typedefs the right scope. */
25402 context_die = scope_die_for (type, context_die);
25403
25404 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25405 return;
25406 }
25407
25408 if (lang_hooks.types.get_debug_type)
25409 {
25410 tree debug_type = lang_hooks.types.get_debug_type (type);
25411
25412 if (debug_type != NULL_TREE && debug_type != type)
25413 {
25414 gen_type_die_with_usage (debug_type, context_die, usage);
25415 return;
25416 }
25417 }
25418
25419 /* We are going to output a DIE to represent the unqualified version
25420 of this type (i.e. without any const or volatile qualifiers) so
25421 get the main variant (i.e. the unqualified version) of this type
25422 now. (Vectors and arrays are special because the debugging info is in the
25423 cloned type itself. Similarly function/method types can contain extra
25424 ref-qualification). */
25425 if (TREE_CODE (type) == FUNCTION_TYPE
25426 || TREE_CODE (type) == METHOD_TYPE)
25427 {
25428 /* For function/method types, can't use type_main_variant here,
25429 because that can have different ref-qualifiers for C++,
25430 but try to canonicalize. */
25431 tree main = TYPE_MAIN_VARIANT (type);
25432 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25433 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25434 && check_base_type (t, main)
25435 && check_lang_type (t, type))
25436 {
25437 type = t;
25438 break;
25439 }
25440 }
25441 else if (TREE_CODE (type) != VECTOR_TYPE
25442 && TREE_CODE (type) != ARRAY_TYPE)
25443 type = type_main_variant (type);
25444
25445 /* If this is an array type with hidden descriptor, handle it first. */
25446 if (!TREE_ASM_WRITTEN (type)
25447 && lang_hooks.types.get_array_descr_info)
25448 {
25449 memset (&info, 0, sizeof (info));
25450 if (lang_hooks.types.get_array_descr_info (type, &info))
25451 {
25452 /* Fortran sometimes emits array types with no dimension. */
25453 gcc_assert (info.ndimensions >= 0
25454 && (info.ndimensions
25455 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25456 gen_descr_array_type_die (type, &info, context_die);
25457 TREE_ASM_WRITTEN (type) = 1;
25458 return;
25459 }
25460 }
25461
25462 if (TREE_ASM_WRITTEN (type))
25463 {
25464 /* Variable-length types may be incomplete even if
25465 TREE_ASM_WRITTEN. For such types, fall through to
25466 gen_array_type_die() and possibly fill in
25467 DW_AT_{upper,lower}_bound attributes. */
25468 if ((TREE_CODE (type) != ARRAY_TYPE
25469 && TREE_CODE (type) != RECORD_TYPE
25470 && TREE_CODE (type) != UNION_TYPE
25471 && TREE_CODE (type) != QUAL_UNION_TYPE)
25472 || !variably_modified_type_p (type, NULL))
25473 return;
25474 }
25475
25476 switch (TREE_CODE (type))
25477 {
25478 case ERROR_MARK:
25479 break;
25480
25481 case POINTER_TYPE:
25482 case REFERENCE_TYPE:
25483 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25484 ensures that the gen_type_die recursion will terminate even if the
25485 type is recursive. Recursive types are possible in Ada. */
25486 /* ??? We could perhaps do this for all types before the switch
25487 statement. */
25488 TREE_ASM_WRITTEN (type) = 1;
25489
25490 /* For these types, all that is required is that we output a DIE (or a
25491 set of DIEs) to represent the "basis" type. */
25492 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25493 DINFO_USAGE_IND_USE);
25494 break;
25495
25496 case OFFSET_TYPE:
25497 /* This code is used for C++ pointer-to-data-member types.
25498 Output a description of the relevant class type. */
25499 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25500 DINFO_USAGE_IND_USE);
25501
25502 /* Output a description of the type of the object pointed to. */
25503 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25504 DINFO_USAGE_IND_USE);
25505
25506 /* Now output a DIE to represent this pointer-to-data-member type
25507 itself. */
25508 gen_ptr_to_mbr_type_die (type, context_die);
25509 break;
25510
25511 case FUNCTION_TYPE:
25512 /* Force out return type (in case it wasn't forced out already). */
25513 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25514 DINFO_USAGE_DIR_USE);
25515 gen_subroutine_type_die (type, context_die);
25516 break;
25517
25518 case METHOD_TYPE:
25519 /* Force out return type (in case it wasn't forced out already). */
25520 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25521 DINFO_USAGE_DIR_USE);
25522 gen_subroutine_type_die (type, context_die);
25523 break;
25524
25525 case ARRAY_TYPE:
25526 case VECTOR_TYPE:
25527 gen_array_type_die (type, context_die);
25528 break;
25529
25530 case ENUMERAL_TYPE:
25531 case RECORD_TYPE:
25532 case UNION_TYPE:
25533 case QUAL_UNION_TYPE:
25534 gen_tagged_type_die (type, context_die, usage);
25535 return;
25536
25537 case VOID_TYPE:
25538 case INTEGER_TYPE:
25539 case REAL_TYPE:
25540 case FIXED_POINT_TYPE:
25541 case COMPLEX_TYPE:
25542 case BOOLEAN_TYPE:
25543 /* No DIEs needed for fundamental types. */
25544 break;
25545
25546 case NULLPTR_TYPE:
25547 case LANG_TYPE:
25548 /* Just use DW_TAG_unspecified_type. */
25549 {
25550 dw_die_ref type_die = lookup_type_die (type);
25551 if (type_die == NULL)
25552 {
25553 tree name = TYPE_IDENTIFIER (type);
25554 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25555 type);
25556 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25557 equate_type_number_to_die (type, type_die);
25558 }
25559 }
25560 break;
25561
25562 default:
25563 if (is_cxx_auto (type))
25564 {
25565 tree name = TYPE_IDENTIFIER (type);
25566 dw_die_ref *die = (name == get_identifier ("auto")
25567 ? &auto_die : &decltype_auto_die);
25568 if (!*die)
25569 {
25570 *die = new_die (DW_TAG_unspecified_type,
25571 comp_unit_die (), NULL_TREE);
25572 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25573 }
25574 equate_type_number_to_die (type, *die);
25575 break;
25576 }
25577 gcc_unreachable ();
25578 }
25579
25580 TREE_ASM_WRITTEN (type) = 1;
25581 }
25582
25583 static void
25584 gen_type_die (tree type, dw_die_ref context_die)
25585 {
25586 if (type != error_mark_node)
25587 {
25588 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25589 if (flag_checking)
25590 {
25591 dw_die_ref die = lookup_type_die (type);
25592 if (die)
25593 check_die (die);
25594 }
25595 }
25596 }
25597
25598 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25599 things which are local to the given block. */
25600
25601 static void
25602 gen_block_die (tree stmt, dw_die_ref context_die)
25603 {
25604 int must_output_die = 0;
25605 bool inlined_func;
25606
25607 /* Ignore blocks that are NULL. */
25608 if (stmt == NULL_TREE)
25609 return;
25610
25611 inlined_func = inlined_function_outer_scope_p (stmt);
25612
25613 /* If the block is one fragment of a non-contiguous block, do not
25614 process the variables, since they will have been done by the
25615 origin block. Do process subblocks. */
25616 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25617 {
25618 tree sub;
25619
25620 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25621 gen_block_die (sub, context_die);
25622
25623 return;
25624 }
25625
25626 /* Determine if we need to output any Dwarf DIEs at all to represent this
25627 block. */
25628 if (inlined_func)
25629 /* The outer scopes for inlinings *must* always be represented. We
25630 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25631 must_output_die = 1;
25632 else if (lookup_block_die (stmt))
25633 /* If we already have a DIE then it was filled early. Meanwhile
25634 we might have pruned all BLOCK_VARS as optimized out, but we
25635 still want to generate high/low PC attributes, so output it. */
25636 must_output_die = 1;
25637 else if (TREE_USED (stmt)
25638 || TREE_ASM_WRITTEN (stmt))
25639 {
25640 /* Determine if this block directly contains any "significant"
25641 local declarations which we will need to output DIEs for. */
25642 if (debug_info_level > DINFO_LEVEL_TERSE)
25643 {
25644 /* We are not in terse mode so any local declaration that
25645 is not ignored for debug purposes counts as being a
25646 "significant" one. */
25647 if (BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25648 must_output_die = 1;
25649 else
25650 for (tree var = BLOCK_VARS (stmt); var; var = DECL_CHAIN (var))
25651 if (!DECL_IGNORED_P (var))
25652 {
25653 must_output_die = 1;
25654 break;
25655 }
25656 }
25657 else if (!dwarf2out_ignore_block (stmt))
25658 must_output_die = 1;
25659 }
25660
25661 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25662 DIE for any block which contains no significant local declarations at
25663 all. Rather, in such cases we just call `decls_for_scope' so that any
25664 needed Dwarf info for any sub-blocks will get properly generated. Note
25665 that in terse mode, our definition of what constitutes a "significant"
25666 local declaration gets restricted to include only inlined function
25667 instances and local (nested) function definitions. */
25668 if (must_output_die)
25669 {
25670 if (inlined_func)
25671 gen_inlined_subroutine_die (stmt, context_die);
25672 else
25673 gen_lexical_block_die (stmt, context_die);
25674 }
25675 else
25676 decls_for_scope (stmt, context_die);
25677 }
25678
25679 /* Process variable DECL (or variable with origin ORIGIN) within
25680 block STMT and add it to CONTEXT_DIE. */
25681 static void
25682 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25683 {
25684 dw_die_ref die;
25685 tree decl_or_origin = decl ? decl : origin;
25686
25687 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25688 die = lookup_decl_die (decl_or_origin);
25689 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25690 {
25691 if (TYPE_DECL_IS_STUB (decl_or_origin))
25692 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25693 else
25694 die = lookup_decl_die (decl_or_origin);
25695 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25696 if (! die && ! early_dwarf)
25697 return;
25698 }
25699 else
25700 die = NULL;
25701
25702 /* Avoid creating DIEs for local typedefs and concrete static variables that
25703 will only be pruned later. */
25704 if ((origin || decl_ultimate_origin (decl))
25705 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25706 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25707 {
25708 origin = decl_ultimate_origin (decl_or_origin);
25709 if (decl && VAR_P (decl) && die != NULL)
25710 {
25711 die = lookup_decl_die (origin);
25712 if (die != NULL)
25713 equate_decl_number_to_die (decl, die);
25714 }
25715 return;
25716 }
25717
25718 if (die != NULL && die->die_parent == NULL)
25719 add_child_die (context_die, die);
25720 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25721 {
25722 if (early_dwarf)
25723 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25724 stmt, context_die);
25725 }
25726 else
25727 {
25728 if (decl && DECL_P (decl))
25729 {
25730 die = lookup_decl_die (decl);
25731
25732 /* Early created DIEs do not have a parent as the decls refer
25733 to the function as DECL_CONTEXT rather than the BLOCK. */
25734 if (die && die->die_parent == NULL)
25735 {
25736 gcc_assert (in_lto_p);
25737 add_child_die (context_die, die);
25738 }
25739 }
25740
25741 gen_decl_die (decl, origin, NULL, context_die);
25742 }
25743 }
25744
25745 /* Generate DIEs for all of the decls declared within a given scope and
25746 (recursively) within all of its sub-blocks. */
25747
25748 static void
25749 decls_for_scope (tree stmt, dw_die_ref context_die, bool recurse)
25750 {
25751 tree decl;
25752 unsigned int i;
25753 tree subblocks;
25754
25755 /* Ignore NULL blocks. */
25756 if (stmt == NULL_TREE)
25757 return;
25758
25759 /* Output the DIEs to represent all of the data objects and typedefs
25760 declared directly within this block but not within any nested
25761 sub-blocks. Also, nested function and tag DIEs have been
25762 generated with a parent of NULL; fix that up now. We don't
25763 have to do this if we're at -g1. */
25764 if (debug_info_level > DINFO_LEVEL_TERSE)
25765 {
25766 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25767 process_scope_var (stmt, decl, NULL_TREE, context_die);
25768 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25769 origin - avoid doing this twice as we have no good way to see
25770 if we've done it once already. */
25771 if (! early_dwarf)
25772 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25773 {
25774 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25775 if (decl == current_function_decl)
25776 /* Ignore declarations of the current function: although they
25777 are declarations, gen_subprogram_die would treat them
25778 as definitions again, because they are equal to
25779 current_function_decl, and endlessly recurse. */;
25780 else if (TREE_CODE (decl) == FUNCTION_DECL)
25781 process_scope_var (stmt, decl, NULL_TREE, context_die);
25782 else
25783 process_scope_var (stmt, NULL_TREE, decl, context_die);
25784 }
25785 }
25786
25787 /* Even if we're at -g1, we need to process the subblocks in order to get
25788 inlined call information. */
25789
25790 /* Output the DIEs to represent all sub-blocks (and the items declared
25791 therein) of this block. */
25792 if (recurse)
25793 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25794 subblocks != NULL;
25795 subblocks = BLOCK_CHAIN (subblocks))
25796 gen_block_die (subblocks, context_die);
25797 }
25798
25799 /* Is this a typedef we can avoid emitting? */
25800
25801 static bool
25802 is_redundant_typedef (const_tree decl)
25803 {
25804 if (TYPE_DECL_IS_STUB (decl))
25805 return true;
25806
25807 if (DECL_ARTIFICIAL (decl)
25808 && DECL_CONTEXT (decl)
25809 && is_tagged_type (DECL_CONTEXT (decl))
25810 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25811 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25812 /* Also ignore the artificial member typedef for the class name. */
25813 return true;
25814
25815 return false;
25816 }
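
/* As a rough illustration of the second case above: given the C++ input

     struct S { };

   the implicit, artificial member typedef "S" inside the struct (the
   injected class name) has the same name as its containing type, so it is
   treated as redundant and no DW_TAG_typedef is emitted for it.  */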
25817
25818 /* Return TRUE if DECL is a typedef that names a type for linkage
25819 purposes. This kind of typedef is produced by the C++ FE for
25820 constructs like:
25821
25822 typedef struct {...} foo;
25823
25824 In that case, there is no typedef variant type produced for foo.
25825 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25826 struct type. */
25827
25828 static bool
25829 is_naming_typedef_decl (const_tree decl)
25830 {
25831 if (decl == NULL_TREE
25832 || TREE_CODE (decl) != TYPE_DECL
25833 || DECL_NAMELESS (decl)
25834 || !is_tagged_type (TREE_TYPE (decl))
25835 || DECL_IS_BUILTIN (decl)
25836 || is_redundant_typedef (decl)
25837 /* It looks like Ada produces TYPE_DECLs that are very similar
25838 to C++ naming typedefs but that have different
25839 semantics. Let's be specific to C++ for now. */
25840 || !is_cxx (decl))
25841 return false;
25842
25843 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25844 && TYPE_NAME (TREE_TYPE (decl)) == decl
25845 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25846 != TYPE_NAME (TREE_TYPE (decl))));
25847 }
25848
25849 /* Look up the DIE for a context; return NULL if not yet created. */
25850
25851 static inline dw_die_ref
25852 lookup_context_die (tree context)
25853 {
25854 if (context)
25855 {
25856 /* Find die that represents this context. */
25857 if (TYPE_P (context))
25858 {
25859 context = TYPE_MAIN_VARIANT (context);
25860 dw_die_ref ctx = lookup_type_die (context);
25861 if (!ctx)
25862 return NULL;
25863 return strip_naming_typedef (context, ctx);
25864 }
25865 else
25866 return lookup_decl_die (context);
25867 }
25868 return comp_unit_die ();
25869 }
25870
25871 /* Return the DIE for a context, creating it if necessary. */
25872
25873 static inline dw_die_ref
25874 get_context_die (tree context)
25875 {
25876 if (context)
25877 {
25878 /* Find die that represents this context. */
25879 if (TYPE_P (context))
25880 {
25881 context = TYPE_MAIN_VARIANT (context);
25882 return strip_naming_typedef (context, force_type_die (context));
25883 }
25884 else
25885 return force_decl_die (context);
25886 }
25887 return comp_unit_die ();
25888 }
25889
25890 /* Returns the DIE for DECL. A DIE will always be returned. */
25891
25892 static dw_die_ref
25893 force_decl_die (tree decl)
25894 {
25895 dw_die_ref decl_die;
25896 unsigned saved_external_flag;
25897 tree save_fn = NULL_TREE;
25898 decl_die = lookup_decl_die (decl);
25899 if (!decl_die)
25900 {
25901 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25902
25903 decl_die = lookup_decl_die (decl);
25904 if (decl_die)
25905 return decl_die;
25906
25907 switch (TREE_CODE (decl))
25908 {
25909 case FUNCTION_DECL:
25910 /* Clear current_function_decl, so that gen_subprogram_die thinks
25911 that this is a declaration. At this point, we just want to force
25912 declaration die. */
25913 save_fn = current_function_decl;
25914 current_function_decl = NULL_TREE;
25915 gen_subprogram_die (decl, context_die);
25916 current_function_decl = save_fn;
25917 break;
25918
25919 case VAR_DECL:
25920 /* Set external flag to force declaration die. Restore it after
25921 gen_decl_die() call. */
25922 saved_external_flag = DECL_EXTERNAL (decl);
25923 DECL_EXTERNAL (decl) = 1;
25924 gen_decl_die (decl, NULL, NULL, context_die);
25925 DECL_EXTERNAL (decl) = saved_external_flag;
25926 break;
25927
25928 case NAMESPACE_DECL:
25929 if (dwarf_version >= 3 || !dwarf_strict)
25930 dwarf2out_decl (decl);
25931 else
25932 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
25933 decl_die = comp_unit_die ();
25934 break;
25935
25936 case TRANSLATION_UNIT_DECL:
25937 decl_die = comp_unit_die ();
25938 break;
25939
25940 default:
25941 gcc_unreachable ();
25942 }
25943
25944 /* We should be able to find the DIE now. */
25945 if (!decl_die)
25946 decl_die = lookup_decl_die (decl);
25947 gcc_assert (decl_die);
25948 }
25949
25950 return decl_die;
25951 }
25952
25953 /* Returns the DIE for TYPE, which must not be a base type. A DIE is
25954 always returned. */
25955
25956 static dw_die_ref
25957 force_type_die (tree type)
25958 {
25959 dw_die_ref type_die;
25960
25961 type_die = lookup_type_die (type);
25962 if (!type_die)
25963 {
25964 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
25965
25966 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
25967 false, context_die);
25968 gcc_assert (type_die);
25969 }
25970 return type_die;
25971 }
25972
25973 /* Force out any required namespaces to be able to output DECL,
25974 and return the new context_die for it, if it's changed. */
25975
25976 static dw_die_ref
25977 setup_namespace_context (tree thing, dw_die_ref context_die)
25978 {
25979 tree context = (DECL_P (thing)
25980 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
25981 if (context && TREE_CODE (context) == NAMESPACE_DECL)
25982 /* Force out the namespace. */
25983 context_die = force_decl_die (context);
25984
25985 return context_die;
25986 }
25987
25988 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
25989 type) within its namespace, if appropriate.
25990
25991 For compatibility with older debuggers, namespace DIEs only contain
25992 declarations; all definitions are emitted at CU scope, with
25993 DW_AT_specification pointing to the declaration (like with class
25994 members). */
25995
25996 static dw_die_ref
25997 declare_in_namespace (tree thing, dw_die_ref context_die)
25998 {
25999 dw_die_ref ns_context;
26000
26001 if (debug_info_level <= DINFO_LEVEL_TERSE)
26002 return context_die;
26003
26004 /* External declarations in the local scope only need to be emitted
26005 once, not once in the namespace and once in the scope.
26006
26007 This avoids declaring the `extern' below in the
26008 namespace DIE as well as in the innermost scope:
26009
26010 namespace S
26011 {
26012 int i=5;
26013 int foo()
26014 {
26015 int i=8;
26016 extern int i;
26017 return i;
26018 }
26019 }
26020 */
26021 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26022 return context_die;
26023
26024 /* If this decl is from an inlined function, then don't try to emit it in its
26025 namespace, as we will get confused. It would have already been emitted
26026 when the abstract instance of the inline function was emitted anyway. */
26027 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26028 return context_die;
26029
26030 ns_context = setup_namespace_context (thing, context_die);
26031
26032 if (ns_context != context_die)
26033 {
26034 if (is_fortran ())
26035 return ns_context;
26036 if (DECL_P (thing))
26037 gen_decl_die (thing, NULL, NULL, ns_context);
26038 else
26039 gen_type_die (thing, ns_context);
26040 }
26041 return context_die;
26042 }
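
/* A rough sketch of the intended DIE layout for

     namespace N { int i = 1; }

   is therefore:

     DW_TAG_namespace "N"
       DW_TAG_variable "i"   DW_AT_declaration
     DW_TAG_variable         DW_AT_specification -> declaration of "i"
                             DW_AT_location ...

   with the defining DIE at CU scope referring back to the declaration
   inside the namespace.  */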
26043
26044 /* Generate a DIE for a namespace or namespace alias. */
26045
26046 static void
26047 gen_namespace_die (tree decl, dw_die_ref context_die)
26048 {
26049 dw_die_ref namespace_die;
26050
26051 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26052 they are an alias of. */
26053 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26054 {
26055 /* Output a real namespace or module. */
26056 context_die = setup_namespace_context (decl, comp_unit_die ());
26057 namespace_die = new_die (is_fortran ()
26058 ? DW_TAG_module : DW_TAG_namespace,
26059 context_die, decl);
26060 /* For Fortran modules defined in different CU don't add src coords. */
26061 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26062 {
26063 const char *name = dwarf2_name (decl, 0);
26064 if (name)
26065 add_name_attribute (namespace_die, name);
26066 }
26067 else
26068 add_name_and_src_coords_attributes (namespace_die, decl);
26069 if (DECL_EXTERNAL (decl))
26070 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26071 equate_decl_number_to_die (decl, namespace_die);
26072 }
26073 else
26074 {
26075 /* Output a namespace alias. */
26076
26077 /* Force out the namespace we are an alias of, if necessary. */
26078 dw_die_ref origin_die
26079 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26080
26081 if (DECL_FILE_SCOPE_P (decl)
26082 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26083 context_die = setup_namespace_context (decl, comp_unit_die ());
26084 /* Now create the namespace alias DIE. */
26085 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26086 add_name_and_src_coords_attributes (namespace_die, decl);
26087 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26088 equate_decl_number_to_die (decl, namespace_die);
26089 }
26090 if ((dwarf_version >= 5 || !dwarf_strict)
26091 && lang_hooks.decls.decl_dwarf_attribute (decl,
26092 DW_AT_export_symbols) == 1)
26093 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26094
26095 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26096 if (want_pubnames ())
26097 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26098 }
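
/* For example (a sketch), for the C++ input

     namespace A { int x; }
     namespace B = A;

   "A" gets a DW_TAG_namespace DIE, while the alias "B" gets a
   DW_TAG_imported_declaration DIE whose DW_AT_name is "B" and whose
   DW_AT_import refers to the DIE for "A".  */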
26099
26100 /* Generate Dwarf debug information for a decl described by DECL.
26101 The return value is currently only meaningful for PARM_DECLs;
26102 for all other decls it returns NULL.
26103
26104 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26105 It can be NULL otherwise. */
26106
26107 static dw_die_ref
26108 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26109 dw_die_ref context_die)
26110 {
26111 tree decl_or_origin = decl ? decl : origin;
26112 tree class_origin = NULL, ultimate_origin;
26113
26114 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26115 return NULL;
26116
26117 switch (TREE_CODE (decl_or_origin))
26118 {
26119 case ERROR_MARK:
26120 break;
26121
26122 case CONST_DECL:
26123 if (!is_fortran () && !is_ada ())
26124 {
26125 /* The individual enumerators of an enum type get output when we output
26126 the Dwarf representation of the relevant enum type itself. */
26127 break;
26128 }
26129
26130 /* Emit its type. */
26131 gen_type_die (TREE_TYPE (decl), context_die);
26132
26133 /* And its containing namespace. */
26134 context_die = declare_in_namespace (decl, context_die);
26135
26136 gen_const_die (decl, context_die);
26137 break;
26138
26139 case FUNCTION_DECL:
26140 #if 0
26141 /* FIXME */
26142 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26143 on local redeclarations of global functions. That seems broken. */
26144 if (current_function_decl != decl)
26145 /* This is only a declaration. */;
26146 #endif
26147
26148 /* We should have abstract copies already and should not generate
26149 stray type DIEs in late LTO dumping. */
26150 if (! early_dwarf)
26151 ;
26152
26153 /* If we're emitting a clone, emit info for the abstract instance. */
26154 else if (origin || DECL_ORIGIN (decl) != decl)
26155 dwarf2out_abstract_function (origin
26156 ? DECL_ORIGIN (origin)
26157 : DECL_ABSTRACT_ORIGIN (decl));
26158
26159 /* If we're emitting a possibly inlined function emit it as
26160 abstract instance. */
26161 else if (cgraph_function_possibly_inlined_p (decl)
26162 && ! DECL_ABSTRACT_P (decl)
26163 && ! class_or_namespace_scope_p (context_die)
26164 /* dwarf2out_abstract_function won't emit a die if this is just
26165 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26166 that case, because that works only if we have a die. */
26167 && DECL_INITIAL (decl) != NULL_TREE)
26168 dwarf2out_abstract_function (decl);
26169
26170 /* Otherwise we're emitting the primary DIE for this decl. */
26171 else if (debug_info_level > DINFO_LEVEL_TERSE)
26172 {
26173 /* Before we describe the FUNCTION_DECL itself, make sure that we
26174 have its containing type. */
26175 if (!origin)
26176 origin = decl_class_context (decl);
26177 if (origin != NULL_TREE)
26178 gen_type_die (origin, context_die);
26179
26180 /* And its return type. */
26181 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26182
26183 /* And its virtual context. */
26184 if (DECL_VINDEX (decl) != NULL_TREE)
26185 gen_type_die (DECL_CONTEXT (decl), context_die);
26186
26187 /* Make sure we have a member DIE for decl. */
26188 if (origin != NULL_TREE)
26189 gen_type_die_for_member (origin, decl, context_die);
26190
26191 /* And its containing namespace. */
26192 context_die = declare_in_namespace (decl, context_die);
26193 }
26194
26195 /* Now output a DIE to represent the function itself. */
26196 if (decl)
26197 gen_subprogram_die (decl, context_die);
26198 break;
26199
26200 case TYPE_DECL:
26201 /* If we are in terse mode, don't generate any DIEs to represent any
26202 actual typedefs. */
26203 if (debug_info_level <= DINFO_LEVEL_TERSE)
26204 break;
26205
26206 /* In the special case of a TYPE_DECL node representing the declaration
26207 of some type tag, if the given TYPE_DECL is marked as having been
26208 instantiated from some other (original) TYPE_DECL node (e.g. one which
26209 was generated within the original definition of an inline function) we
26210 used to generate a special (abbreviated) DW_TAG_structure_type,
26211 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26212 should actually be referencing those DIEs, as variable DIEs with that
26213 type would be emitted already in the abstract origin, so it was always
26214 removed during unused type pruning. Don't add anything in this
26215 case. */
26216 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26217 break;
26218
26219 if (is_redundant_typedef (decl))
26220 gen_type_die (TREE_TYPE (decl), context_die);
26221 else
26222 /* Output a DIE to represent the typedef itself. */
26223 gen_typedef_die (decl, context_die);
26224 break;
26225
26226 case LABEL_DECL:
26227 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26228 gen_label_die (decl, context_die);
26229 break;
26230
26231 case VAR_DECL:
26232 case RESULT_DECL:
26233 /* If we are in terse mode, don't generate any DIEs to represent any
26234 variable declarations or definitions. */
26235 if (debug_info_level <= DINFO_LEVEL_TERSE)
26236 break;
26237
26238 /* Avoid generating stray type DIEs during late dwarf dumping.
26239 All types have been dumped early. */
26240 if (early_dwarf
26241 /* ??? But in LTRANS we cannot annotate early created variably
26242 modified type DIEs without copying them and adjusting all
26243 references to them. Dump them again as happens for inlining
26244 which copies both the decl and the types. */
26245 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26246 in VLA bound information for example. */
26247 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26248 current_function_decl)))
26249 {
26250 /* Output any DIEs that are needed to specify the type of this data
26251 object. */
26252 if (decl_by_reference_p (decl_or_origin))
26253 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26254 else
26255 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26256 }
26257
26258 if (early_dwarf)
26259 {
26260 /* And its containing type. */
26261 class_origin = decl_class_context (decl_or_origin);
26262 if (class_origin != NULL_TREE)
26263 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26264
26265 /* And its containing namespace. */
26266 context_die = declare_in_namespace (decl_or_origin, context_die);
26267 }
26268
26269 /* Now output the DIE to represent the data object itself. This gets
26270 complicated because of the possibility that the VAR_DECL really
26271 represents an inlined instance of a formal parameter for an inline
26272 function. */
26273 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26274 if (ultimate_origin != NULL_TREE
26275 && TREE_CODE (ultimate_origin) == PARM_DECL)
26276 gen_formal_parameter_die (decl, origin,
26277 true /* Emit name attribute. */,
26278 context_die);
26279 else
26280 gen_variable_die (decl, origin, context_die);
26281 break;
26282
26283 case FIELD_DECL:
26284 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26285 /* Ignore the nameless fields that are used to skip bits but handle C++
26286 anonymous unions and structs. */
26287 if (DECL_NAME (decl) != NULL_TREE
26288 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26289 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26290 {
26291 gen_type_die (member_declared_type (decl), context_die);
26292 gen_field_die (decl, ctx, context_die);
26293 }
26294 break;
26295
26296 case PARM_DECL:
26297 /* Avoid generating stray type DIEs during late dwarf dumping.
26298 All types have been dumped early. */
26299 if (early_dwarf
26300 /* ??? But in LTRANS we cannot annotate early created variably
26301 modified type DIEs without copying them and adjusting all
26302 references to them. Dump them again as happens for inlining
26303 which copies both the decl and the types. */
26304 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26305 in VLA bound information for example. */
26306 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26307 current_function_decl)))
26308 {
26309 if (DECL_BY_REFERENCE (decl_or_origin))
26310 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26311 else
26312 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26313 }
26314 return gen_formal_parameter_die (decl, origin,
26315 true /* Emit name attribute. */,
26316 context_die);
26317
26318 case NAMESPACE_DECL:
26319 if (dwarf_version >= 3 || !dwarf_strict)
26320 gen_namespace_die (decl, context_die);
26321 break;
26322
26323 case IMPORTED_DECL:
26324 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26325 DECL_CONTEXT (decl), context_die);
26326 break;
26327
26328 case NAMELIST_DECL:
26329 gen_namelist_decl (DECL_NAME (decl), context_die,
26330 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26331 break;
26332
26333 default:
26334 /* Probably some frontend-internal decl. Assume we don't care. */
26335 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26336 break;
26337 }
26338
26339 return NULL;
26340 }
26341 \f
26342 /* Output initial debug information for global DECL. Called at the
26343 end of the parsing process.
26344
26345 This is the initial debug generation process. As such, the DIEs
26346 generated may be incomplete. A later debug generation pass
26347 (dwarf2out_late_global_decl) will augment the information generated
26348 in this pass (e.g., with complete location info). */
26349
26350 static void
26351 dwarf2out_early_global_decl (tree decl)
26352 {
26353 set_early_dwarf s;
26354
26355 /* gen_decl_die() will set DECL_ABSTRACT because
26356 cgraph_function_possibly_inlined_p() returns true. This in
26357 turn will cause DW_AT_inline attributes to be set.
26358
26359 This happens because at early dwarf generation, there is no
26360 cgraph information, causing cgraph_function_possibly_inlined_p()
26361 to return true. Trick cgraph_function_possibly_inlined_p()
26362 while we generate dwarf early. */
26363 bool save = symtab->global_info_ready;
26364 symtab->global_info_ready = true;
26365
26366 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26367 other DECLs and they can point to template types or other things
26368 that dwarf2out can't handle when done via dwarf2out_decl. */
26369 if (TREE_CODE (decl) != TYPE_DECL
26370 && TREE_CODE (decl) != PARM_DECL)
26371 {
26372 if (TREE_CODE (decl) == FUNCTION_DECL)
26373 {
26374 tree save_fndecl = current_function_decl;
26375
26376 /* For nested functions, make sure we have DIEs for the parents first
26377 so that all nested DIEs are generated at the proper scope in the
26378 first shot. */
26379 tree context = decl_function_context (decl);
26380 if (context != NULL)
26381 {
26382 dw_die_ref context_die = lookup_decl_die (context);
26383 current_function_decl = context;
26384
26385 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26386 enough so that it lands in its own context. This avoids type
26387 pruning issues later on. */
26388 if (context_die == NULL || is_declaration_die (context_die))
26389 dwarf2out_decl (context);
26390 }
26391
26392 /* Emit an abstract origin of a function first. This happens
26393 with C++ constructor clones for example and makes
26394 dwarf2out_abstract_function happy which requires the early
26395 DIE of the abstract instance to be present. */
26396 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26397 dw_die_ref origin_die;
26398 if (origin != NULL
26399 /* Do not emit the DIE multiple times but make sure to
26400 process it fully here in case we just saw a declaration. */
26401 && ((origin_die = lookup_decl_die (origin)) == NULL
26402 || is_declaration_die (origin_die)))
26403 {
26404 current_function_decl = origin;
26405 dwarf2out_decl (origin);
26406 }
26407
26408 /* Emit the DIE for decl but avoid doing that multiple times. */
26409 dw_die_ref old_die;
26410 if ((old_die = lookup_decl_die (decl)) == NULL
26411 || is_declaration_die (old_die))
26412 {
26413 current_function_decl = decl;
26414 dwarf2out_decl (decl);
26415 }
26416
26417 current_function_decl = save_fndecl;
26418 }
26419 else
26420 dwarf2out_decl (decl);
26421 }
26422 symtab->global_info_ready = save;
26423 }
26424
26425 /* Return whether EXPR is an expression with the following pattern:
26426 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
26427
26428 static bool
26429 is_trivial_indirect_ref (tree expr)
26430 {
26431 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26432 return false;
26433
26434 tree nop = TREE_OPERAND (expr, 0);
26435 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26436 return false;
26437
26438 tree int_cst = TREE_OPERAND (nop, 0);
26439 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26440 }
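
/* Such a DECL_VALUE_EXPR typically stands for an access through a
   literal address, conceptually something like

     *(volatile int *) 0x1234

   where the INTEGER_CST is the address and the NOP_EXPR is the cast to
   the pointer type; values like this can be emitted without creating
   relocations against text symbols.  (The address above is only an
   illustrative placeholder.)  */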
26441
26442 /* Output debug information for global decl DECL. Called from
26443 toplev.c after compilation proper has finished. */
26444
26445 static void
26446 dwarf2out_late_global_decl (tree decl)
26447 {
26448 /* Fill-in any location information we were unable to determine
26449 on the first pass. */
26450 if (VAR_P (decl))
26451 {
26452 dw_die_ref die = lookup_decl_die (decl);
26453
26454 /* We may have to generate early debug late for LTO in case debug
26455 was not enabled at compile-time or the target doesn't support
26456 the LTO early debug scheme. */
26457 if (! die && in_lto_p)
26458 {
26459 dwarf2out_decl (decl);
26460 die = lookup_decl_die (decl);
26461 }
26462
26463 if (die)
26464 {
26465 /* We get called via the symtab code invoking late_global_decl
26466 for symbols that are optimized out.
26467
26468 Do not add locations for those, except if they have a
26469 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26470 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26471 INDIRECT_REF expression, as this could generate relocations to
26472 text symbols in LTO object files, which is invalid. */
26473 varpool_node *node = varpool_node::get (decl);
26474 if ((! node || ! node->definition)
26475 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26476 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26477 tree_add_const_value_attribute_for_decl (die, decl);
26478 else
26479 add_location_or_const_value_attribute (die, decl, false);
26480 }
26481 }
26482 }
26483
26484 /* Output debug information for type decl DECL. Called from toplev.c
26485 and from language front ends (to record built-in types). */
26486 static void
26487 dwarf2out_type_decl (tree decl, int local)
26488 {
26489 if (!local)
26490 {
26491 set_early_dwarf s;
26492 dwarf2out_decl (decl);
26493 }
26494 }
26495
26496 /* Output debug information for imported module or decl DECL.
26497 NAME is non-NULL name in the lexical block if the decl has been renamed.
26498 LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK)
26499 that DECL belongs to.
26500 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26501 static void
26502 dwarf2out_imported_module_or_decl_1 (tree decl,
26503 tree name,
26504 tree lexical_block,
26505 dw_die_ref lexical_block_die)
26506 {
26507 expanded_location xloc;
26508 dw_die_ref imported_die = NULL;
26509 dw_die_ref at_import_die;
26510
26511 if (TREE_CODE (decl) == IMPORTED_DECL)
26512 {
26513 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26514 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26515 gcc_assert (decl);
26516 }
26517 else
26518 xloc = expand_location (input_location);
26519
26520 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26521 {
26522 at_import_die = force_type_die (TREE_TYPE (decl));
26523 /* For namespace N { typedef void T; } using N::T; base_type_die
26524 returns NULL, but DW_TAG_imported_declaration requires
26525 the DW_AT_import tag. Force creation of DW_TAG_typedef. */
26526 if (!at_import_die)
26527 {
26528 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26529 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26530 at_import_die = lookup_type_die (TREE_TYPE (decl));
26531 gcc_assert (at_import_die);
26532 }
26533 }
26534 else
26535 {
26536 at_import_die = lookup_decl_die (decl);
26537 if (!at_import_die)
26538 {
26539 /* If we're trying to avoid duplicate debug info, we may not have
26540 emitted the member decl for this field. Emit it now. */
26541 if (TREE_CODE (decl) == FIELD_DECL)
26542 {
26543 tree type = DECL_CONTEXT (decl);
26544
26545 if (TYPE_CONTEXT (type)
26546 && TYPE_P (TYPE_CONTEXT (type))
26547 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26548 DINFO_USAGE_DIR_USE))
26549 return;
26550 gen_type_die_for_member (type, decl,
26551 get_context_die (TYPE_CONTEXT (type)));
26552 }
26553 if (TREE_CODE (decl) == NAMELIST_DECL)
26554 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26555 get_context_die (DECL_CONTEXT (decl)),
26556 NULL_TREE);
26557 else
26558 at_import_die = force_decl_die (decl);
26559 }
26560 }
26561
26562 if (TREE_CODE (decl) == NAMESPACE_DECL)
26563 {
26564 if (dwarf_version >= 3 || !dwarf_strict)
26565 imported_die = new_die (DW_TAG_imported_module,
26566 lexical_block_die,
26567 lexical_block);
26568 else
26569 return;
26570 }
26571 else
26572 imported_die = new_die (DW_TAG_imported_declaration,
26573 lexical_block_die,
26574 lexical_block);
26575
26576 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26577 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26578 if (debug_column_info && xloc.column)
26579 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26580 if (name)
26581 add_AT_string (imported_die, DW_AT_name,
26582 IDENTIFIER_POINTER (name));
26583 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26584 }
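
/* As a rough guide, a C++ using-directive such as

     using namespace std;

   leads to a DW_TAG_imported_module DIE whose DW_AT_import refers to the
   namespace DIE, while a using-declaration such as

     using std::size_t;

   leads to a DW_TAG_imported_declaration DIE referring to the DIE of the
   imported entity instead.  */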
26585
26586 /* Output debug information for imported module or decl DECL.
26587 NAME is non-NULL name in context if the decl has been renamed.
26588 CHILD is true if decl is one of the renamed decls as part of
26589 importing whole module.
26590 IMPLICIT is set if this hook is called for an implicit import
26591 such as inline namespace. */
26592
26593 static void
26594 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26595 bool child, bool implicit)
26596 {
26597 /* dw_die_ref at_import_die; */
26598 dw_die_ref scope_die;
26599
26600 if (debug_info_level <= DINFO_LEVEL_TERSE)
26601 return;
26602
26603 gcc_assert (decl);
26604
26605 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26606 should be enough; for DWARF4 and older, even if we emit it as an
26607 extension, add the implicit DW_TAG_imported_module anyway
26608 for the benefit of consumers unaware of DW_AT_export_symbols. */
26609 if (implicit
26610 && dwarf_version >= 5
26611 && lang_hooks.decls.decl_dwarf_attribute (decl,
26612 DW_AT_export_symbols) == 1)
26613 return;
26614
26615 set_early_dwarf s;
26616
26617 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we
26618 need two DIEs: the DIE for the imported decl itself and the DIE for
26619 the scope it is imported into. */
26620
26621 /* Get the scope die for decl context. Use comp_unit_die for global module
26622 or decl. If die is not found for non globals, force new die. */
26623 if (context
26624 && TYPE_P (context)
26625 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26626 return;
26627
26628 scope_die = get_context_die (context);
26629
26630 if (child)
26631 {
26632 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26633 there is nothing we can do here. */
26634 if (dwarf_version < 3 && dwarf_strict)
26635 return;
26636
26637 gcc_assert (scope_die->die_child);
26638 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26639 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26640 scope_die = scope_die->die_child;
26641 }
26642
26643 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26644 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26645 }
26646
26647 /* Output debug information for namelists. */
26648
26649 static dw_die_ref
26650 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26651 {
26652 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26653 tree value;
26654 unsigned i;
26655
26656 if (debug_info_level <= DINFO_LEVEL_TERSE)
26657 return NULL;
26658
26659 gcc_assert (scope_die != NULL);
26660 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26661 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26662
26663 /* If there are no item_decls, we have a nondefining namelist, e.g.
26664 with USE association; hence, set DW_AT_declaration. */
26665 if (item_decls == NULL_TREE)
26666 {
26667 add_AT_flag (nml_die, DW_AT_declaration, 1);
26668 return nml_die;
26669 }
26670
26671 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26672 {
26673 nml_item_ref_die = lookup_decl_die (value);
26674 if (!nml_item_ref_die)
26675 nml_item_ref_die = force_decl_die (value);
26676
26677 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26678 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26679 }
26680 return nml_die;
26681 }
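
/* For a Fortran namelist such as "namelist /nml/ a, b" this produces a
   DW_TAG_namelist DIE named "nml" containing one DW_TAG_namelist_item
   child per member, each carrying a DW_AT_namelist_items reference to
   the DIE of the corresponding variable.  */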
26682
26683
26684 /* Write the debugging output for DECL. */
26685
26686 static void
26687 dwarf2out_decl (tree decl)
26688 {
26689 dw_die_ref context_die = comp_unit_die ();
26690
26691 switch (TREE_CODE (decl))
26692 {
26693 case ERROR_MARK:
26694 return;
26695
26696 case FUNCTION_DECL:
26697 /* If we're a nested function, initially use a parent of NULL; if we're
26698 a plain function, this will be fixed up in decls_for_scope. If
26699 we're a method, it will be ignored, since we already have a DIE.
26700 Avoid doing this late though since clones of class methods may
26701 otherwise end up in limbo and create type DIEs late. */
26702 if (early_dwarf
26703 && decl_function_context (decl)
26704 /* But if we're in terse mode, we don't care about scope. */
26705 && debug_info_level > DINFO_LEVEL_TERSE)
26706 context_die = NULL;
26707 break;
26708
26709 case VAR_DECL:
26710 /* For local statics lookup proper context die. */
26711 if (local_function_static (decl))
26712 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26713
26714 /* If we are in terse mode, don't generate any DIEs to represent any
26715 variable declarations or definitions. */
26716 if (debug_info_level <= DINFO_LEVEL_TERSE)
26717 return;
26718 break;
26719
26720 case CONST_DECL:
26721 if (debug_info_level <= DINFO_LEVEL_TERSE)
26722 return;
26723 if (!is_fortran () && !is_ada ())
26724 return;
26725 if (TREE_STATIC (decl) && decl_function_context (decl))
26726 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26727 break;
26728
26729 case NAMESPACE_DECL:
26730 case IMPORTED_DECL:
26731 if (debug_info_level <= DINFO_LEVEL_TERSE)
26732 return;
26733 if (lookup_decl_die (decl) != NULL)
26734 return;
26735 break;
26736
26737 case TYPE_DECL:
26738 /* Don't emit stubs for types unless they are needed by other DIEs. */
26739 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26740 return;
26741
26742 /* Don't bother trying to generate any DIEs to represent any of the
26743 normal built-in types for the language we are compiling. */
26744 if (DECL_IS_BUILTIN (decl))
26745 return;
26746
26747 /* If we are in terse mode, don't generate any DIEs for types. */
26748 if (debug_info_level <= DINFO_LEVEL_TERSE)
26749 return;
26750
26751 /* If we're a function-scope tag, initially use a parent of NULL;
26752 this will be fixed up in decls_for_scope. */
26753 if (decl_function_context (decl))
26754 context_die = NULL;
26755
26756 break;
26757
26758 case NAMELIST_DECL:
26759 break;
26760
26761 default:
26762 return;
26763 }
26764
26765 gen_decl_die (decl, NULL, NULL, context_die);
26766
26767 if (flag_checking)
26768 {
26769 dw_die_ref die = lookup_decl_die (decl);
26770 if (die)
26771 check_die (die);
26772 }
26773 }
26774
26775 /* Write the debugging output for DECL. */
26776
26777 static void
26778 dwarf2out_function_decl (tree decl)
26779 {
26780 dwarf2out_decl (decl);
26781 call_arg_locations = NULL;
26782 call_arg_loc_last = NULL;
26783 call_site_count = -1;
26784 tail_call_site_count = -1;
26785 decl_loc_table->empty ();
26786 cached_dw_loc_list_table->empty ();
26787 }
26788
26789 /* Output a marker (i.e. a label) for the beginning of the generated code for
26790 a lexical block. */
26791
26792 static void
26793 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26794 unsigned int blocknum)
26795 {
26796 switch_to_section (current_function_section ());
26797 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26798 }
26799
26800 /* Output a marker (i.e. a label) for the end of the generated code for a
26801 lexical block. */
26802
26803 static void
26804 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26805 {
26806 switch_to_section (current_function_section ());
26807 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26808 }
26809
26810 /* Returns true if it is appropriate not to emit any debugging
26811 information for BLOCK, because it doesn't contain any instructions.
26812
26813 Don't allow this for blocks with nested functions or local classes
26814 as we would end up with orphans, and in the presence of scheduling
26815 we may end up calling them anyway. */
26816
26817 static bool
26818 dwarf2out_ignore_block (const_tree block)
26819 {
26820 tree decl;
26821 unsigned int i;
26822
26823 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26824 if (TREE_CODE (decl) == FUNCTION_DECL
26825 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26826 return false;
26827 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26828 {
26829 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26830 if (TREE_CODE (decl) == FUNCTION_DECL
26831 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26832 return false;
26833 }
26834
26835 return true;
26836 }
26837
26838 /* Hash table routines for file_hash. */
26839
26840 bool
26841 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26842 {
26843 return filename_cmp (p1->filename, p2) == 0;
26844 }
26845
26846 hashval_t
26847 dwarf_file_hasher::hash (dwarf_file_data *p)
26848 {
26849 return htab_hash_string (p->filename);
26850 }
26851
26852 /* Lookup FILE_NAME (in the list of filenames that we know about here in
26853 dwarf2out.c) and return its "index". The index of each (known) filename is
26854 just a unique number which is associated with only that one filename. We
26855 need such numbers for the sake of generating labels (in the .debug_sfnames
26856 section) and references to those file numbers (in the .debug_srcinfo
26857 and .debug_macinfo sections). If the filename given as an argument is not
26858 found in our current list, add it to the list and assign it the next
26859 available unique index number. */
26860
26861 static struct dwarf_file_data *
26862 lookup_filename (const char *file_name)
26863 {
26864 struct dwarf_file_data * created;
26865
26866 if (!file_name)
26867 return NULL;
26868
26869 dwarf_file_data **slot
26870 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26871 INSERT);
26872 if (*slot)
26873 return *slot;
26874
26875 created = ggc_alloc<dwarf_file_data> ();
26876 created->filename = file_name;
26877 created->emitted_number = 0;
26878 *slot = created;
26879 return created;
26880 }
26881
26882 /* If the assembler will construct the file table, then translate the compiler
26883 internal file table number into the assembler file table number, and emit
26884 a .file directive if we haven't already emitted one yet. The file table
26885 numbers are different because we prune debug info for unused variables and
26886 types, which may include filenames. */
26887
26888 static int
26889 maybe_emit_file (struct dwarf_file_data * fd)
26890 {
26891 if (! fd->emitted_number)
26892 {
26893 if (last_emitted_file)
26894 fd->emitted_number = last_emitted_file->emitted_number + 1;
26895 else
26896 fd->emitted_number = 1;
26897 last_emitted_file = fd;
26898
26899 if (output_asm_line_debug_info ())
26900 {
26901 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26902 output_quoted_string (asm_out_file,
26903 remap_debug_filename (fd->filename));
26904 fputc ('\n', asm_out_file);
26905 }
26906 }
26907
26908 return fd->emitted_number;
26909 }
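
/* As an illustrative sketch, if "foo.c" were the second distinct file to
   be emitted, the directive produced when the assembler constructs the
   file table would look roughly like

        .file 2 "foo.c"

   with the number taken from fd->emitted_number rather than from the
   compiler-internal file table.  */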
26910
26911 /* Schedule generation of a DW_AT_const_value attribute to DIE.
26912 That generation should happen after function debug info has been
26913 generated. The value of the attribute is the constant value of ARG. */
26914
26915 static void
26916 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
26917 {
26918 die_arg_entry entry;
26919
26920 if (!die || !arg)
26921 return;
26922
26923 gcc_assert (early_dwarf);
26924
26925 if (!tmpl_value_parm_die_table)
26926 vec_alloc (tmpl_value_parm_die_table, 32);
26927
26928 entry.die = die;
26929 entry.arg = arg;
26930 vec_safe_push (tmpl_value_parm_die_table, entry);
26931 }
26932
26933 /* Return TRUE if T is an instance of a generic type, FALSE
26934 otherwise. */
26935
26936 static bool
26937 generic_type_p (tree t)
26938 {
26939 if (t == NULL_TREE || !TYPE_P (t))
26940 return false;
26941 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
26942 }
26943
26944 /* Schedule the generation of the generic parameter dies for the
26945 instance of generic type T. The proper generation itself is later
26946 done by gen_scheduled_generic_parms_dies. */
26947
26948 static void
26949 schedule_generic_params_dies_gen (tree t)
26950 {
26951 if (!generic_type_p (t))
26952 return;
26953
26954 gcc_assert (early_dwarf);
26955
26956 if (!generic_type_instances)
26957 vec_alloc (generic_type_instances, 256);
26958
26959 vec_safe_push (generic_type_instances, t);
26960 }
26961
26962 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
26963 by append_entry_to_tmpl_value_parm_die_table. This function must
26964 be called after function DIEs have been generated. */
26965
26966 static void
26967 gen_remaining_tmpl_value_param_die_attribute (void)
26968 {
26969 if (tmpl_value_parm_die_table)
26970 {
26971 unsigned i, j;
26972 die_arg_entry *e;
26973
26974 /* We do this in two phases - first get the cases we can
26975 handle during early-finish, preserving those we cannot
26976 (containing symbolic constants where we don't yet know
26977 whether we are going to output the referenced symbols).
26978 For those we try again at late-finish. */
26979 j = 0;
26980 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
26981 {
26982 if (!e->die->removed
26983 && !tree_add_const_value_attribute (e->die, e->arg))
26984 {
26985 dw_loc_descr_ref loc = NULL;
26986 if (! early_dwarf
26987 && (dwarf_version >= 5 || !dwarf_strict))
26988 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
26989 if (loc)
26990 add_AT_loc (e->die, DW_AT_location, loc);
26991 else
26992 (*tmpl_value_parm_die_table)[j++] = *e;
26993 }
26994 }
26995 tmpl_value_parm_die_table->truncate (j);
26996 }
26997 }
26998
26999 /* Generate generic parameters DIEs for instances of generic types
27000 that have been previously scheduled by
27001 schedule_generic_params_dies_gen. This function must be called
27002 after all the types of the CU have been laid out. */
27003
27004 static void
27005 gen_scheduled_generic_parms_dies (void)
27006 {
27007 unsigned i;
27008 tree t;
27009
27010 if (!generic_type_instances)
27011 return;
27012
27013 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27014 if (COMPLETE_TYPE_P (t))
27015 gen_generic_params_dies (t);
27016
27017 generic_type_instances = NULL;
27018 }
27019
27020
27021 /* Replace DW_AT_name for DECL with NAME. */
27022
27023 static void
27024 dwarf2out_set_name (tree decl, tree name)
27025 {
27026 dw_die_ref die;
27027 dw_attr_node *attr;
27028 const char *dname;
27029
27030 die = TYPE_SYMTAB_DIE (decl);
27031 if (!die)
27032 return;
27033
27034 dname = dwarf2_name (name, 0);
27035 if (!dname)
27036 return;
27037
27038 attr = get_AT (die, DW_AT_name);
27039 if (attr)
27040 {
27041 struct indirect_string_node *node;
27042
27043 node = find_AT_string (dname);
27044 /* Replace the string. */
27045 attr->dw_attr_val.v.val_str = node;
27046 }
27047
27048 else
27049 add_name_attribute (die, dname);
27050 }
27051
27052 /* True if before or during processing of the first function being emitted. */
27053 static bool in_first_function_p = true;
27054 /* True if loc_note during dwarf2out_var_location call might still be
27055 before first real instruction at address equal to .Ltext0. */
27056 static bool maybe_at_text_label_p = true;
27057 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27058 static unsigned int first_loclabel_num_not_at_text_label;
27059
27060 /* Look ahead for a real insn, or for a begin stmt marker. */
27061
27062 static rtx_insn *
27063 dwarf2out_next_real_insn (rtx_insn *loc_note)
27064 {
27065 rtx_insn *next_real = NEXT_INSN (loc_note);
27066
27067 while (next_real)
27068 if (INSN_P (next_real))
27069 break;
27070 else
27071 next_real = NEXT_INSN (next_real);
27072
27073 return next_real;
27074 }
27075
27076 /* Called by the final INSN scan whenever we see a var location. We
27077 use it to drop labels in the right places, and throw the location in
27078 our lookup table. */
27079
27080 static void
27081 dwarf2out_var_location (rtx_insn *loc_note)
27082 {
27083 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27084 struct var_loc_node *newloc;
27085 rtx_insn *next_real, *next_note;
27086 rtx_insn *call_insn = NULL;
27087 static const char *last_label;
27088 static const char *last_postcall_label;
27089 static bool last_in_cold_section_p;
27090 static rtx_insn *expected_next_loc_note;
27091 tree decl;
27092 bool var_loc_p;
27093 var_loc_view view = 0;
27094
27095 if (!NOTE_P (loc_note))
27096 {
27097 if (CALL_P (loc_note))
27098 {
27099 maybe_reset_location_view (loc_note, cur_line_info_table);
27100 call_site_count++;
27101 if (SIBLING_CALL_P (loc_note))
27102 tail_call_site_count++;
27103 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27104 {
27105 call_insn = loc_note;
27106 loc_note = NULL;
27107 var_loc_p = false;
27108
27109 next_real = dwarf2out_next_real_insn (call_insn);
27110 next_note = NULL;
27111 cached_next_real_insn = NULL;
27112 goto create_label;
27113 }
27114 if (optimize == 0 && !flag_var_tracking)
27115 {
27116 /* When the var-tracking pass is not running, there is no note
27117 for indirect calls whose target is compile-time known. In this
27118 case, process such calls specifically so that we generate call
27119 sites for them anyway. */
27120 rtx x = PATTERN (loc_note);
27121 if (GET_CODE (x) == PARALLEL)
27122 x = XVECEXP (x, 0, 0);
27123 if (GET_CODE (x) == SET)
27124 x = SET_SRC (x);
27125 if (GET_CODE (x) == CALL)
27126 x = XEXP (x, 0);
27127 if (!MEM_P (x)
27128 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27129 || !SYMBOL_REF_DECL (XEXP (x, 0))
27130 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27131 != FUNCTION_DECL))
27132 {
27133 call_insn = loc_note;
27134 loc_note = NULL;
27135 var_loc_p = false;
27136
27137 next_real = dwarf2out_next_real_insn (call_insn);
27138 next_note = NULL;
27139 cached_next_real_insn = NULL;
27140 goto create_label;
27141 }
27142 }
27143 }
27144 else if (!debug_variable_location_views)
27145 gcc_unreachable ();
27146 else
27147 maybe_reset_location_view (loc_note, cur_line_info_table);
27148
27149 return;
27150 }
27151
27152 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27153 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27154 return;
27155
27156 /* Optimize processing a large consecutive sequence of location
27157 notes so we don't spend too much time in next_real_insn. If the
27158 next insn is another location note, remember the next_real_insn
27159 calculation for next time. */
27160 next_real = cached_next_real_insn;
27161 if (next_real)
27162 {
27163 if (expected_next_loc_note != loc_note)
27164 next_real = NULL;
27165 }
27166
27167 next_note = NEXT_INSN (loc_note);
27168 if (! next_note
27169 || next_note->deleted ()
27170 || ! NOTE_P (next_note)
27171 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27172 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27173 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27174 next_note = NULL;
27175
27176 if (! next_real)
27177 next_real = dwarf2out_next_real_insn (loc_note);
27178
27179 if (next_note)
27180 {
27181 expected_next_loc_note = next_note;
27182 cached_next_real_insn = next_real;
27183 }
27184 else
27185 cached_next_real_insn = NULL;
27186
27187 /* If there are no instructions which would be affected by this note,
27188 don't do anything. */
27189 if (var_loc_p
27190 && next_real == NULL_RTX
27191 && !NOTE_DURING_CALL_P (loc_note))
27192 return;
27193
27194 create_label:
27195
27196 if (next_real == NULL_RTX)
27197 next_real = get_last_insn ();
27198
27199 /* If there were any real insns between the note we processed last time
27200 and this note (or if it is the first note), clear
27201 last_{,postcall_}label so that they are not reused this time. */
27202 if (last_var_location_insn == NULL_RTX
27203 || last_var_location_insn != next_real
27204 || last_in_cold_section_p != in_cold_section_p)
27205 {
27206 last_label = NULL;
27207 last_postcall_label = NULL;
27208 }
27209
27210 if (var_loc_p)
27211 {
27212 const char *label
27213 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27214 view = cur_line_info_table->view;
27215 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27216 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27217 if (newloc == NULL)
27218 return;
27219 }
27220 else
27221 {
27222 decl = NULL_TREE;
27223 newloc = NULL;
27224 }
27225
27226 /* If there were no real insns between the note we processed last time
27227 and this note, use the label we emitted last time. Otherwise
27228 create a new label and emit it. */
27229 if (last_label == NULL)
27230 {
27231 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27232 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27233 loclabel_num++;
27234 last_label = ggc_strdup (loclabel);
27235 /* See if loclabel might be equal to .Ltext0. If yes,
27236 bump first_loclabel_num_not_at_text_label. */
27237 if (!have_multiple_function_sections
27238 && in_first_function_p
27239 && maybe_at_text_label_p)
27240 {
27241 static rtx_insn *last_start;
27242 rtx_insn *insn;
27243 for (insn = loc_note; insn; insn = previous_insn (insn))
27244 if (insn == last_start)
27245 break;
27246 else if (!NONDEBUG_INSN_P (insn))
27247 continue;
27248 else
27249 {
27250 rtx body = PATTERN (insn);
27251 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27252 continue;
27253 /* Inline asm could occupy zero bytes. */
27254 else if (GET_CODE (body) == ASM_INPUT
27255 || asm_noperands (body) >= 0)
27256 continue;
27257 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27258 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27259 continue;
27260 #endif
27261 else
27262 {
27263 /* Assume insn has non-zero length. */
27264 maybe_at_text_label_p = false;
27265 break;
27266 }
27267 }
27268 if (maybe_at_text_label_p)
27269 {
27270 last_start = loc_note;
27271 first_loclabel_num_not_at_text_label = loclabel_num;
27272 }
27273 }
27274 }
27275
27276 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27277 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27278
27279 if (!var_loc_p)
27280 {
27281 struct call_arg_loc_node *ca_loc
27282 = ggc_cleared_alloc<call_arg_loc_node> ();
27283 rtx_insn *prev = call_insn;
27284
27285 ca_loc->call_arg_loc_note
27286 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27287 ca_loc->next = NULL;
27288 ca_loc->label = last_label;
27289 gcc_assert (prev
27290 && (CALL_P (prev)
27291 || (NONJUMP_INSN_P (prev)
27292 && GET_CODE (PATTERN (prev)) == SEQUENCE
27293 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27294 if (!CALL_P (prev))
27295 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27296 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27297
27298 /* Look for a SYMBOL_REF in the "prev" instruction. */
27299 rtx x = get_call_rtx_from (PATTERN (prev));
27300 if (x)
27301 {
27302 /* Try to get the call symbol, if any. */
27303 if (MEM_P (XEXP (x, 0)))
27304 x = XEXP (x, 0);
27305 /* First, look for a memory access to a symbol_ref. */
27306 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27307 && SYMBOL_REF_DECL (XEXP (x, 0))
27308 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27309 ca_loc->symbol_ref = XEXP (x, 0);
27310 /* Otherwise, look at a compile-time known user-level function
27311 declaration. */
27312 else if (MEM_P (x)
27313 && MEM_EXPR (x)
27314 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27315 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27316 }
27317
27318 ca_loc->block = insn_scope (prev);
27319 if (call_arg_locations)
27320 call_arg_loc_last->next = ca_loc;
27321 else
27322 call_arg_locations = ca_loc;
27323 call_arg_loc_last = ca_loc;
27324 }
27325 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27326 {
27327 newloc->label = last_label;
27328 newloc->view = view;
27329 }
27330 else
27331 {
27332 if (!last_postcall_label)
27333 {
27334 sprintf (loclabel, "%s-1", last_label);
27335 last_postcall_label = ggc_strdup (loclabel);
27336 }
27337 newloc->label = last_postcall_label;
27338 /* ??? This view is at last_label, not last_label-1, but we
27339 could only assume view at last_label-1 is zero if we could
27340 assume calls always have length greater than one. This is
27341 probably true in general, though there might be a rare
27342 exception to this rule, e.g. if a call insn is optimized out
27343 by target magic. Then, even the -1 in the label will be
27344 wrong, which might invalidate the range. Anyway, using view,
27345 though technically possibly incorrect, will work as far as
27346 ranges go: since L-1 is in the middle of the call insn,
27347 (L-1).0 and (L-1).V shouldn't make any difference, and having
27348 the loclist entry refer to the .loc entry might be useful, so
27349 leave it like this. */
27350 newloc->view = view;
27351 }
27352
27353 if (var_loc_p && flag_debug_asm)
27354 {
27355 const char *name, *sep, *patstr;
27356 if (decl && DECL_NAME (decl))
27357 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27358 else
27359 name = "";
27360 if (NOTE_VAR_LOCATION_LOC (loc_note))
27361 {
27362 sep = " => ";
27363 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27364 }
27365 else
27366 {
27367 sep = " ";
27368 patstr = "RESET";
27369 }
27370 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27371 name, sep, patstr);
27372 }
27373
27374 last_var_location_insn = next_real;
27375 last_in_cold_section_p = in_cold_section_p;
27376 }
27377
27378 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27379 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27380 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27381 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27382 BLOCK_FRAGMENT_ORIGIN links. */
27383 static bool
27384 block_within_block_p (tree block, tree outer, bool bothways)
27385 {
27386 if (block == outer)
27387 return true;
27388
27389 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27390 for (tree context = BLOCK_SUPERCONTEXT (block);
27391 context != outer;
27392 context = BLOCK_SUPERCONTEXT (context))
27393 if (!context || TREE_CODE (context) != BLOCK)
27394 return false;
27395
27396 if (!bothways)
27397 return true;
27398
27399 /* Now check that each block is actually referenced by its
27400 parent. */
27401 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27402 context = BLOCK_SUPERCONTEXT (context))
27403 {
27404 if (BLOCK_FRAGMENT_ORIGIN (context))
27405 {
27406 gcc_assert (!BLOCK_SUBBLOCKS (context));
27407 context = BLOCK_FRAGMENT_ORIGIN (context);
27408 }
27409 for (tree sub = BLOCK_SUBBLOCKS (context);
27410 sub != block;
27411 sub = BLOCK_CHAIN (sub))
27412 if (!sub)
27413 return false;
27414 if (context == outer)
27415 return true;
27416 else
27417 block = context;
27418 }
27419 }
27420
27421 /* Called during final while assembling the marker of the entry point
27422 for an inlined function. */
27423
27424 static void
27425 dwarf2out_inline_entry (tree block)
27426 {
27427 gcc_assert (debug_inline_points);
27428
27429 /* If we can't represent it, don't bother. */
27430 if (!(dwarf_version >= 3 || !dwarf_strict))
27431 return;
27432
27433 gcc_assert (DECL_P (block_ultimate_origin (block)));
27434
27435 /* Sanity check the block tree. This would catch a case in which
27436 BLOCK got removed from the tree reachable from the outermost
27437 lexical block, but got retained in markers. It would still link
27438 back to its parents, but some ancestor would be missing a link
27439 down the path to the sub BLOCK. If the block got removed, its
27440 BLOCK_NUMBER will not be a usable value. */
27441 if (flag_checking)
27442 gcc_assert (block_within_block_p (block,
27443 DECL_INITIAL (current_function_decl),
27444 true));
27445
27446 gcc_assert (inlined_function_outer_scope_p (block));
27447 gcc_assert (!lookup_block_die (block));
27448
27449 if (BLOCK_FRAGMENT_ORIGIN (block))
27450 block = BLOCK_FRAGMENT_ORIGIN (block);
27451 /* Can the entry point ever not be at the beginning of an
27452 unfragmented lexical block? */
27453 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27454 || (cur_line_info_table
27455 && !ZERO_VIEW_P (cur_line_info_table->view))))
27456 return;
27457
27458 if (!inline_entry_data_table)
27459 inline_entry_data_table
27460 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27461
27462
27463 inline_entry_data **iedp
27464 = inline_entry_data_table->find_slot_with_hash (block,
27465 htab_hash_pointer (block),
27466 INSERT);
27467 if (*iedp)
27468 /* ??? Ideally, we'd record all entry points for the same inlined
27469 function (some may have been duplicated by e.g. unrolling), but
27470 we have no way to represent that ATM. */
27471 return;
27472
27473 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27474 ied->block = block;
27475 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27476 ied->label_num = BLOCK_NUMBER (block);
27477 if (cur_line_info_table)
27478 ied->view = cur_line_info_table->view;
27479
27480 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27481
27482 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_INLINE_ENTRY_LABEL,
27483 BLOCK_NUMBER (block));
27484 ASM_OUTPUT_LABEL (asm_out_file, label);
27485 }
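/* Illustration (hedged; the exact label spelling is target-dependent): for an
   inlined block whose BLOCK_NUMBER is 3, the code above typically emits a
   label such as

     .LBI3:

   assuming the usual "LBI" spelling of BLOCK_INLINE_ENTRY_LABEL; the recorded
   inline_entry_data later lets the DW_TAG_inlined_subroutine DIE refer to it
   (e.g. via DW_AT_entry_pc).  */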
27486
27487 /* Called from finalize_size_functions for size functions so that their body
27488 can be encoded in the debug info to describe the layout of variable-length
27489 structures. */
27490
27491 static void
27492 dwarf2out_size_function (tree decl)
27493 {
27494 function_to_dwarf_procedure (decl);
27495 }
27496
27497 /* Note in one location list that text section has changed. */
27498
27499 int
27500 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27501 {
27502 var_loc_list *list = *slot;
27503 if (list->first)
27504 list->last_before_switch
27505 = list->last->next ? list->last->next : list->last;
27506 return 1;
27507 }
27508
27509 /* Note in all location lists that text section has changed. */
27510
27511 static void
27512 var_location_switch_text_section (void)
27513 {
27514 if (decl_loc_table == NULL)
27515 return;
27516
27517 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27518 }
27519
27520 /* Create a new line number table. */
27521
27522 static dw_line_info_table *
27523 new_line_info_table (void)
27524 {
27525 dw_line_info_table *table;
27526
27527 table = ggc_cleared_alloc<dw_line_info_table> ();
27528 table->file_num = 1;
27529 table->line_num = 1;
27530 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27531 FORCE_RESET_NEXT_VIEW (table->view);
27532 table->symviews_since_reset = 0;
27533
27534 return table;
27535 }
27536
27537 /* Look up the "current" table into which we emit line info, so
27538 that we don't have to do it for every source line. */
27539
27540 static void
27541 set_cur_line_info_table (section *sec)
27542 {
27543 dw_line_info_table *table;
27544
27545 if (sec == text_section)
27546 table = text_section_line_info;
27547 else if (sec == cold_text_section)
27548 {
27549 table = cold_text_section_line_info;
27550 if (!table)
27551 {
27552 cold_text_section_line_info = table = new_line_info_table ();
27553 table->end_label = cold_end_label;
27554 }
27555 }
27556 else
27557 {
27558 const char *end_label;
27559
27560 if (crtl->has_bb_partition)
27561 {
27562 if (in_cold_section_p)
27563 end_label = crtl->subsections.cold_section_end_label;
27564 else
27565 end_label = crtl->subsections.hot_section_end_label;
27566 }
27567 else
27568 {
27569 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27570 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27571 current_function_funcdef_no);
27572 end_label = ggc_strdup (label);
27573 }
27574
27575 table = new_line_info_table ();
27576 table->end_label = end_label;
27577
27578 vec_safe_push (separate_line_info, table);
27579 }
27580
27581 if (output_asm_line_debug_info ())
27582 table->is_stmt = (cur_line_info_table
27583 ? cur_line_info_table->is_stmt
27584 : DWARF_LINE_DEFAULT_IS_STMT_START);
27585 cur_line_info_table = table;
27586 }
27587
27588
27589 /* We need to reset the locations at the beginning of each
27590 function. We can't do this in the end_function hook, because the
27591 declarations that use the locations won't have been output when
27592 that hook is called. Also compute have_multiple_function_sections here. */
27593
27594 static void
27595 dwarf2out_begin_function (tree fun)
27596 {
27597 section *sec = function_section (fun);
27598
27599 if (sec != text_section)
27600 have_multiple_function_sections = true;
27601
27602 if (crtl->has_bb_partition && !cold_text_section)
27603 {
27604 gcc_assert (current_function_decl == fun);
27605 cold_text_section = unlikely_text_section ();
27606 switch_to_section (cold_text_section);
27607 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27608 switch_to_section (sec);
27609 }
27610
27611 dwarf2out_note_section_used ();
27612 call_site_count = 0;
27613 tail_call_site_count = 0;
27614
27615 set_cur_line_info_table (sec);
27616 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27617 }
27618
27619 /* Helper function of dwarf2out_end_function, called only after emitting
27620 the very first function into assembly. Check if some .debug_loc range
27621 might end with a .LVL* label that could be equal to .Ltext0.
27622 In that case we must force using absolute addresses in .debug_loc ranges,
27623 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27624 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27625 list terminator.
27626 Set have_multiple_function_sections to true in that case and
27627 terminate htab traversal. */
27628
27629 int
27630 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27631 {
27632 var_loc_list *entry = *slot;
27633 struct var_loc_node *node;
27634
27635 node = entry->first;
27636 if (node && node->next && node->next->label)
27637 {
27638 unsigned int i;
27639 const char *label = node->next->label;
27640 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27641
27642 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27643 {
27644 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27645 if (strcmp (label, loclabel) == 0)
27646 {
27647 have_multiple_function_sections = true;
27648 return 0;
27649 }
27650 }
27651 }
27652 return 1;
27653 }
27654
27655 /* Hook called after emitting a function into assembly.
27656 This does something only for the very first function emitted. */
27657
27658 static void
27659 dwarf2out_end_function (unsigned int)
27660 {
27661 if (in_first_function_p
27662 && !have_multiple_function_sections
27663 && first_loclabel_num_not_at_text_label
27664 && decl_loc_table)
27665 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27666 in_first_function_p = false;
27667 maybe_at_text_label_p = false;
27668 }
27669
27670 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27671 front-ends register a translation unit even before dwarf2out_init is
27672 called. */
27673 static tree main_translation_unit = NULL_TREE;
27674
27675 /* Hook called by front-ends after they built their main translation unit.
27676 Associate comp_unit_die to UNIT. */
27677
27678 static void
27679 dwarf2out_register_main_translation_unit (tree unit)
27680 {
27681 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27682 && main_translation_unit == NULL_TREE);
27683 main_translation_unit = unit;
27684 /* If dwarf2out_init has not been called yet, it will perform the association
27685 itself looking at main_translation_unit. */
27686 if (decl_die_table != NULL)
27687 equate_decl_number_to_die (unit, comp_unit_die ());
27688 }
27689
27690 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27691
27692 static void
27693 push_dw_line_info_entry (dw_line_info_table *table,
27694 enum dw_line_info_opcode opcode, unsigned int val)
27695 {
27696 dw_line_info_entry e;
27697 e.opcode = opcode;
27698 e.val = val;
27699 vec_safe_push (table->entries, e);
27700 }
27701
27702 /* Output a label to mark the beginning of a source code line entry
27703 and record information relating to this source line, in
27704 'line_info_table' for later output of the .debug_line section. */
27705 /* ??? The discriminator parameter ought to be unsigned. */
27706
27707 static void
27708 dwarf2out_source_line (unsigned int line, unsigned int column,
27709 const char *filename,
27710 int discriminator, bool is_stmt)
27711 {
27712 unsigned int file_num;
27713 dw_line_info_table *table;
27714 static var_loc_view lvugid;
27715
27716 if (debug_info_level < DINFO_LEVEL_TERSE)
27717 return;
27718
27719 table = cur_line_info_table;
27720
27721 if (line == 0)
27722 {
27723 if (debug_variable_location_views
27724 && output_asm_line_debug_info ()
27725 && table && !RESETTING_VIEW_P (table->view))
27726 {
27727 /* If we're using the assembler to compute view numbers, we
27728 can't issue a .loc directive for line zero, so we can't
27729 get a view number at this point. We might attempt to
27730 compute it from the previous view, or equate it to a
27731 subsequent view (though it might not be there!), but
27732 since we're omitting the line number entry, we might as
27733 well omit the view number as well. That means pretending
27734 it's a view number zero, which might very well turn out
27735 to be correct. ??? Extend the assembler so that the
27736 compiler could emit e.g. ".locview .LVU#", to output a
27737 view without changing line number information. We'd then
27738 have to count it in symviews_since_reset; when it's omitted,
27739 it doesn't count. */
27740 if (!zero_view_p)
27741 zero_view_p = BITMAP_GGC_ALLOC ();
27742 bitmap_set_bit (zero_view_p, table->view);
27743 if (flag_debug_asm)
27744 {
27745 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27746 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27747 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27748 ASM_COMMENT_START);
27749 assemble_name (asm_out_file, label);
27750 putc ('\n', asm_out_file);
27751 }
27752 table->view = ++lvugid;
27753 }
27754 return;
27755 }
27756
27757 /* The discriminator column was added in DWARF 4. Simplify the code
27758 below by removing it if we're not supposed to output it. */
27759 if (dwarf_version < 4 && dwarf_strict)
27760 discriminator = 0;
27761
27762 if (!debug_column_info)
27763 column = 0;
27764
27765 file_num = maybe_emit_file (lookup_filename (filename));
27766
27767 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27768 the debugger has used the second (possibly duplicate) line number
27769 at the beginning of the function to mark the end of the prologue.
27770 We could eliminate any other duplicates within the function. For
27771 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27772 that second line number entry. */
27773 /* Recall that this end-of-prologue indication is *not* the same thing
27774 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27775 to which the hook corresponds, follows the last insn that was
27776 emitted by gen_prologue. What we need is to precede the first insn
27777 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27778 insn that corresponds to something the user wrote. These may be
27779 very different locations once scheduling is enabled. */
27780
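/* NB: the duplicate-entry elision below is intentionally disabled (hence the
   "0 &&"), presumably until the prologue-end concerns above are addressed.  */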
27781 if (0 && file_num == table->file_num
27782 && line == table->line_num
27783 && column == table->column_num
27784 && discriminator == table->discrim_num
27785 && is_stmt == table->is_stmt)
27786 return;
27787
27788 switch_to_section (current_function_section ());
27789
27790 /* If requested, emit something human-readable. */
27791 if (flag_debug_asm)
27792 {
27793 if (debug_column_info)
27794 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27795 filename, line, column);
27796 else
27797 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27798 filename, line);
27799 }
27800
27801 if (output_asm_line_debug_info ())
27802 {
27803 /* Emit the .loc directive understood by GNU as. */
27804 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
27805 file_num, line, is_stmt, discriminator */
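/* Purely illustrative (the numbers are invented): with column info and
   location views enabled, the directive built up below might come out as

     .loc 1 42 7 is_stmt 1 discriminator 2 view .LVU5

   where "is_stmt" only appears when it changes and "discriminator" only
   when it is nonzero.  */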
27806 fputs ("\t.loc ", asm_out_file);
27807 fprint_ul (asm_out_file, file_num);
27808 putc (' ', asm_out_file);
27809 fprint_ul (asm_out_file, line);
27810 putc (' ', asm_out_file);
27811 fprint_ul (asm_out_file, column);
27812
27813 if (is_stmt != table->is_stmt)
27814 {
27815 #if HAVE_GAS_LOC_STMT
27816 fputs (" is_stmt ", asm_out_file);
27817 putc (is_stmt ? '1' : '0', asm_out_file);
27818 #endif
27819 }
27820 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27821 {
27822 gcc_assert (discriminator > 0);
27823 fputs (" discriminator ", asm_out_file);
27824 fprint_ul (asm_out_file, (unsigned long) discriminator);
27825 }
27826 if (debug_variable_location_views)
27827 {
27828 if (!RESETTING_VIEW_P (table->view))
27829 {
27830 table->symviews_since_reset++;
27831 if (table->symviews_since_reset > symview_upper_bound)
27832 symview_upper_bound = table->symviews_since_reset;
27833 /* When we're using the assembler to compute view
27834 numbers, we output symbolic labels after "view" in
27835 .loc directives, and the assembler will set them for
27836 us, so that we can refer to the view numbers in
27837 location lists. The only exceptions are when we know
27838 a view will be zero: "-0" is a forced reset, used
27839 e.g. in the beginning of functions, whereas "0" tells
27840 the assembler to check that there was a PC change
27841 since the previous view, in a way that implicitly
27842 resets the next view. */
27843 fputs (" view ", asm_out_file);
27844 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27845 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27846 assemble_name (asm_out_file, label);
27847 table->view = ++lvugid;
27848 }
27849 else
27850 {
27851 table->symviews_since_reset = 0;
27852 if (FORCE_RESETTING_VIEW_P (table->view))
27853 fputs (" view -0", asm_out_file);
27854 else
27855 fputs (" view 0", asm_out_file);
27856 /* Mark the present view as a zero view. Earlier debug
27857 binds may have already added its id to loclists to be
27858 emitted later, so we can't reuse the id for something
27859 else. However, it's good to know whether a view is
27860 known to be zero, because then we may be able to
27861 optimize out locviews that are all zeros, so take
27862 note of it in zero_view_p. */
27863 if (!zero_view_p)
27864 zero_view_p = BITMAP_GGC_ALLOC ();
27865 bitmap_set_bit (zero_view_p, lvugid);
27866 table->view = ++lvugid;
27867 }
27868 }
27869 putc ('\n', asm_out_file);
27870 }
27871 else
27872 {
27873 unsigned int label_num = ++line_info_label_num;
27874
27875 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
27876
27877 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
27878 push_dw_line_info_entry (table, LI_adv_address, label_num);
27879 else
27880 push_dw_line_info_entry (table, LI_set_address, label_num);
27881 if (debug_variable_location_views)
27882 {
27883 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
27884 if (resetting)
27885 table->view = 0;
27886
27887 if (flag_debug_asm)
27888 fprintf (asm_out_file, "\t%s view %s%d\n",
27889 ASM_COMMENT_START,
27890 resetting ? "-" : "",
27891 table->view);
27892
27893 table->view++;
27894 }
27895 if (file_num != table->file_num)
27896 push_dw_line_info_entry (table, LI_set_file, file_num);
27897 if (discriminator != table->discrim_num)
27898 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
27899 if (is_stmt != table->is_stmt)
27900 push_dw_line_info_entry (table, LI_negate_stmt, 0);
27901 push_dw_line_info_entry (table, LI_set_line, line);
27902 if (debug_column_info)
27903 push_dw_line_info_entry (table, LI_set_column, column);
27904 }
27905
27906 table->file_num = file_num;
27907 table->line_num = line;
27908 table->column_num = column;
27909 table->discrim_num = discriminator;
27910 table->is_stmt = is_stmt;
27911 table->in_use = true;
27912 }
27913
27914 /* Record the beginning of a new source file. */
27915
27916 static void
27917 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
27918 {
27919 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27920 {
27921 macinfo_entry e;
27922 e.code = DW_MACINFO_start_file;
27923 e.lineno = lineno;
27924 e.info = ggc_strdup (filename);
27925 vec_safe_push (macinfo_table, e);
27926 }
27927 }
27928
27929 /* Record the end of a source file. */
27930
27931 static void
27932 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
27933 {
27934 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27935 {
27936 macinfo_entry e;
27937 e.code = DW_MACINFO_end_file;
27938 e.lineno = lineno;
27939 e.info = NULL;
27940 vec_safe_push (macinfo_table, e);
27941 }
27942 }
27943
27944 /* Called from debug_define in toplev.c. The `buffer' parameter contains
27945 the tail of the directive line, i.e. the part past the initial
27946 whitespace, '#', whitespace, directive name and trailing whitespace. */
27947
27948 static void
27949 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
27950 const char *buffer ATTRIBUTE_UNUSED)
27951 {
27952 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27953 {
27954 macinfo_entry e;
27955 /* Insert a dummy first entry to be able to optimize the whole
27956 predefined macro block using DW_MACRO_import. */
27957 if (macinfo_table->is_empty () && lineno <= 1)
27958 {
27959 e.code = 0;
27960 e.lineno = 0;
27961 e.info = NULL;
27962 vec_safe_push (macinfo_table, e);
27963 }
27964 e.code = DW_MACINFO_define;
27965 e.lineno = lineno;
27966 e.info = ggc_strdup (buffer);
27967 vec_safe_push (macinfo_table, e);
27968 }
27969 }
27970
27971 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
27972 the tail of the directive line, i.e. the part past the initial
27973 whitespace, '#', whitespace, directive name and trailing whitespace. */
27974
27975 static void
27976 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
27977 const char *buffer ATTRIBUTE_UNUSED)
27978 {
27979 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27980 {
27981 macinfo_entry e;
27982 /* Insert a dummy first entry to be able to optimize the whole
27983 predefined macro block using DW_MACRO_import. */
27984 if (macinfo_table->is_empty () && lineno <= 1)
27985 {
27986 e.code = 0;
27987 e.lineno = 0;
27988 e.info = NULL;
27989 vec_safe_push (macinfo_table, e);
27990 }
27991 e.code = DW_MACINFO_undef;
27992 e.lineno = lineno;
27993 e.info = ggc_strdup (buffer);
27994 vec_safe_push (macinfo_table, e);
27995 }
27996 }
27997
27998 /* Helpers to manipulate the hash table of macinfo entries. */
27999
28000 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28001 {
28002 static inline hashval_t hash (const macinfo_entry *);
28003 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28004 };
28005
28006 inline hashval_t
28007 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28008 {
28009 return htab_hash_string (entry->info);
28010 }
28011
28012 inline bool
28013 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28014 const macinfo_entry *entry2)
28015 {
28016 return !strcmp (entry1->info, entry2->info);
28017 }
28018
28019 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28020
28021 /* Output a single .debug_macinfo entry. */
28022
28023 static void
28024 output_macinfo_op (macinfo_entry *ref)
28025 {
28026 int file_num;
28027 size_t len;
28028 struct indirect_string_node *node;
28029 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28030 struct dwarf_file_data *fd;
28031
28032 switch (ref->code)
28033 {
28034 case DW_MACINFO_start_file:
28035 fd = lookup_filename (ref->info);
28036 file_num = maybe_emit_file (fd);
28037 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28038 dw2_asm_output_data_uleb128 (ref->lineno,
28039 "Included from line number %lu",
28040 (unsigned long) ref->lineno);
28041 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28042 break;
28043 case DW_MACINFO_end_file:
28044 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28045 break;
28046 case DW_MACINFO_define:
28047 case DW_MACINFO_undef:
28048 len = strlen (ref->info) + 1;
28049 if (!dwarf_strict
28050 && len > DWARF_OFFSET_SIZE
28051 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28052 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28053 {
28054 ref->code = ref->code == DW_MACINFO_define
28055 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28056 output_macinfo_op (ref);
28057 return;
28058 }
28059 dw2_asm_output_data (1, ref->code,
28060 ref->code == DW_MACINFO_define
28061 ? "Define macro" : "Undefine macro");
28062 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28063 (unsigned long) ref->lineno);
28064 dw2_asm_output_nstring (ref->info, -1, "The macro");
28065 break;
28066 case DW_MACRO_define_strp:
28067 case DW_MACRO_undef_strp:
28068 node = find_AT_string (ref->info);
28069 gcc_assert (node
28070 && (node->form == DW_FORM_strp
28071 || node->form == dwarf_FORM (DW_FORM_strx)));
28072 dw2_asm_output_data (1, ref->code,
28073 ref->code == DW_MACRO_define_strp
28074 ? "Define macro strp"
28075 : "Undefine macro strp");
28076 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28077 (unsigned long) ref->lineno);
28078 if (node->form == DW_FORM_strp)
28079 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28080 debug_str_section, "The macro: \"%s\"",
28081 ref->info);
28082 else
28083 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28084 ref->info);
28085 break;
28086 case DW_MACRO_import:
28087 dw2_asm_output_data (1, ref->code, "Import");
28088 ASM_GENERATE_INTERNAL_LABEL (label,
28089 DEBUG_MACRO_SECTION_LABEL,
28090 ref->lineno + macinfo_label_base);
28091 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28092 break;
28093 default:
28094 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28095 ASM_COMMENT_START, (unsigned long) ref->code);
28096 break;
28097 }
28098 }
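/* A hypothetical example (not from the original sources): a plain
   DW_MACINFO_define of "FOO 1" at line 3 emits the opcode byte, the
   uleb128-encoded line number and the NUL-terminated definition, roughly:

     .byte    0x1       (Define macro)
     .uleb128 0x3       (At line number 3)
     .ascii   "FOO 1\0" (The macro)

   The exact assembler spelling depends on dw2_asm_output_* and the target.  */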
28099
28100 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28101 other compilation units' .debug_macinfo sections. IDX is the index of
28102 the first define/undef op in the sequence; return the number of ops
28103 that should be emitted in a comdat .debug_macinfo section and emit
28104 a DW_MACRO_import entry referencing it.
28105 If the define/undef entry should be emitted normally, return 0. */
28106
28107 static unsigned
28108 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28109 macinfo_hash_type **macinfo_htab)
28110 {
28111 macinfo_entry *first, *second, *cur, *inc;
28112 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28113 unsigned char checksum[16];
28114 struct md5_ctx ctx;
28115 char *grp_name, *tail;
28116 const char *base;
28117 unsigned int i, count, encoded_filename_len, linebuf_len;
28118 macinfo_entry **slot;
28119
28120 first = &(*macinfo_table)[idx];
28121 second = &(*macinfo_table)[idx + 1];
28122
28123 /* Optimize only if there are at least two consecutive define/undef ops,
28124 and either all of them are before first DW_MACINFO_start_file
28125 with lineno {0,1} (i.e. predefined macro block), or all of them are
28126 in some included header file. */
28127 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28128 return 0;
28129 if (vec_safe_is_empty (files))
28130 {
28131 if (first->lineno > 1 || second->lineno > 1)
28132 return 0;
28133 }
28134 else if (first->lineno == 0)
28135 return 0;
28136
28137 /* Find the last define/undef entry that can be grouped together
28138 with FIRST, and at the same time compute the MD5 checksum of their
28139 codes, line numbers and strings. */
28140 md5_init_ctx (&ctx);
28141 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28142 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28143 break;
28144 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28145 break;
28146 else
28147 {
28148 unsigned char code = cur->code;
28149 md5_process_bytes (&code, 1, &ctx);
28150 checksum_uleb128 (cur->lineno, &ctx);
28151 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28152 }
28153 md5_finish_ctx (&ctx, checksum);
28154 count = i - idx;
28155
28156 /* From the containing include filename (if any) pick up just
28157 usable characters from its basename. */
28158 if (vec_safe_is_empty (files))
28159 base = "";
28160 else
28161 base = lbasename (files->last ().info);
28162 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28163 if (ISIDNUM (base[i]) || base[i] == '.')
28164 encoded_filename_len++;
28165 /* Account for the '.' appended at the end. */
28166 if (encoded_filename_len)
28167 encoded_filename_len++;
28168
28169 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28170 linebuf_len = strlen (linebuf);
28171
28172 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
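   /* For instance (values invented for illustration), a group of defines in
      stdio.h starting at line 10 with 32-bit DWARF offsets might be named

        wm4.stdio.h.10.0123456789abcdef0123456789abcdef

      i.e. "wm4." + the sanitized basename + "." + the line number + "." +
      the 32 hex digits of the md5 checksum computed above.  */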
28173 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28174 + 16 * 2 + 1);
28175 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28176 tail = grp_name + 4;
28177 if (encoded_filename_len)
28178 {
28179 for (i = 0; base[i]; i++)
28180 if (ISIDNUM (base[i]) || base[i] == '.')
28181 *tail++ = base[i];
28182 *tail++ = '.';
28183 }
28184 memcpy (tail, linebuf, linebuf_len);
28185 tail += linebuf_len;
28186 *tail++ = '.';
28187 for (i = 0; i < 16; i++)
28188 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28189
28190 /* Construct a macinfo_entry for DW_MACRO_import
28191 in the empty vector entry before the first define/undef. */
28192 inc = &(*macinfo_table)[idx - 1];
28193 inc->code = DW_MACRO_import;
28194 inc->lineno = 0;
28195 inc->info = ggc_strdup (grp_name);
28196 if (!*macinfo_htab)
28197 *macinfo_htab = new macinfo_hash_type (10);
28198 /* Avoid emitting duplicates. */
28199 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28200 if (*slot != NULL)
28201 {
28202 inc->code = 0;
28203 inc->info = NULL;
28204 /* If such an entry has been used before, just emit
28205 a DW_MACRO_import op. */
28206 inc = *slot;
28207 output_macinfo_op (inc);
28208 /* And clear all macinfo_entries in the range to avoid emitting them
28209 in the second pass. */
28210 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28211 {
28212 cur->code = 0;
28213 cur->info = NULL;
28214 }
28215 }
28216 else
28217 {
28218 *slot = inc;
28219 inc->lineno = (*macinfo_htab)->elements ();
28220 output_macinfo_op (inc);
28221 }
28222 return count;
28223 }
28224
28225 /* Save any strings needed by the macinfo table in the debug str
28226 table. All strings must be collected into the table by the time
28227 index_string is called. */
28228
28229 static void
28230 save_macinfo_strings (void)
28231 {
28232 unsigned len;
28233 unsigned i;
28234 macinfo_entry *ref;
28235
28236 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28237 {
28238 switch (ref->code)
28239 {
28240 /* Match the logic in output_macinfo_op to decide on
28241 indirect strings. */
28242 case DW_MACINFO_define:
28243 case DW_MACINFO_undef:
28244 len = strlen (ref->info) + 1;
28245 if (!dwarf_strict
28246 && len > DWARF_OFFSET_SIZE
28247 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28248 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28249 set_indirect_string (find_AT_string (ref->info));
28250 break;
28251 case DW_MACINFO_start_file:
28252 /* -gsplit-dwarf -g3 will also output filename as indirect
28253 string. */
28254 if (!dwarf_split_debug_info)
28255 break;
28256 /* Fall through. */
28257 case DW_MACRO_define_strp:
28258 case DW_MACRO_undef_strp:
28259 set_indirect_string (find_AT_string (ref->info));
28260 break;
28261 default:
28262 break;
28263 }
28264 }
28265 }
28266
28267 /* Output macinfo section(s). */
28268
28269 static void
28270 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28271 {
28272 unsigned i;
28273 unsigned long length = vec_safe_length (macinfo_table);
28274 macinfo_entry *ref;
28275 vec<macinfo_entry, va_gc> *files = NULL;
28276 macinfo_hash_type *macinfo_htab = NULL;
28277 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28278
28279 if (! length)
28280 return;
28281
28282 /* output_macinfo* uses these interchangeably. */
28283 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28284 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28285 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28286 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28287
28288 /* AIX Assembler inserts the length, so adjust the reference to match the
28289 offset expected by debuggers. */
28290 strcpy (dl_section_ref, debug_line_label);
28291 if (XCOFF_DEBUGGING_INFO)
28292 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28293
28294 /* For .debug_macro emit the section header. */
28295 if (!dwarf_strict || dwarf_version >= 5)
28296 {
28297 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28298 "DWARF macro version number");
28299 if (DWARF_OFFSET_SIZE == 8)
28300 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28301 else
28302 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28303 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28304 debug_line_section, NULL);
28305 }
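/* Illustrative sketch of the header emitted above for 32-bit DWARF (label
   name and directive spellings are hypothetical and target-dependent):

     .value 0x4              (DWARF macro version number; 5 for -gdwarf-5)
     .byte  0x2              (Flags: 32-bit, lineptr present)
     .long  .Ldebug_line0    (offset into the .debug_line section)  */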
28306
28307 /* The first loop emits the primary .debug_macinfo section, clearing
28308 each macinfo_entry after its op has been emitted. If a longer range
28309 of define/undef ops can be optimized using DW_MACRO_import, the
28310 DW_MACRO_import op is emitted and kept in the vector entry just before
28311 the first define/undef in the range, while the define/undef ops
28312 themselves are not emitted here but are kept for the second pass. */
28313 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28314 {
28315 switch (ref->code)
28316 {
28317 case DW_MACINFO_start_file:
28318 vec_safe_push (files, *ref);
28319 break;
28320 case DW_MACINFO_end_file:
28321 if (!vec_safe_is_empty (files))
28322 files->pop ();
28323 break;
28324 case DW_MACINFO_define:
28325 case DW_MACINFO_undef:
28326 if ((!dwarf_strict || dwarf_version >= 5)
28327 && HAVE_COMDAT_GROUP
28328 && vec_safe_length (files) != 1
28329 && i > 0
28330 && i + 1 < length
28331 && (*macinfo_table)[i - 1].code == 0)
28332 {
28333 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28334 if (count)
28335 {
28336 i += count - 1;
28337 continue;
28338 }
28339 }
28340 break;
28341 case 0:
28342 /* A dummy entry may be inserted at the beginning to be able
28343 to optimize the whole block of predefined macros. */
28344 if (i == 0)
28345 continue;
28346 default:
28347 break;
28348 }
28349 output_macinfo_op (ref);
28350 ref->info = NULL;
28351 ref->code = 0;
28352 }
28353
28354 if (!macinfo_htab)
28355 return;
28356
28357 /* Save the number of transparent includes so we can adjust the
28358 label number for the fat LTO object DWARF. */
28359 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28360
28361 delete macinfo_htab;
28362 macinfo_htab = NULL;
28363
28364 /* If any DW_MACRO_import entries were used, then at each such entry
28365 terminate the current chain, switch to a new comdat .debug_macinfo
28366 section, and emit the define/undef entries within it. */
28367 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28368 switch (ref->code)
28369 {
28370 case 0:
28371 continue;
28372 case DW_MACRO_import:
28373 {
28374 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28375 tree comdat_key = get_identifier (ref->info);
28376 /* Terminate the previous .debug_macinfo section. */
28377 dw2_asm_output_data (1, 0, "End compilation unit");
28378 targetm.asm_out.named_section (debug_macinfo_section_name,
28379 SECTION_DEBUG
28380 | SECTION_LINKONCE
28381 | (early_lto_debug
28382 ? SECTION_EXCLUDE : 0),
28383 comdat_key);
28384 ASM_GENERATE_INTERNAL_LABEL (label,
28385 DEBUG_MACRO_SECTION_LABEL,
28386 ref->lineno + macinfo_label_base);
28387 ASM_OUTPUT_LABEL (asm_out_file, label);
28388 ref->code = 0;
28389 ref->info = NULL;
28390 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28391 "DWARF macro version number");
28392 if (DWARF_OFFSET_SIZE == 8)
28393 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28394 else
28395 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28396 }
28397 break;
28398 case DW_MACINFO_define:
28399 case DW_MACINFO_undef:
28400 output_macinfo_op (ref);
28401 ref->code = 0;
28402 ref->info = NULL;
28403 break;
28404 default:
28405 gcc_unreachable ();
28406 }
28407
28408 macinfo_label_base += macinfo_label_base_adj;
28409 }
28410
28411 /* Initialize the various sections and labels for dwarf output; if
28412 EARLY_LTO_DEBUG is set, use the early LTO debug variants. Returns the
28413 generation (zero-based count of times the function was called). */
28414
28415 static unsigned
28416 init_sections_and_labels (bool early_lto_debug)
28417 {
28418 /* As we may get called multiple times have a generation count for
28419 labels. */
28420 static unsigned generation = 0;
28421
28422 if (early_lto_debug)
28423 {
28424 if (!dwarf_split_debug_info)
28425 {
28426 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28427 SECTION_DEBUG | SECTION_EXCLUDE,
28428 NULL);
28429 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28430 SECTION_DEBUG | SECTION_EXCLUDE,
28431 NULL);
28432 debug_macinfo_section_name
28433 = ((dwarf_strict && dwarf_version < 5)
28434 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28435 debug_macinfo_section = get_section (debug_macinfo_section_name,
28436 SECTION_DEBUG
28437 | SECTION_EXCLUDE, NULL);
28438 }
28439 else
28440 {
28441 /* ??? Which of the following do we need early? */
28442 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28443 SECTION_DEBUG | SECTION_EXCLUDE,
28444 NULL);
28445 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28446 SECTION_DEBUG | SECTION_EXCLUDE,
28447 NULL);
28448 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28449 SECTION_DEBUG
28450 | SECTION_EXCLUDE, NULL);
28451 debug_skeleton_abbrev_section
28452 = get_section (DEBUG_LTO_ABBREV_SECTION,
28453 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28454 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28455 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28456 generation);
28457
28458 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28459 stay in the main .o, but the skeleton_line goes into the split
28460 off dwo. */
28461 debug_skeleton_line_section
28462 = get_section (DEBUG_LTO_LINE_SECTION,
28463 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28464 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28465 DEBUG_SKELETON_LINE_SECTION_LABEL,
28466 generation);
28467 debug_str_offsets_section
28468 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28469 SECTION_DEBUG | SECTION_EXCLUDE,
28470 NULL);
28471 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28472 DEBUG_SKELETON_INFO_SECTION_LABEL,
28473 generation);
28474 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28475 DEBUG_STR_DWO_SECTION_FLAGS,
28476 NULL);
28477 debug_macinfo_section_name
28478 = ((dwarf_strict && dwarf_version < 5)
28479 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28480 debug_macinfo_section = get_section (debug_macinfo_section_name,
28481 SECTION_DEBUG | SECTION_EXCLUDE,
28482 NULL);
28483 }
28484 /* For macro info and the file table we have to refer to a
28485 debug_line section. */
28486 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28487 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28488 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28489 DEBUG_LINE_SECTION_LABEL, generation);
28490
28491 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28492 DEBUG_STR_SECTION_FLAGS
28493 | SECTION_EXCLUDE, NULL);
28494 if (!dwarf_split_debug_info)
28495 debug_line_str_section
28496 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28497 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28498 }
28499 else
28500 {
28501 if (!dwarf_split_debug_info)
28502 {
28503 debug_info_section = get_section (DEBUG_INFO_SECTION,
28504 SECTION_DEBUG, NULL);
28505 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28506 SECTION_DEBUG, NULL);
28507 debug_loc_section = get_section (dwarf_version >= 5
28508 ? DEBUG_LOCLISTS_SECTION
28509 : DEBUG_LOC_SECTION,
28510 SECTION_DEBUG, NULL);
28511 debug_macinfo_section_name
28512 = ((dwarf_strict && dwarf_version < 5)
28513 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28514 debug_macinfo_section = get_section (debug_macinfo_section_name,
28515 SECTION_DEBUG, NULL);
28516 }
28517 else
28518 {
28519 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28520 SECTION_DEBUG | SECTION_EXCLUDE,
28521 NULL);
28522 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28523 SECTION_DEBUG | SECTION_EXCLUDE,
28524 NULL);
28525 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28526 SECTION_DEBUG, NULL);
28527 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28528 SECTION_DEBUG, NULL);
28529 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28530 SECTION_DEBUG, NULL);
28531 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28532 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28533 generation);
28534
28535 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28536 stay in the main .o, but the skeleton_line goes into the
28537 split off dwo. */
28538 debug_skeleton_line_section
28539 = get_section (DEBUG_DWO_LINE_SECTION,
28540 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28541 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28542 DEBUG_SKELETON_LINE_SECTION_LABEL,
28543 generation);
28544 debug_str_offsets_section
28545 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28546 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28547 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28548 DEBUG_SKELETON_INFO_SECTION_LABEL,
28549 generation);
28550 debug_loc_section = get_section (dwarf_version >= 5
28551 ? DEBUG_DWO_LOCLISTS_SECTION
28552 : DEBUG_DWO_LOC_SECTION,
28553 SECTION_DEBUG | SECTION_EXCLUDE,
28554 NULL);
28555 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28556 DEBUG_STR_DWO_SECTION_FLAGS,
28557 NULL);
28558 debug_macinfo_section_name
28559 = ((dwarf_strict && dwarf_version < 5)
28560 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28561 debug_macinfo_section = get_section (debug_macinfo_section_name,
28562 SECTION_DEBUG | SECTION_EXCLUDE,
28563 NULL);
28564 }
28565 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28566 SECTION_DEBUG, NULL);
28567 debug_line_section = get_section (DEBUG_LINE_SECTION,
28568 SECTION_DEBUG, NULL);
28569 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28570 SECTION_DEBUG, NULL);
28571 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28572 SECTION_DEBUG, NULL);
28573 debug_str_section = get_section (DEBUG_STR_SECTION,
28574 DEBUG_STR_SECTION_FLAGS, NULL);
28575 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28576 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28577 DEBUG_STR_SECTION_FLAGS, NULL);
28578
28579 debug_ranges_section = get_section (dwarf_version >= 5
28580 ? DEBUG_RNGLISTS_SECTION
28581 : DEBUG_RANGES_SECTION,
28582 SECTION_DEBUG, NULL);
28583 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28584 SECTION_DEBUG, NULL);
28585 }
28586
28587 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28588 DEBUG_ABBREV_SECTION_LABEL, generation);
28589 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28590 DEBUG_INFO_SECTION_LABEL, generation);
28591 info_section_emitted = false;
28592 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28593 DEBUG_LINE_SECTION_LABEL, generation);
28594 /* There are up to 4 unique ranges labels per generation.
28595 See also output_rnglists. */
28596 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28597 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28598 if (dwarf_version >= 5 && dwarf_split_debug_info)
28599 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28600 DEBUG_RANGES_SECTION_LABEL,
28601 1 + generation * 4);
28602 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28603 DEBUG_ADDR_SECTION_LABEL, generation);
28604 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28605 (dwarf_strict && dwarf_version < 5)
28606 ? DEBUG_MACINFO_SECTION_LABEL
28607 : DEBUG_MACRO_SECTION_LABEL, generation);
28608 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28609 generation);
28610
28611 ++generation;
28612 return generation - 1;
28613 }
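/* Usage note (an inference, not from the original sources): the returned
   generation is what keeps the internal labels distinct across repeated
   calls, e.g. the early LTO debug pass and the late pass each get their own
   .Ldebug_info<N> / .Ldebug_line<N> style labels.  */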
28614
28615 /* Set up for Dwarf output at the start of compilation. */
28616
28617 static void
28618 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28619 {
28620 /* Allocate the file_table. */
28621 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28622
28623 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28624 /* Allocate the decl_die_table. */
28625 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28626
28627 /* Allocate the decl_loc_table. */
28628 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28629
28630 /* Allocate the cached_dw_loc_list_table. */
28631 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28632
28633 /* Allocate the initial hunk of the abbrev_die_table. */
28634 vec_alloc (abbrev_die_table, 256);
28635 /* Zero-th entry is allocated, but unused. */
28636 abbrev_die_table->quick_push (NULL);
28637
28638 /* Allocate the dwarf_proc_stack_usage_map. */
28639 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28640
28641 /* Allocate the pubtypes and pubnames vectors. */
28642 vec_alloc (pubname_table, 32);
28643 vec_alloc (pubtype_table, 32);
28644
28645 vec_alloc (incomplete_types, 64);
28646
28647 vec_alloc (used_rtx_array, 32);
28648
28649 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28650 vec_alloc (macinfo_table, 64);
28651 #endif
28652
28653 /* If front-ends already registered a main translation unit but we were not
28654 ready to perform the association, do this now. */
28655 if (main_translation_unit != NULL_TREE)
28656 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28657 }
28658
28659 /* Called before compile () starts outputting functions, variables
28660 and toplevel asms into assembly. */
28661
28662 static void
28663 dwarf2out_assembly_start (void)
28664 {
28665 if (text_section_line_info)
28666 return;
28667
28668 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28669 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28670 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28671 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28672 COLD_TEXT_SECTION_LABEL, 0);
28673 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28674
28675 switch_to_section (text_section);
28676 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28677 #endif
28678
28679 /* Make sure the line number table for .text always exists. */
28680 text_section_line_info = new_line_info_table ();
28681 text_section_line_info->end_label = text_end_label;
28682
28683 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28684 cur_line_info_table = text_section_line_info;
28685 #endif
28686
28687 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28688 && dwarf2out_do_cfi_asm ()
28689 && !dwarf2out_do_eh_frame ())
28690 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28691 }
28692
28693 /* A helper function for dwarf2out_finish called through
28694 htab_traverse. Assign a string its index. All strings must be
28695 collected into the table by the time index_string is called,
28696 because the indexing code relies on htab_traverse to traverse nodes
28697 in the same order for each run. */
28698
28699 int
28700 index_string (indirect_string_node **h, unsigned int *index)
28701 {
28702 indirect_string_node *node = *h;
28703
28704 find_string_form (node);
28705 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28706 {
28707 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28708 node->index = *index;
28709 *index += 1;
28710 }
28711 return 1;
28712 }
28713
28714 /* A helper function for output_indirect_strings called through
28715 htab_traverse. Output the offset to a string and update the
28716 current offset. */
28717
28718 int
28719 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28720 {
28721 indirect_string_node *node = *h;
28722
28723 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28724 {
28725 /* Assert that this node has been assigned an index. */
28726 gcc_assert (node->index != NO_INDEX_ASSIGNED
28727 && node->index != NOT_INDEXED);
28728 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28729 "indexed string 0x%x: %s", node->index, node->str);
28730 *offset += strlen (node->str) + 1;
28731 }
28732 return 1;
28733 }
28734
28735 /* A helper function for dwarf2out_finish called through
28736 htab_traverse. Output the indexed string. */
28737
28738 int
28739 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28740 {
28741 struct indirect_string_node *node = *h;
28742
28743 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28744 {
28745 /* Assert that the strings are output in the same order as their
28746 indexes were assigned. */
28747 gcc_assert (*cur_idx == node->index);
28748 assemble_string (node->str, strlen (node->str) + 1);
28749 *cur_idx += 1;
28750 }
28751 return 1;
28752 }
28753
28754 /* A helper function for output_indirect_strings. Counts the number
28755 of indexed string offsets. Must match the logic of the functions
28756 output_index_string[_offsets] above. */
28757 int
28758 count_index_strings (indirect_string_node **h, unsigned int *last_idx)
28759 {
28760 struct indirect_string_node *node = *h;
28761
28762 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28763 *last_idx += 1;
28764 return 1;
28765 }
28766
28767 /* A helper function for dwarf2out_finish called through
28768 htab_traverse. Emit one queued .debug_str string. */
28769
28770 int
28771 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28772 {
28773 struct indirect_string_node *node = *h;
28774
28775 node->form = find_string_form (node);
28776 if (node->form == form && node->refcount > 0)
28777 {
28778 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28779 assemble_string (node->str, strlen (node->str) + 1);
28780 }
28781
28782 return 1;
28783 }
28784
28785 /* Output the indexed string table. */
28786
28787 static void
28788 output_indirect_strings (void)
28789 {
28790 switch_to_section (debug_str_section);
28791 if (!dwarf_split_debug_info)
28792 debug_str_hash->traverse<enum dwarf_form,
28793 output_indirect_string> (DW_FORM_strp);
28794 else
28795 {
28796 unsigned int offset = 0;
28797 unsigned int cur_idx = 0;
28798
28799 if (skeleton_debug_str_hash)
28800 skeleton_debug_str_hash->traverse<enum dwarf_form,
28801 output_indirect_string> (DW_FORM_strp);
28802
28803 switch_to_section (debug_str_offsets_section);
28804 /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
28805 header. Note that we don't need to generate a label to the
28806 actual index table following the header here, because this is
28807 for the split dwarf case only. In a .dwo file there is only
28808 one string offsets table (and one debug info section). But
28809 if we were to start using string offset tables for the main (or
28810 skeleton) unit, we would have to add a DW_AT_str_offsets_base
28811 pointing to the actual index after the header. Split dwarf
28812 units will never have a string offsets base attribute. When
28813 a split unit is moved into a .dwp file the string offsets can
28814 be found through the .debug_cu_index section table. */
28815 if (dwarf_version >= 5)
28816 {
28817 unsigned int last_idx = 0;
28818 unsigned long str_offsets_length;
28819
28820 debug_str_hash->traverse_noresize
28821 <unsigned int *, count_index_strings> (&last_idx);
28822 str_offsets_length = last_idx * DWARF_OFFSET_SIZE + 4;
28823 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
28824 dw2_asm_output_data (4, 0xffffffff,
28825 "Escape value for 64-bit DWARF extension");
28826 dw2_asm_output_data (DWARF_OFFSET_SIZE, str_offsets_length,
28827 "Length of string offsets unit");
28828 dw2_asm_output_data (2, 5, "DWARF string offsets version");
28829 dw2_asm_output_data (2, 0, "Header zero padding");
28830 }
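          /* Worked example (a sketch, counts invented): with three indexed
             strings and 4-byte offsets, the header above has
             str_offsets_length = 3 * 4 + 4 = 16, covering the 2-byte version,
             the 2-byte padding and the twelve bytes of offsets that follow.  */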
28831 debug_str_hash->traverse_noresize
28832 <unsigned int *, output_index_string_offset> (&offset);
28833 switch_to_section (debug_str_dwo_section);
28834 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
28835 (&cur_idx);
28836 }
28837 }
28838
28839 /* Callback for htab_traverse to assign an index to an entry in the
28840 table, and to write that entry to the .debug_addr section. */
28841
28842 int
28843 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
28844 {
28845 addr_table_entry *entry = *slot;
28846
28847 if (entry->refcount == 0)
28848 {
28849 gcc_assert (entry->index == NO_INDEX_ASSIGNED
28850 || entry->index == NOT_INDEXED);
28851 return 1;
28852 }
28853
28854 gcc_assert (entry->index == *cur_index);
28855 (*cur_index)++;
28856
28857 switch (entry->kind)
28858 {
28859 case ate_kind_rtx:
28860 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
28861 "0x%x", entry->index);
28862 break;
28863 case ate_kind_rtx_dtprel:
28864 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
28865 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
28866 DWARF2_ADDR_SIZE,
28867 entry->addr.rtl);
28868 fputc ('\n', asm_out_file);
28869 break;
28870 case ate_kind_label:
28871 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
28872 "0x%x", entry->index);
28873 break;
28874 default:
28875 gcc_unreachable ();
28876 }
28877 return 1;
28878 }
28879
28880 /* A helper function for dwarf2out_finish. Counts the number
28881 of indexed addresses. Must match the logic of the function
28882 output_addr_table_entry above. */
28883 int
28884 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
28885 {
28886 addr_table_entry *entry = *slot;
28887
28888 if (entry->refcount > 0)
28889 *last_idx += 1;
28890 return 1;
28891 }
28892
28893 /* Produce the .debug_addr section. */
28894
28895 static void
28896 output_addr_table (void)
28897 {
28898 unsigned int index = 0;
28899 if (addr_index_table == NULL || addr_index_table->size () == 0)
28900 return;
28901
28902 switch_to_section (debug_addr_section);
28903 addr_index_table
28904 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
28905 }
28906
28907 #if ENABLE_ASSERT_CHECKING
28908 /* Verify that all marks are clear. */
28909
28910 static void
28911 verify_marks_clear (dw_die_ref die)
28912 {
28913 dw_die_ref c;
28914
28915 gcc_assert (! die->die_mark);
28916 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
28917 }
28918 #endif /* ENABLE_ASSERT_CHECKING */
28919
28920 /* Clear the marks for a die and its children.
28921 Don't complain if the mark isn't set. */
28922
28923 static void
28924 prune_unmark_dies (dw_die_ref die)
28925 {
28926 dw_die_ref c;
28927
28928 if (die->die_mark)
28929 die->die_mark = 0;
28930 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
28931 }
28932
28933 /* Given LOC that is referenced by a DIE we're marking as used, find all
28934 the DWARF procedures it references and mark them as used. */
28935
28936 static void
28937 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
28938 {
28939 for (; loc != NULL; loc = loc->dw_loc_next)
28940 switch (loc->dw_loc_opc)
28941 {
28942 case DW_OP_implicit_pointer:
28943 case DW_OP_convert:
28944 case DW_OP_reinterpret:
28945 case DW_OP_GNU_implicit_pointer:
28946 case DW_OP_GNU_convert:
28947 case DW_OP_GNU_reinterpret:
28948 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
28949 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28950 break;
28951 case DW_OP_GNU_variable_value:
28952 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28953 {
28954 dw_die_ref ref
28955 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
28956 if (ref == NULL)
28957 break;
28958 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28959 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28960 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28961 }
28962 /* FALLTHRU */
28963 case DW_OP_call2:
28964 case DW_OP_call4:
28965 case DW_OP_call_ref:
28966 case DW_OP_const_type:
28967 case DW_OP_GNU_const_type:
28968 case DW_OP_GNU_parameter_ref:
28969 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
28970 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28971 break;
28972 case DW_OP_regval_type:
28973 case DW_OP_deref_type:
28974 case DW_OP_GNU_regval_type:
28975 case DW_OP_GNU_deref_type:
28976 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
28977 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
28978 break;
28979 case DW_OP_entry_value:
28980 case DW_OP_GNU_entry_value:
28981 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
28982 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
28983 break;
28984 default:
28985 break;
28986 }
28987 }
28988
28989 /* Given DIE that we're marking as used, find any other dies
28990 it references as attributes and mark them as used. */
28991
28992 static void
28993 prune_unused_types_walk_attribs (dw_die_ref die)
28994 {
28995 dw_attr_node *a;
28996 unsigned ix;
28997
28998 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
28999 {
29000 switch (AT_class (a))
29001 {
29002 /* Make sure DWARF procedures referenced by location descriptions will
29003 get emitted. */
29004 case dw_val_class_loc:
29005 prune_unused_types_walk_loc_descr (AT_loc (a));
29006 break;
29007 case dw_val_class_loc_list:
29008 for (dw_loc_list_ref list = AT_loc_list (a);
29009 list != NULL;
29010 list = list->dw_loc_next)
29011 prune_unused_types_walk_loc_descr (list->expr);
29012 break;
29013
29014 case dw_val_class_view_list:
29015 /* This points to a loc_list in another attribute, so it's
29016 already covered. */
29017 break;
29018
29019 case dw_val_class_die_ref:
29020 /* A reference to another DIE.
29021 Make sure that it will get emitted.
29022 If it was broken out into a comdat group, don't follow it. */
29023 if (! AT_ref (a)->comdat_type_p
29024 || a->dw_attr == DW_AT_specification)
29025 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29026 break;
29027
29028 case dw_val_class_str:
29029 /* Set the string's refcount to 0 so that prune_unused_types_mark
29030 accounts properly for it. */
29031 a->dw_attr_val.v.val_str->refcount = 0;
29032 break;
29033
29034 default:
29035 break;
29036 }
29037 }
29038 }
29039
29040 /* Mark the children DIEs of DIE that describe its generic parms and args. */
29041
29042 static void
29043 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29044 {
29045 dw_die_ref c;
29046
29047 if (die == NULL || die->die_child == NULL)
29048 return;
29049 c = die->die_child;
29050 do
29051 {
29052 if (is_template_parameter (c))
29053 prune_unused_types_mark (c, 1);
29054 c = c->die_sib;
29055 } while (c && c != die->die_child);
29056 }
29057
29058 /* Mark DIE as being used. If DOKIDS is true, then walk down
29059 to DIE's children. */
29060
29061 static void
29062 prune_unused_types_mark (dw_die_ref die, int dokids)
29063 {
29064 dw_die_ref c;
29065
29066 if (die->die_mark == 0)
29067 {
29068 /* We haven't done this node yet. Mark it as used. */
29069 die->die_mark = 1;
29070 /* If this is the DIE of a generic type instantiation,
29071 mark the children DIEs that describe its generic parms and
29072 args. */
29073 prune_unused_types_mark_generic_parms_dies (die);
29074
29075 /* We also have to mark its parents as used.
29076 (But we don't want to mark our parent's kids due to this,
29077 unless it is a class.) */
29078 if (die->die_parent)
29079 prune_unused_types_mark (die->die_parent,
29080 class_scope_p (die->die_parent));
29081
29082 /* Mark any referenced nodes. */
29083 prune_unused_types_walk_attribs (die);
29084
29085 /* If this node is a specification,
29086 also mark the definition, if it exists. */
29087 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29088 prune_unused_types_mark (die->die_definition, 1);
29089 }
29090
29091 if (dokids && die->die_mark != 2)
29092 {
29093 /* We need to walk the children, but haven't done so yet.
29094 Remember that we've walked the kids. */
29095 die->die_mark = 2;
29096
29097 /* If this is an array type, we need to make sure our
29098 kids get marked, even if they're types. If we're
29099 breaking out types into comdat sections, do this
29100 for all type definitions. */
29101 if (die->die_tag == DW_TAG_array_type
29102 || (use_debug_types
29103 && is_type_die (die) && ! is_declaration_die (die)))
29104 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29105 else
29106 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29107 }
29108 }
29109
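/* To summarize the marking protocol used by the walkers above:
   die_mark == 0 means the DIE has not been visited, die_mark == 1
   means the DIE itself is marked as used but its children have not
   been walked yet, and die_mark == 2 means both the DIE and its
   children have been processed.  For example, marking a nested DIE
   also marks its parents; if a parent is a class scope, that parent's
   children are walked as well (see the class_scope_p test above).  */
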
29110 /* For local classes, check whether any static member functions were
29111 emitted and, if so, mark them. */
29112
29113 static void
29114 prune_unused_types_walk_local_classes (dw_die_ref die)
29115 {
29116 dw_die_ref c;
29117
29118 if (die->die_mark == 2)
29119 return;
29120
29121 switch (die->die_tag)
29122 {
29123 case DW_TAG_structure_type:
29124 case DW_TAG_union_type:
29125 case DW_TAG_class_type:
29126 break;
29127
29128 case DW_TAG_subprogram:
29129 if (!get_AT_flag (die, DW_AT_declaration)
29130 || die->die_definition != NULL)
29131 prune_unused_types_mark (die, 1);
29132 return;
29133
29134 default:
29135 return;
29136 }
29137
29138 /* Mark children. */
29139 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29140 }
29141
29142 /* Walk the tree DIE and mark types that we actually use. */
29143
29144 static void
29145 prune_unused_types_walk (dw_die_ref die)
29146 {
29147 dw_die_ref c;
29148
29149 /* Don't do anything if this node is already marked and
29150 children have been marked as well. */
29151 if (die->die_mark == 2)
29152 return;
29153
29154 switch (die->die_tag)
29155 {
29156 case DW_TAG_structure_type:
29157 case DW_TAG_union_type:
29158 case DW_TAG_class_type:
29159 if (die->die_perennial_p)
29160 break;
29161
29162 for (c = die->die_parent; c; c = c->die_parent)
29163 if (c->die_tag == DW_TAG_subprogram)
29164 break;
29165
29166 /* Finding used static member functions inside of classes
29167 is needed just for local classes, because for other classes
29168 static member function DIEs with DW_AT_specification
29169 are emitted outside of the DW_TAG_*_type. If we ever change
29170 it, we'd need to call this even for non-local classes. */
29171 if (c)
29172 prune_unused_types_walk_local_classes (die);
29173
29174 /* It's a type node --- don't mark it. */
29175 return;
29176
29177 case DW_TAG_const_type:
29178 case DW_TAG_packed_type:
29179 case DW_TAG_pointer_type:
29180 case DW_TAG_reference_type:
29181 case DW_TAG_rvalue_reference_type:
29182 case DW_TAG_volatile_type:
29183 case DW_TAG_typedef:
29184 case DW_TAG_array_type:
29185 case DW_TAG_interface_type:
29186 case DW_TAG_friend:
29187 case DW_TAG_enumeration_type:
29188 case DW_TAG_subroutine_type:
29189 case DW_TAG_string_type:
29190 case DW_TAG_set_type:
29191 case DW_TAG_subrange_type:
29192 case DW_TAG_ptr_to_member_type:
29193 case DW_TAG_file_type:
29194 /* Type nodes are useful only when other DIEs reference them --- don't
29195 mark them. */
29196 /* FALLTHROUGH */
29197
29198 case DW_TAG_dwarf_procedure:
29199 /* Likewise for DWARF procedures. */
29200
29201 if (die->die_perennial_p)
29202 break;
29203
29204 return;
29205
29206 default:
29207 /* Mark everything else. */
29208 break;
29209 }
29210
29211 if (die->die_mark == 0)
29212 {
29213 die->die_mark = 1;
29214
29215 /* Now, mark any dies referenced from here. */
29216 prune_unused_types_walk_attribs (die);
29217 }
29218
29219 die->die_mark = 2;
29220
29221 /* Mark children. */
29222 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29223 }
29224
29225 /* Increment the string counts on strings referred to from DIE's
29226 attributes. */
29227
29228 static void
29229 prune_unused_types_update_strings (dw_die_ref die)
29230 {
29231 dw_attr_node *a;
29232 unsigned ix;
29233
29234 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29235 if (AT_class (a) == dw_val_class_str)
29236 {
29237 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29238 s->refcount++;
29239 /* Avoid unnecessarily putting strings that are used less than
29240 twice in the hash table. */
29241 if (s->refcount
29242 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29243 {
29244 indirect_string_node **slot
29245 = debug_str_hash->find_slot_with_hash (s->str,
29246 htab_hash_string (s->str),
29247 INSERT);
29248 gcc_assert (*slot == NULL);
29249 *slot = s;
29250 }
29251 }
29252 }
29253
29254 /* Mark DIE and its children as removed. */
29255
29256 static void
29257 mark_removed (dw_die_ref die)
29258 {
29259 dw_die_ref c;
29260 die->removed = true;
29261 FOR_EACH_CHILD (die, c, mark_removed (c));
29262 }
29263
29264 /* Remove from the tree DIE any dies that aren't marked. */
29265
29266 static void
29267 prune_unused_types_prune (dw_die_ref die)
29268 {
29269 dw_die_ref c;
29270
29271 gcc_assert (die->die_mark);
29272 prune_unused_types_update_strings (die);
29273
29274 if (! die->die_child)
29275 return;
29276
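/* The children of DIE form a circular singly-linked list through
   die_sib, with die_child pointing at the last child (so
   die_child->die_sib is the first child).  The loop below walks that
   ring, unlinking unmarked children and recursing into marked ones.  */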
29277 c = die->die_child;
29278 do {
29279 dw_die_ref prev = c, next;
29280 for (c = c->die_sib; ! c->die_mark; c = next)
29281 if (c == die->die_child)
29282 {
29283 /* No marked children between 'prev' and the end of the list. */
29284 if (prev == c)
29285 /* No marked children at all. */
29286 die->die_child = NULL;
29287 else
29288 {
29289 prev->die_sib = c->die_sib;
29290 die->die_child = prev;
29291 }
29292 c->die_sib = NULL;
29293 mark_removed (c);
29294 return;
29295 }
29296 else
29297 {
29298 next = c->die_sib;
29299 c->die_sib = NULL;
29300 mark_removed (c);
29301 }
29302
29303 if (c != prev->die_sib)
29304 prev->die_sib = c;
29305 prune_unused_types_prune (c);
29306 } while (c != die->die_child);
29307 }
29308
29309 /* Remove dies representing declarations that we never use. */
29310
29311 static void
29312 prune_unused_types (void)
29313 {
29314 unsigned int i;
29315 limbo_die_node *node;
29316 comdat_type_node *ctnode;
29317 pubname_entry *pub;
29318 dw_die_ref base_type;
29319
29320 #if ENABLE_ASSERT_CHECKING
29321 /* All the marks should already be clear. */
29322 verify_marks_clear (comp_unit_die ());
29323 for (node = limbo_die_list; node; node = node->next)
29324 verify_marks_clear (node->die);
29325 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29326 verify_marks_clear (ctnode->root_die);
29327 #endif /* ENABLE_ASSERT_CHECKING */
29328
29329 /* Mark types that are used in global variables. */
29330 premark_types_used_by_global_vars ();
29331
29332 /* Set the mark on nodes that are actually used. */
29333 prune_unused_types_walk (comp_unit_die ());
29334 for (node = limbo_die_list; node; node = node->next)
29335 prune_unused_types_walk (node->die);
29336 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29337 {
29338 prune_unused_types_walk (ctnode->root_die);
29339 prune_unused_types_mark (ctnode->type_die, 1);
29340 }
29341
29342 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29343 are unusual in that they are pubnames that are the children of pubtypes.
29344 They should only be marked via their parent DW_TAG_enumeration_type die,
29345 not as roots in themselves. */
29346 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29347 if (pub->die->die_tag != DW_TAG_enumerator)
29348 prune_unused_types_mark (pub->die, 1);
29349 for (i = 0; base_types.iterate (i, &base_type); i++)
29350 prune_unused_types_mark (base_type, 1);
29351
29352 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29353 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29354 callees). */
29355 cgraph_node *cnode;
29356 FOR_EACH_FUNCTION (cnode)
29357 if (cnode->referred_to_p (false))
29358 {
29359 dw_die_ref die = lookup_decl_die (cnode->decl);
29360 if (die == NULL || die->die_mark)
29361 continue;
29362 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29363 if (e->caller != cnode
29364 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29365 {
29366 prune_unused_types_mark (die, 1);
29367 break;
29368 }
29369 }
29370
29371 if (debug_str_hash)
29372 debug_str_hash->empty ();
29373 if (skeleton_debug_str_hash)
29374 skeleton_debug_str_hash->empty ();
29375 prune_unused_types_prune (comp_unit_die ());
29376 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29377 {
29378 node = *pnode;
29379 if (!node->die->die_mark)
29380 *pnode = node->next;
29381 else
29382 {
29383 prune_unused_types_prune (node->die);
29384 pnode = &node->next;
29385 }
29386 }
29387 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29388 prune_unused_types_prune (ctnode->root_die);
29389
29390 /* Leave the marks clear. */
29391 prune_unmark_dies (comp_unit_die ());
29392 for (node = limbo_die_list; node; node = node->next)
29393 prune_unmark_dies (node->die);
29394 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29395 prune_unmark_dies (ctnode->root_die);
29396 }
29397
29398 /* Helpers to manipulate hash table of comdat type units. */
29399
29400 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29401 {
29402 static inline hashval_t hash (const comdat_type_node *);
29403 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29404 };
29405
29406 inline hashval_t
29407 comdat_type_hasher::hash (const comdat_type_node *type_node)
29408 {
29409 hashval_t h;
29410 memcpy (&h, type_node->signature, sizeof (h));
29411 return h;
29412 }
29413
29414 inline bool
29415 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29416 const comdat_type_node *type_node_2)
29417 {
29418 return (! memcmp (type_node_1->signature, type_node_2->signature,
29419 DWARF_TYPE_SIGNATURE_SIZE));
29420 }
29421
29422 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
29423 to the location where it would have been added had we known its
29424 DECL_ASSEMBLER_NAME when the other attributes were added. This will
29425 probably improve compactness of the debug info by removing equivalent
29426 abbrevs, and hide any differences caused by deferring the computation
29427 of the assembler name, triggered e.g. by PCH. */
29428
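/* For example, if the DIE's attributes were added in the order

     DW_AT_name, DW_AT_decl_file, DW_AT_decl_line, DW_AT_external,
     DW_AT_low_pc, DW_AT_high_pc, DW_AT_linkage_name

   the just-added DW_AT_linkage_name is moved back so it sits right
   after DW_AT_decl_line, i.e. where it would have been added had the
   assembler name been known up front.  */
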
29429 static inline void
29430 move_linkage_attr (dw_die_ref die)
29431 {
29432 unsigned ix = vec_safe_length (die->die_attr);
29433 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29434
29435 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29436 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29437
29438 while (--ix > 0)
29439 {
29440 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29441
29442 if (prev->dw_attr == DW_AT_decl_line
29443 || prev->dw_attr == DW_AT_decl_column
29444 || prev->dw_attr == DW_AT_name)
29445 break;
29446 }
29447
29448 if (ix != vec_safe_length (die->die_attr) - 1)
29449 {
29450 die->die_attr->pop ();
29451 die->die_attr->quick_insert (ix, linkage);
29452 }
29453 }
29454
29455 /* Helper function for resolve_addr: mark DW_TAG_base_type nodes
29456 referenced from typed stack ops and count how often they are used. */
29457
29458 static void
29459 mark_base_types (dw_loc_descr_ref loc)
29460 {
29461 dw_die_ref base_type = NULL;
29462
29463 for (; loc; loc = loc->dw_loc_next)
29464 {
29465 switch (loc->dw_loc_opc)
29466 {
29467 case DW_OP_regval_type:
29468 case DW_OP_deref_type:
29469 case DW_OP_GNU_regval_type:
29470 case DW_OP_GNU_deref_type:
29471 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29472 break;
29473 case DW_OP_convert:
29474 case DW_OP_reinterpret:
29475 case DW_OP_GNU_convert:
29476 case DW_OP_GNU_reinterpret:
29477 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29478 continue;
29479 /* FALLTHRU */
29480 case DW_OP_const_type:
29481 case DW_OP_GNU_const_type:
29482 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29483 break;
29484 case DW_OP_entry_value:
29485 case DW_OP_GNU_entry_value:
29486 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29487 continue;
29488 default:
29489 continue;
29490 }
29491 gcc_assert (base_type->die_parent == comp_unit_die ());
29492 if (base_type->die_mark)
29493 base_type->die_mark++;
29494 else
29495 {
29496 base_types.safe_push (base_type);
29497 base_type->die_mark = 1;
29498 }
29499 }
29500 }
29501
29502 /* Comparison function for sorting marked base types. */
29503
29504 static int
29505 base_type_cmp (const void *x, const void *y)
29506 {
29507 dw_die_ref dx = *(const dw_die_ref *) x;
29508 dw_die_ref dy = *(const dw_die_ref *) y;
29509 unsigned int byte_size1, byte_size2;
29510 unsigned int encoding1, encoding2;
29511 unsigned int align1, align2;
29512 if (dx->die_mark > dy->die_mark)
29513 return -1;
29514 if (dx->die_mark < dy->die_mark)
29515 return 1;
29516 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29517 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29518 if (byte_size1 < byte_size2)
29519 return 1;
29520 if (byte_size1 > byte_size2)
29521 return -1;
29522 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29523 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29524 if (encoding1 < encoding2)
29525 return 1;
29526 if (encoding1 > encoding2)
29527 return -1;
29528 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29529 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29530 if (align1 < align2)
29531 return 1;
29532 if (align1 > align2)
29533 return -1;
29534 return 0;
29535 }
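
/* The net effect is a descending sort: the most frequently used base
   types come first, with byte size, encoding and alignment acting as
   descending tie-breakers so that equivalent types end up adjacent.  */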
29536
29537 /* Move base types marked by mark_base_types as early as possible
29538 in the CU, sorted by decreasing usage count, both to make the
29539 uleb128 references as small as possible and to make sure they
29540 will have die_offset already computed by calc_die_sizes when
29541 the sizes of typed stack loc ops are computed. */
29542
29543 static void
29544 move_marked_base_types (void)
29545 {
29546 unsigned int i;
29547 dw_die_ref base_type, die, c;
29548
29549 if (base_types.is_empty ())
29550 return;
29551
29552 /* Sort by decreasing usage count, they will be added again in that
29553 order later on. */
29554 base_types.qsort (base_type_cmp);
29555 die = comp_unit_die ();
29556 c = die->die_child;
29557 do
29558 {
29559 dw_die_ref prev = c;
29560 c = c->die_sib;
29561 while (c->die_mark)
29562 {
29563 remove_child_with_prev (c, prev);
29564 /* Since base types have been marked, there must be at least
29565 one node other than DW_TAG_base_type. */
29566 gcc_assert (die->die_child != NULL);
29567 c = prev->die_sib;
29568 }
29569 }
29570 while (c != die->die_child);
29571 gcc_assert (die->die_child);
29572 c = die->die_child;
29573 for (i = 0; base_types.iterate (i, &base_type); i++)
29574 {
29575 base_type->die_mark = 0;
29576 base_type->die_sib = c->die_sib;
29577 c->die_sib = base_type;
29578 c = base_type;
29579 }
29580 }
29581
29582 /* Helper function for resolve_addr: attempt to resolve one CONST_STRING
29583 and return true if successful. Similarly, verify that SYMBOL_REFs
29584 refer to variables emitted in the current CU. */
29585
29586 static bool
29587 resolve_one_addr (rtx *addr)
29588 {
29589 rtx rtl = *addr;
29590
29591 if (GET_CODE (rtl) == CONST_STRING)
29592 {
29593 size_t len = strlen (XSTR (rtl, 0)) + 1;
29594 tree t = build_string (len, XSTR (rtl, 0));
29595 tree tlen = size_int (len - 1);
29596 TREE_TYPE (t)
29597 = build_array_type (char_type_node, build_index_type (tlen));
29598 rtl = lookup_constant_def (t);
29599 if (!rtl || !MEM_P (rtl))
29600 return false;
29601 rtl = XEXP (rtl, 0);
29602 if (GET_CODE (rtl) == SYMBOL_REF
29603 && SYMBOL_REF_DECL (rtl)
29604 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29605 return false;
29606 vec_safe_push (used_rtx_array, rtl);
29607 *addr = rtl;
29608 return true;
29609 }
29610
29611 if (GET_CODE (rtl) == SYMBOL_REF
29612 && SYMBOL_REF_DECL (rtl))
29613 {
29614 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29615 {
29616 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29617 return false;
29618 }
29619 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29620 return false;
29621 }
29622
29623 if (GET_CODE (rtl) == CONST)
29624 {
29625 subrtx_ptr_iterator::array_type array;
29626 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29627 if (!resolve_one_addr (*iter))
29628 return false;
29629 }
29630
29631 return true;
29632 }
29633
29634 /* For STRING_CST, return SYMBOL_REF of its constant pool entry,
29635 if possible, and create DW_TAG_dwarf_procedure that can be referenced
29636 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
29637
29638 static rtx
29639 string_cst_pool_decl (tree t)
29640 {
29641 rtx rtl = output_constant_def (t, 1);
29642 unsigned char *array;
29643 dw_loc_descr_ref l;
29644 tree decl;
29645 size_t len;
29646 dw_die_ref ref;
29647
29648 if (!rtl || !MEM_P (rtl))
29649 return NULL_RTX;
29650 rtl = XEXP (rtl, 0);
29651 if (GET_CODE (rtl) != SYMBOL_REF
29652 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29653 return NULL_RTX;
29654
29655 decl = SYMBOL_REF_DECL (rtl);
29656 if (!lookup_decl_die (decl))
29657 {
29658 len = TREE_STRING_LENGTH (t);
29659 vec_safe_push (used_rtx_array, rtl);
29660 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29661 array = ggc_vec_alloc<unsigned char> (len);
29662 memcpy (array, TREE_STRING_POINTER (t), len);
29663 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29664 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29665 l->dw_loc_oprnd2.v.val_vec.length = len;
29666 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29667 l->dw_loc_oprnd2.v.val_vec.array = array;
29668 add_AT_loc (ref, DW_AT_location, l);
29669 equate_decl_number_to_die (decl, ref);
29670 }
29671 return rtl;
29672 }
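
/* For example, for a STRING_CST "abc" this returns the SYMBOL_REF of
   its constant-pool entry and, the first time the string is seen,
   creates a DW_TAG_dwarf_procedure whose DW_AT_location is
   DW_OP_implicit_value 4 carrying the bytes 'a' 'b' 'c' '\0', so that
   DW_OP_implicit_pointer can later refer to the string.  */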
29673
29674 /* Helper function of resolve_addr_in_expr. LOC is
29675 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29676 of the exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29677 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
29678 with DW_OP_implicit_pointer if possible and return true;
29679 if unsuccessful, return false. */
29680
29681 static bool
29682 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29683 {
29684 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29685 HOST_WIDE_INT offset = 0;
29686 dw_die_ref ref = NULL;
29687 tree decl;
29688
29689 if (GET_CODE (rtl) == CONST
29690 && GET_CODE (XEXP (rtl, 0)) == PLUS
29691 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29692 {
29693 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29694 rtl = XEXP (XEXP (rtl, 0), 0);
29695 }
29696 if (GET_CODE (rtl) == CONST_STRING)
29697 {
29698 size_t len = strlen (XSTR (rtl, 0)) + 1;
29699 tree t = build_string (len, XSTR (rtl, 0));
29700 tree tlen = size_int (len - 1);
29701
29702 TREE_TYPE (t)
29703 = build_array_type (char_type_node, build_index_type (tlen));
29704 rtl = string_cst_pool_decl (t);
29705 if (!rtl)
29706 return false;
29707 }
29708 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29709 {
29710 decl = SYMBOL_REF_DECL (rtl);
29711 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29712 {
29713 ref = lookup_decl_die (decl);
29714 if (ref && (get_AT (ref, DW_AT_location)
29715 || get_AT (ref, DW_AT_const_value)))
29716 {
29717 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29718 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29719 loc->dw_loc_oprnd1.val_entry = NULL;
29720 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29721 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29722 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29723 loc->dw_loc_oprnd2.v.val_int = offset;
29724 return true;
29725 }
29726 }
29727 }
29728 return false;
29729 }
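
/* For example, an unresolvable

     DW_OP_addr <var + 16>; DW_OP_stack_value

   where the DIE of VAR carries DW_AT_location or DW_AT_const_value is
   rewritten in place as

     DW_OP_implicit_pointer <DIE of var> 16

   i.e. a pointer value that has been optimized away but whose target
   and offset are still known.  */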
29730
29731 /* Helper function for resolve_addr: handle one location expression.
29732 Return false if at least one CONST_STRING or SYMBOL_REF in
29733 the expression couldn't be resolved. */
29734
29735 static bool
29736 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29737 {
29738 dw_loc_descr_ref keep = NULL;
29739 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29740 switch (loc->dw_loc_opc)
29741 {
29742 case DW_OP_addr:
29743 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29744 {
29745 if ((prev == NULL
29746 || prev->dw_loc_opc == DW_OP_piece
29747 || prev->dw_loc_opc == DW_OP_bit_piece)
29748 && loc->dw_loc_next
29749 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29750 && (!dwarf_strict || dwarf_version >= 5)
29751 && optimize_one_addr_into_implicit_ptr (loc))
29752 break;
29753 return false;
29754 }
29755 break;
29756 case DW_OP_GNU_addr_index:
29757 case DW_OP_addrx:
29758 case DW_OP_GNU_const_index:
29759 case DW_OP_constx:
29760 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
29761 || loc->dw_loc_opc == DW_OP_addrx)
29762 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
29763 || loc->dw_loc_opc == DW_OP_constx)
29764 && loc->dtprel))
29765 {
29766 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29767 if (!resolve_one_addr (&rtl))
29768 return false;
29769 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29770 loc->dw_loc_oprnd1.val_entry
29771 = add_addr_table_entry (rtl, ate_kind_rtx);
29772 }
29773 break;
29774 case DW_OP_const4u:
29775 case DW_OP_const8u:
29776 if (loc->dtprel
29777 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29778 return false;
29779 break;
29780 case DW_OP_plus_uconst:
29781 if (size_of_loc_descr (loc)
29782 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
29783 + 1
29784 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
29785 {
29786 dw_loc_descr_ref repl
29787 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
29788 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
29789 add_loc_descr (&repl, loc->dw_loc_next);
29790 *loc = *repl;
29791 }
29792 break;
29793 case DW_OP_implicit_value:
29794 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
29795 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
29796 return false;
29797 break;
29798 case DW_OP_implicit_pointer:
29799 case DW_OP_GNU_implicit_pointer:
29800 case DW_OP_GNU_parameter_ref:
29801 case DW_OP_GNU_variable_value:
29802 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29803 {
29804 dw_die_ref ref
29805 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29806 if (ref == NULL)
29807 return false;
29808 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29809 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29810 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29811 }
29812 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
29813 {
29814 if (prev == NULL
29815 && loc->dw_loc_next == NULL
29816 && AT_class (a) == dw_val_class_loc)
29817 switch (a->dw_attr)
29818 {
29819 /* The following attributes allow both exprloc and reference forms,
29820 so if the whole expression is DW_OP_GNU_variable_value alone
29821 we can transform it into a reference. */
29822 case DW_AT_byte_size:
29823 case DW_AT_bit_size:
29824 case DW_AT_lower_bound:
29825 case DW_AT_upper_bound:
29826 case DW_AT_bit_stride:
29827 case DW_AT_count:
29828 case DW_AT_allocated:
29829 case DW_AT_associated:
29830 case DW_AT_byte_stride:
29831 a->dw_attr_val.val_class = dw_val_class_die_ref;
29832 a->dw_attr_val.val_entry = NULL;
29833 a->dw_attr_val.v.val_die_ref.die
29834 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29835 a->dw_attr_val.v.val_die_ref.external = 0;
29836 return true;
29837 default:
29838 break;
29839 }
29840 if (dwarf_strict)
29841 return false;
29842 }
29843 break;
29844 case DW_OP_const_type:
29845 case DW_OP_regval_type:
29846 case DW_OP_deref_type:
29847 case DW_OP_convert:
29848 case DW_OP_reinterpret:
29849 case DW_OP_GNU_const_type:
29850 case DW_OP_GNU_regval_type:
29851 case DW_OP_GNU_deref_type:
29852 case DW_OP_GNU_convert:
29853 case DW_OP_GNU_reinterpret:
29854 while (loc->dw_loc_next
29855 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
29856 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
29857 {
29858 dw_die_ref base1, base2;
29859 unsigned enc1, enc2, size1, size2;
29860 if (loc->dw_loc_opc == DW_OP_regval_type
29861 || loc->dw_loc_opc == DW_OP_deref_type
29862 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29863 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29864 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
29865 else if (loc->dw_loc_oprnd1.val_class
29866 == dw_val_class_unsigned_const)
29867 break;
29868 else
29869 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29870 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
29871 == dw_val_class_unsigned_const)
29872 break;
29873 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
29874 gcc_assert (base1->die_tag == DW_TAG_base_type
29875 && base2->die_tag == DW_TAG_base_type);
29876 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
29877 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
29878 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
29879 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
29880 if (size1 == size2
29881 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
29882 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
29883 && loc != keep)
29884 || enc1 == enc2))
29885 {
29886 /* Optimize away next DW_OP_convert after
29887 adjusting LOC's base type die reference. */
29888 if (loc->dw_loc_opc == DW_OP_regval_type
29889 || loc->dw_loc_opc == DW_OP_deref_type
29890 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29891 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29892 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
29893 else
29894 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
29895 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29896 continue;
29897 }
29898 /* Don't change integer DW_OP_convert after e.g. floating
29899 point typed stack entry. */
29900 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
29901 keep = loc->dw_loc_next;
29902 break;
29903 }
29904 break;
29905 default:
29906 break;
29907 }
29908 return true;
29909 }
29910
29911 /* Helper function of resolve_addr. DIE had a DW_AT_location of
29912 DW_OP_addr alone, which referred to DECL in DW_OP_addr's operand,
29913 and the DW_OP_addr couldn't be resolved. resolve_addr has already
29914 removed the DW_AT_location attribute. This function attempts to
29915 add a new DW_AT_location attribute with DW_OP_implicit_pointer
29916 (or a DW_AT_const_value attribute) to it, if possible. */
29917
29918 static void
29919 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
29920 {
29921 if (!VAR_P (decl)
29922 || lookup_decl_die (decl) != die
29923 || DECL_EXTERNAL (decl)
29924 || !TREE_STATIC (decl)
29925 || DECL_INITIAL (decl) == NULL_TREE
29926 || DECL_P (DECL_INITIAL (decl))
29927 || get_AT (die, DW_AT_const_value))
29928 return;
29929
29930 tree init = DECL_INITIAL (decl);
29931 HOST_WIDE_INT offset = 0;
29932 /* For variables that have been optimized away and thus
29933 don't have a memory location, see if we can emit
29934 DW_AT_const_value instead. */
29935 if (tree_add_const_value_attribute (die, init))
29936 return;
29937 if (dwarf_strict && dwarf_version < 5)
29938 return;
29939 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
29940 and ADDR_EXPR refers to a decl that has DW_AT_location or
29941 DW_AT_const_value (but isn't addressable, otherwise
29942 resolving the original DW_OP_addr wouldn't fail), see if
29943 we can add DW_OP_implicit_pointer. */
29944 STRIP_NOPS (init);
29945 if (TREE_CODE (init) == POINTER_PLUS_EXPR
29946 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
29947 {
29948 offset = tree_to_shwi (TREE_OPERAND (init, 1));
29949 init = TREE_OPERAND (init, 0);
29950 STRIP_NOPS (init);
29951 }
29952 if (TREE_CODE (init) != ADDR_EXPR)
29953 return;
29954 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
29955 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
29956 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
29957 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
29958 && TREE_OPERAND (init, 0) != decl))
29959 {
29960 dw_die_ref ref;
29961 dw_loc_descr_ref l;
29962
29963 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
29964 {
29965 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
29966 if (!rtl)
29967 return;
29968 decl = SYMBOL_REF_DECL (rtl);
29969 }
29970 else
29971 decl = TREE_OPERAND (init, 0);
29972 ref = lookup_decl_die (decl);
29973 if (ref == NULL
29974 || (!get_AT (ref, DW_AT_location)
29975 && !get_AT (ref, DW_AT_const_value)))
29976 return;
29977 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
29978 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29979 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
29980 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
29981 add_AT_loc (die, DW_AT_location, l);
29982 }
29983 }
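
/* For example, for

     static const char *p = str + 4;

   where P itself has been optimized away but STR is a variable
   described by a DIE with DW_AT_location or DW_AT_const_value, this
   can give P a DW_AT_location of

     DW_OP_implicit_pointer <DIE of str> 4

   while initializers representable as constants are handled earlier
   by emitting DW_AT_const_value instead.  */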
29984
29985 /* Return NULL if L is a valid DWARF expression; otherwise return the
29986 first op that is not a valid DWARF expression. */
29987
29988 static dw_loc_descr_ref
29989 non_dwarf_expression (dw_loc_descr_ref l)
29990 {
29991 while (l)
29992 {
29993 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
29994 return l;
29995 switch (l->dw_loc_opc)
29996 {
29997 case DW_OP_regx:
29998 case DW_OP_implicit_value:
29999 case DW_OP_stack_value:
30000 case DW_OP_implicit_pointer:
30001 case DW_OP_GNU_implicit_pointer:
30002 case DW_OP_GNU_parameter_ref:
30003 case DW_OP_piece:
30004 case DW_OP_bit_piece:
30005 return l;
30006 default:
30007 break;
30008 }
30009 l = l->dw_loc_next;
30010 }
30011 return NULL;
30012 }
30013
30014 /* Return an adjusted copy of EXPR:
30015 If it is an empty DWARF expression, return it.
30016 If it is a valid non-empty DWARF expression,
30017 return a copy of EXPR with DW_OP_deref appended to it.
30018 If it is a DWARF expression followed by DW_OP_reg{N,x}, return
30019 a copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
30020 If it is a DWARF expression followed by DW_OP_stack_value, return
30021 a copy of the DWARF expression with nothing appended.
30022 Otherwise, return NULL. */
30023
30024 static dw_loc_descr_ref
30025 copy_deref_exprloc (dw_loc_descr_ref expr)
30026 {
30027 dw_loc_descr_ref tail = NULL;
30028
30029 if (expr == NULL)
30030 return NULL;
30031
30032 dw_loc_descr_ref l = non_dwarf_expression (expr);
30033 if (l && l->dw_loc_next)
30034 return NULL;
30035
30036 if (l)
30037 {
30038 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30039 tail = new_loc_descr ((enum dwarf_location_atom)
30040 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30041 0, 0);
30042 else
30043 switch (l->dw_loc_opc)
30044 {
30045 case DW_OP_regx:
30046 tail = new_loc_descr (DW_OP_bregx,
30047 l->dw_loc_oprnd1.v.val_unsigned, 0);
30048 break;
30049 case DW_OP_stack_value:
30050 break;
30051 default:
30052 return NULL;
30053 }
30054 }
30055 else
30056 tail = new_loc_descr (DW_OP_deref, 0, 0);
30057
30058 dw_loc_descr_ref ret = NULL, *p = &ret;
30059 while (expr != l)
30060 {
30061 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30062 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30063 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30064 p = &(*p)->dw_loc_next;
30065 expr = expr->dw_loc_next;
30066 }
30067 *p = tail;
30068 return ret;
30069 }
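
/* A few examples of the adjustment:

     DW_OP_fbreg -24                    =>  DW_OP_fbreg -24; DW_OP_deref
     DW_OP_reg5                         =>  DW_OP_breg5 0
     DW_OP_breg6 8; DW_OP_stack_value   =>  DW_OP_breg6 8

   i.e. the copy always evaluates to the value stored at the location
   the original expression described.  */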
30070
30071 /* For a DW_AT_string_length attribute with a DW_OP_GNU_variable_value
30072 reference to a variable or argument, adjust it if needed and return:
30073 -1 if the DW_AT_string_length attribute (and DW_AT_{string_length_,}byte_size
30074 attribute, if present) should be removed,
30075 0 if the attribute should be kept, perhaps with minor modifications (no
30076 need to rescan), or 1 if the attribute has been successfully adjusted. */
30077
30078 static int
30079 optimize_string_length (dw_attr_node *a)
30080 {
30081 dw_loc_descr_ref l = AT_loc (a), lv;
30082 dw_die_ref die;
30083 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30084 {
30085 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30086 die = lookup_decl_die (decl);
30087 if (die)
30088 {
30089 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30090 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30091 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30092 }
30093 else
30094 return -1;
30095 }
30096 else
30097 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30098
30099 /* DWARF5 allows reference class, so we can then reference the DIE.
30100 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30101 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30102 {
30103 a->dw_attr_val.val_class = dw_val_class_die_ref;
30104 a->dw_attr_val.val_entry = NULL;
30105 a->dw_attr_val.v.val_die_ref.die = die;
30106 a->dw_attr_val.v.val_die_ref.external = 0;
30107 return 0;
30108 }
30109
30110 dw_attr_node *av = get_AT (die, DW_AT_location);
30111 dw_loc_list_ref d;
30112 bool non_dwarf_expr = false;
30113
30114 if (av == NULL)
30115 return dwarf_strict ? -1 : 0;
30116 switch (AT_class (av))
30117 {
30118 case dw_val_class_loc_list:
30119 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30120 if (d->expr && non_dwarf_expression (d->expr))
30121 non_dwarf_expr = true;
30122 break;
30123 case dw_val_class_view_list:
30124 gcc_unreachable ();
30125 case dw_val_class_loc:
30126 lv = AT_loc (av);
30127 if (lv == NULL)
30128 return dwarf_strict ? -1 : 0;
30129 if (non_dwarf_expression (lv))
30130 non_dwarf_expr = true;
30131 break;
30132 default:
30133 return dwarf_strict ? -1 : 0;
30134 }
30135
30136 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30137 into DW_OP_call4 or DW_OP_GNU_variable_value into
30138 DW_OP_call4 DW_OP_deref, do so. */
30139 if (!non_dwarf_expr
30140 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30141 {
30142 l->dw_loc_opc = DW_OP_call4;
30143 if (l->dw_loc_next)
30144 l->dw_loc_next = NULL;
30145 else
30146 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30147 return 0;
30148 }
30149
30150 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30151 copy over the DW_AT_location attribute from die to a. */
30152 if (l->dw_loc_next != NULL)
30153 {
30154 a->dw_attr_val = av->dw_attr_val;
30155 return 1;
30156 }
30157
30158 dw_loc_list_ref list, *p;
30159 switch (AT_class (av))
30160 {
30161 case dw_val_class_loc_list:
30162 p = &list;
30163 list = NULL;
30164 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30165 {
30166 lv = copy_deref_exprloc (d->expr);
30167 if (lv)
30168 {
30169 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30170 p = &(*p)->dw_loc_next;
30171 }
30172 else if (!dwarf_strict && d->expr)
30173 return 0;
30174 }
30175 if (list == NULL)
30176 return dwarf_strict ? -1 : 0;
30177 a->dw_attr_val.val_class = dw_val_class_loc_list;
30178 gen_llsym (list);
30179 *AT_loc_list_ptr (a) = list;
30180 return 1;
30181 case dw_val_class_loc:
30182 lv = copy_deref_exprloc (AT_loc (av));
30183 if (lv == NULL)
30184 return dwarf_strict ? -1 : 0;
30185 a->dw_attr_val.v.val_loc = lv;
30186 return 1;
30187 default:
30188 gcc_unreachable ();
30189 }
30190 }
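
/* For example, a string length held in an artificial variable LEN with
   a simple DWARF location can be rewritten from

     DW_AT_string_length: DW_OP_GNU_variable_value <DIE of len>

   into the standard

     DW_AT_string_length: DW_OP_call4 <DIE of len>; DW_OP_deref

   or, when the original expression ends in DW_OP_stack_value, into
   just DW_OP_call4 <DIE of len>.  */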
30191
30192 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30193 an address in .rodata section if the string literal is emitted there,
30194 or remove the containing location list or replace DW_AT_const_value
30195 with DW_AT_location and empty location expression, if it isn't found
30196 in .rodata. Similarly for SYMBOL_REFs, keep only those that refer
30197 to something that has been emitted in the current CU. */
30198
30199 static void
30200 resolve_addr (dw_die_ref die)
30201 {
30202 dw_die_ref c;
30203 dw_attr_node *a;
30204 dw_loc_list_ref *curr, *start, loc;
30205 unsigned ix;
30206 bool remove_AT_byte_size = false;
30207
30208 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30209 switch (AT_class (a))
30210 {
30211 case dw_val_class_loc_list:
30212 start = curr = AT_loc_list_ptr (a);
30213 loc = *curr;
30214 gcc_assert (loc);
30215 /* The same list can be referenced more than once. See if we have
30216 already recorded the result from a previous pass. */
30217 if (loc->replaced)
30218 *curr = loc->dw_loc_next;
30219 else if (!loc->resolved_addr)
30220 {
30221 /* As things stand, we do not expect or allow one die to
30222 reference a suffix of another die's location list chain.
30223 References must be identical or completely separate.
30224 There is therefore no need to cache the result of this
30225 pass on any list other than the first; doing so
30226 would lead to unnecessary writes. */
30227 while (*curr)
30228 {
30229 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30230 if (!resolve_addr_in_expr (a, (*curr)->expr))
30231 {
30232 dw_loc_list_ref next = (*curr)->dw_loc_next;
30233 dw_loc_descr_ref l = (*curr)->expr;
30234
30235 if (next && (*curr)->ll_symbol)
30236 {
30237 gcc_assert (!next->ll_symbol);
30238 next->ll_symbol = (*curr)->ll_symbol;
30239 next->vl_symbol = (*curr)->vl_symbol;
30240 }
30241 if (dwarf_split_debug_info)
30242 remove_loc_list_addr_table_entries (l);
30243 *curr = next;
30244 }
30245 else
30246 {
30247 mark_base_types ((*curr)->expr);
30248 curr = &(*curr)->dw_loc_next;
30249 }
30250 }
30251 if (loc == *start)
30252 loc->resolved_addr = 1;
30253 else
30254 {
30255 loc->replaced = 1;
30256 loc->dw_loc_next = *start;
30257 }
30258 }
30259 if (!*start)
30260 {
30261 remove_AT (die, a->dw_attr);
30262 ix--;
30263 }
30264 break;
30265 case dw_val_class_view_list:
30266 {
30267 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30268 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30269 dw_val_node *llnode
30270 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30271 /* If we no longer have a loclist, or it no longer needs
30272 views, drop this attribute. */
30273 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30274 {
30275 remove_AT (die, a->dw_attr);
30276 ix--;
30277 }
30278 break;
30279 }
30280 case dw_val_class_loc:
30281 {
30282 dw_loc_descr_ref l = AT_loc (a);
30283 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30284 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30285 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30286 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30287 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30288 with DW_FORM_ref referencing the same DIE as
30289 DW_OP_GNU_variable_value used to reference. */
30290 if (a->dw_attr == DW_AT_string_length
30291 && l
30292 && l->dw_loc_opc == DW_OP_GNU_variable_value
30293 && (l->dw_loc_next == NULL
30294 || (l->dw_loc_next->dw_loc_next == NULL
30295 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30296 {
30297 switch (optimize_string_length (a))
30298 {
30299 case -1:
30300 remove_AT (die, a->dw_attr);
30301 ix--;
30302 /* If we drop DW_AT_string_length, we also need to drop
30303 DW_AT_{string_length_,}byte_size. */
30304 remove_AT_byte_size = true;
30305 continue;
30306 default:
30307 break;
30308 case 1:
30309 /* Even if we keep the optimized DW_AT_string_length,
30310 it might have changed AT_class, so process it again. */
30311 ix--;
30312 continue;
30313 }
30314 }
30315 /* For -gdwarf-2 don't attempt to optimize
30316 DW_AT_data_member_location containing
30317 DW_OP_plus_uconst - older consumers might
30318 rely on it being that op instead of a more complex,
30319 but shorter, location description. */
30320 if ((dwarf_version > 2
30321 || a->dw_attr != DW_AT_data_member_location
30322 || l == NULL
30323 || l->dw_loc_opc != DW_OP_plus_uconst
30324 || l->dw_loc_next != NULL)
30325 && !resolve_addr_in_expr (a, l))
30326 {
30327 if (dwarf_split_debug_info)
30328 remove_loc_list_addr_table_entries (l);
30329 if (l != NULL
30330 && l->dw_loc_next == NULL
30331 && l->dw_loc_opc == DW_OP_addr
30332 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30333 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30334 && a->dw_attr == DW_AT_location)
30335 {
30336 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30337 remove_AT (die, a->dw_attr);
30338 ix--;
30339 optimize_location_into_implicit_ptr (die, decl);
30340 break;
30341 }
30342 if (a->dw_attr == DW_AT_string_length)
30343 /* If we drop DW_AT_string_length, we also need to drop
30344 DW_AT_{string_length_,}byte_size. */
30345 remove_AT_byte_size = true;
30346 remove_AT (die, a->dw_attr);
30347 ix--;
30348 }
30349 else
30350 mark_base_types (l);
30351 }
30352 break;
30353 case dw_val_class_addr:
30354 if (a->dw_attr == DW_AT_const_value
30355 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30356 {
30357 if (AT_index (a) != NOT_INDEXED)
30358 remove_addr_table_entry (a->dw_attr_val.val_entry);
30359 remove_AT (die, a->dw_attr);
30360 ix--;
30361 }
30362 if ((die->die_tag == DW_TAG_call_site
30363 && a->dw_attr == DW_AT_call_origin)
30364 || (die->die_tag == DW_TAG_GNU_call_site
30365 && a->dw_attr == DW_AT_abstract_origin))
30366 {
30367 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30368 dw_die_ref tdie = lookup_decl_die (tdecl);
30369 dw_die_ref cdie;
30370 if (tdie == NULL
30371 && DECL_EXTERNAL (tdecl)
30372 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30373 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30374 {
30375 dw_die_ref pdie = cdie;
30376 /* Make sure we don't add these DIEs into type units.
30377 We could emit skeleton DIEs for context (namespaces,
30378 outer structs/classes) and a skeleton DIE for the
30379 innermost context with DW_AT_signature pointing to the
30380 type unit. See PR78835. */
30381 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30382 pdie = pdie->die_parent;
30383 if (pdie == NULL)
30384 {
30385 /* Creating a full DIE for tdecl is overly expensive and
30386 at this point even wrong when in the LTO phase
30387 as it can end up generating new type DIEs we didn't
30388 output and thus optimize_external_refs will crash. */
30389 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30390 add_AT_flag (tdie, DW_AT_external, 1);
30391 add_AT_flag (tdie, DW_AT_declaration, 1);
30392 add_linkage_attr (tdie, tdecl);
30393 add_name_and_src_coords_attributes (tdie, tdecl, true);
30394 equate_decl_number_to_die (tdecl, tdie);
30395 }
30396 }
30397 if (tdie)
30398 {
30399 a->dw_attr_val.val_class = dw_val_class_die_ref;
30400 a->dw_attr_val.v.val_die_ref.die = tdie;
30401 a->dw_attr_val.v.val_die_ref.external = 0;
30402 }
30403 else
30404 {
30405 if (AT_index (a) != NOT_INDEXED)
30406 remove_addr_table_entry (a->dw_attr_val.val_entry);
30407 remove_AT (die, a->dw_attr);
30408 ix--;
30409 }
30410 }
30411 break;
30412 default:
30413 break;
30414 }
30415
30416 if (remove_AT_byte_size)
30417 remove_AT (die, dwarf_version >= 5
30418 ? DW_AT_string_length_byte_size
30419 : DW_AT_byte_size);
30420
30421 FOR_EACH_CHILD (die, c, resolve_addr (c));
30422 }
30423 \f
30424 /* Helper routines for optimize_location_lists.
30425 This pass tries to share identical location lists in the .debug_loc
30426 section. */
30427
30428 /* Iteratively hash operands of LOC opcode into HSTATE. */
30429
30430 static void
30431 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30432 {
30433 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30434 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30435
30436 switch (loc->dw_loc_opc)
30437 {
30438 case DW_OP_const4u:
30439 case DW_OP_const8u:
30440 if (loc->dtprel)
30441 goto hash_addr;
30442 /* FALLTHRU */
30443 case DW_OP_const1u:
30444 case DW_OP_const1s:
30445 case DW_OP_const2u:
30446 case DW_OP_const2s:
30447 case DW_OP_const4s:
30448 case DW_OP_const8s:
30449 case DW_OP_constu:
30450 case DW_OP_consts:
30451 case DW_OP_pick:
30452 case DW_OP_plus_uconst:
30453 case DW_OP_breg0:
30454 case DW_OP_breg1:
30455 case DW_OP_breg2:
30456 case DW_OP_breg3:
30457 case DW_OP_breg4:
30458 case DW_OP_breg5:
30459 case DW_OP_breg6:
30460 case DW_OP_breg7:
30461 case DW_OP_breg8:
30462 case DW_OP_breg9:
30463 case DW_OP_breg10:
30464 case DW_OP_breg11:
30465 case DW_OP_breg12:
30466 case DW_OP_breg13:
30467 case DW_OP_breg14:
30468 case DW_OP_breg15:
30469 case DW_OP_breg16:
30470 case DW_OP_breg17:
30471 case DW_OP_breg18:
30472 case DW_OP_breg19:
30473 case DW_OP_breg20:
30474 case DW_OP_breg21:
30475 case DW_OP_breg22:
30476 case DW_OP_breg23:
30477 case DW_OP_breg24:
30478 case DW_OP_breg25:
30479 case DW_OP_breg26:
30480 case DW_OP_breg27:
30481 case DW_OP_breg28:
30482 case DW_OP_breg29:
30483 case DW_OP_breg30:
30484 case DW_OP_breg31:
30485 case DW_OP_regx:
30486 case DW_OP_fbreg:
30487 case DW_OP_piece:
30488 case DW_OP_deref_size:
30489 case DW_OP_xderef_size:
30490 hstate.add_object (val1->v.val_int);
30491 break;
30492 case DW_OP_skip:
30493 case DW_OP_bra:
30494 {
30495 int offset;
30496
30497 gcc_assert (val1->val_class == dw_val_class_loc);
30498 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30499 hstate.add_object (offset);
30500 }
30501 break;
30502 case DW_OP_implicit_value:
30503 hstate.add_object (val1->v.val_unsigned);
30504 switch (val2->val_class)
30505 {
30506 case dw_val_class_const:
30507 hstate.add_object (val2->v.val_int);
30508 break;
30509 case dw_val_class_vec:
30510 {
30511 unsigned int elt_size = val2->v.val_vec.elt_size;
30512 unsigned int len = val2->v.val_vec.length;
30513
30514 hstate.add_int (elt_size);
30515 hstate.add_int (len);
30516 hstate.add (val2->v.val_vec.array, len * elt_size);
30517 }
30518 break;
30519 case dw_val_class_const_double:
30520 hstate.add_object (val2->v.val_double.low);
30521 hstate.add_object (val2->v.val_double.high);
30522 break;
30523 case dw_val_class_wide_int:
30524 hstate.add (val2->v.val_wide->get_val (),
30525 get_full_len (*val2->v.val_wide)
30526 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30527 break;
30528 case dw_val_class_addr:
30529 inchash::add_rtx (val2->v.val_addr, hstate);
30530 break;
30531 default:
30532 gcc_unreachable ();
30533 }
30534 break;
30535 case DW_OP_bregx:
30536 case DW_OP_bit_piece:
30537 hstate.add_object (val1->v.val_int);
30538 hstate.add_object (val2->v.val_int);
30539 break;
30540 case DW_OP_addr:
30541 hash_addr:
30542 if (loc->dtprel)
30543 {
30544 unsigned char dtprel = 0xd1;
30545 hstate.add_object (dtprel);
30546 }
30547 inchash::add_rtx (val1->v.val_addr, hstate);
30548 break;
30549 case DW_OP_GNU_addr_index:
30550 case DW_OP_addrx:
30551 case DW_OP_GNU_const_index:
30552 case DW_OP_constx:
30553 {
30554 if (loc->dtprel)
30555 {
30556 unsigned char dtprel = 0xd1;
30557 hstate.add_object (dtprel);
30558 }
30559 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30560 }
30561 break;
30562 case DW_OP_implicit_pointer:
30563 case DW_OP_GNU_implicit_pointer:
30564 hstate.add_int (val2->v.val_int);
30565 break;
30566 case DW_OP_entry_value:
30567 case DW_OP_GNU_entry_value:
30568 hstate.add_object (val1->v.val_loc);
30569 break;
30570 case DW_OP_regval_type:
30571 case DW_OP_deref_type:
30572 case DW_OP_GNU_regval_type:
30573 case DW_OP_GNU_deref_type:
30574 {
30575 unsigned int byte_size
30576 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30577 unsigned int encoding
30578 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30579 hstate.add_object (val1->v.val_int);
30580 hstate.add_object (byte_size);
30581 hstate.add_object (encoding);
30582 }
30583 break;
30584 case DW_OP_convert:
30585 case DW_OP_reinterpret:
30586 case DW_OP_GNU_convert:
30587 case DW_OP_GNU_reinterpret:
30588 if (val1->val_class == dw_val_class_unsigned_const)
30589 {
30590 hstate.add_object (val1->v.val_unsigned);
30591 break;
30592 }
30593 /* FALLTHRU */
30594 case DW_OP_const_type:
30595 case DW_OP_GNU_const_type:
30596 {
30597 unsigned int byte_size
30598 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30599 unsigned int encoding
30600 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30601 hstate.add_object (byte_size);
30602 hstate.add_object (encoding);
30603 if (loc->dw_loc_opc != DW_OP_const_type
30604 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30605 break;
30606 hstate.add_object (val2->val_class);
30607 switch (val2->val_class)
30608 {
30609 case dw_val_class_const:
30610 hstate.add_object (val2->v.val_int);
30611 break;
30612 case dw_val_class_vec:
30613 {
30614 unsigned int elt_size = val2->v.val_vec.elt_size;
30615 unsigned int len = val2->v.val_vec.length;
30616
30617 hstate.add_object (elt_size);
30618 hstate.add_object (len);
30619 hstate.add (val2->v.val_vec.array, len * elt_size);
30620 }
30621 break;
30622 case dw_val_class_const_double:
30623 hstate.add_object (val2->v.val_double.low);
30624 hstate.add_object (val2->v.val_double.high);
30625 break;
30626 case dw_val_class_wide_int:
30627 hstate.add (val2->v.val_wide->get_val (),
30628 get_full_len (*val2->v.val_wide)
30629 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30630 break;
30631 default:
30632 gcc_unreachable ();
30633 }
30634 }
30635 break;
30636
30637 default:
30638 /* Other codes have no operands. */
30639 break;
30640 }
30641 }
30642
30643 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30644
30645 static inline void
30646 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30647 {
30648 dw_loc_descr_ref l;
30649 bool sizes_computed = false;
30650 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30651 size_of_locs (loc);
30652
30653 for (l = loc; l != NULL; l = l->dw_loc_next)
30654 {
30655 enum dwarf_location_atom opc = l->dw_loc_opc;
30656 hstate.add_object (opc);
30657 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30658 {
30659 size_of_locs (loc);
30660 sizes_computed = true;
30661 }
30662 hash_loc_operands (l, hstate);
30663 }
30664 }
30665
30666 /* Compute hash of the whole location list LIST_HEAD. */
30667
30668 static inline void
30669 hash_loc_list (dw_loc_list_ref list_head)
30670 {
30671 dw_loc_list_ref curr = list_head;
30672 inchash::hash hstate;
30673
30674 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30675 {
30676 hstate.add (curr->begin, strlen (curr->begin) + 1);
30677 hstate.add (curr->end, strlen (curr->end) + 1);
30678 hstate.add_object (curr->vbegin);
30679 hstate.add_object (curr->vend);
30680 if (curr->section)
30681 hstate.add (curr->section, strlen (curr->section) + 1);
30682 hash_locs (curr->expr, hstate);
30683 }
30684 list_head->hash = hstate.end ();
30685 }
30686
30687 /* Return true if X and Y opcodes have the same operands. */
30688
30689 static inline bool
30690 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30691 {
30692 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30693 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30694 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30695 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30696
30697 switch (x->dw_loc_opc)
30698 {
30699 case DW_OP_const4u:
30700 case DW_OP_const8u:
30701 if (x->dtprel)
30702 goto hash_addr;
30703 /* FALLTHRU */
30704 case DW_OP_const1u:
30705 case DW_OP_const1s:
30706 case DW_OP_const2u:
30707 case DW_OP_const2s:
30708 case DW_OP_const4s:
30709 case DW_OP_const8s:
30710 case DW_OP_constu:
30711 case DW_OP_consts:
30712 case DW_OP_pick:
30713 case DW_OP_plus_uconst:
30714 case DW_OP_breg0:
30715 case DW_OP_breg1:
30716 case DW_OP_breg2:
30717 case DW_OP_breg3:
30718 case DW_OP_breg4:
30719 case DW_OP_breg5:
30720 case DW_OP_breg6:
30721 case DW_OP_breg7:
30722 case DW_OP_breg8:
30723 case DW_OP_breg9:
30724 case DW_OP_breg10:
30725 case DW_OP_breg11:
30726 case DW_OP_breg12:
30727 case DW_OP_breg13:
30728 case DW_OP_breg14:
30729 case DW_OP_breg15:
30730 case DW_OP_breg16:
30731 case DW_OP_breg17:
30732 case DW_OP_breg18:
30733 case DW_OP_breg19:
30734 case DW_OP_breg20:
30735 case DW_OP_breg21:
30736 case DW_OP_breg22:
30737 case DW_OP_breg23:
30738 case DW_OP_breg24:
30739 case DW_OP_breg25:
30740 case DW_OP_breg26:
30741 case DW_OP_breg27:
30742 case DW_OP_breg28:
30743 case DW_OP_breg29:
30744 case DW_OP_breg30:
30745 case DW_OP_breg31:
30746 case DW_OP_regx:
30747 case DW_OP_fbreg:
30748 case DW_OP_piece:
30749 case DW_OP_deref_size:
30750 case DW_OP_xderef_size:
30751 return valx1->v.val_int == valy1->v.val_int;
30752 case DW_OP_skip:
30753 case DW_OP_bra:
30754 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30755 can cause irrelevant differences in dw_loc_addr. */
30756 gcc_assert (valx1->val_class == dw_val_class_loc
30757 && valy1->val_class == dw_val_class_loc
30758 && (dwarf_split_debug_info
30759 || x->dw_loc_addr == y->dw_loc_addr));
30760 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30761 case DW_OP_implicit_value:
30762 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30763 || valx2->val_class != valy2->val_class)
30764 return false;
30765 switch (valx2->val_class)
30766 {
30767 case dw_val_class_const:
30768 return valx2->v.val_int == valy2->v.val_int;
30769 case dw_val_class_vec:
30770 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30771 && valx2->v.val_vec.length == valy2->v.val_vec.length
30772 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30773 valx2->v.val_vec.elt_size
30774 * valx2->v.val_vec.length) == 0;
30775 case dw_val_class_const_double:
30776 return valx2->v.val_double.low == valy2->v.val_double.low
30777 && valx2->v.val_double.high == valy2->v.val_double.high;
30778 case dw_val_class_wide_int:
30779 return *valx2->v.val_wide == *valy2->v.val_wide;
30780 case dw_val_class_addr:
30781 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
30782 default:
30783 gcc_unreachable ();
30784 }
30785 case DW_OP_bregx:
30786 case DW_OP_bit_piece:
30787 return valx1->v.val_int == valy1->v.val_int
30788 && valx2->v.val_int == valy2->v.val_int;
30789 case DW_OP_addr:
30790 hash_addr:
30791 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
30792 case DW_OP_GNU_addr_index:
30793 case DW_OP_addrx:
30794 case DW_OP_GNU_const_index:
30795 case DW_OP_constx:
30796 {
30797 rtx ax1 = valx1->val_entry->addr.rtl;
30798 rtx ay1 = valy1->val_entry->addr.rtl;
30799 return rtx_equal_p (ax1, ay1);
30800 }
30801 case DW_OP_implicit_pointer:
30802 case DW_OP_GNU_implicit_pointer:
30803 return valx1->val_class == dw_val_class_die_ref
30804 && valx1->val_class == valy1->val_class
30805 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
30806 && valx2->v.val_int == valy2->v.val_int;
30807 case DW_OP_entry_value:
30808 case DW_OP_GNU_entry_value:
30809 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
30810 case DW_OP_const_type:
30811 case DW_OP_GNU_const_type:
30812 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
30813 || valx2->val_class != valy2->val_class)
30814 return false;
30815 switch (valx2->val_class)
30816 {
30817 case dw_val_class_const:
30818 return valx2->v.val_int == valy2->v.val_int;
30819 case dw_val_class_vec:
30820 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30821 && valx2->v.val_vec.length == valy2->v.val_vec.length
30822 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30823 valx2->v.val_vec.elt_size
30824 * valx2->v.val_vec.length) == 0;
30825 case dw_val_class_const_double:
30826 return valx2->v.val_double.low == valy2->v.val_double.low
30827 && valx2->v.val_double.high == valy2->v.val_double.high;
30828 case dw_val_class_wide_int:
30829 return *valx2->v.val_wide == *valy2->v.val_wide;
30830 default:
30831 gcc_unreachable ();
30832 }
30833 case DW_OP_regval_type:
30834 case DW_OP_deref_type:
30835 case DW_OP_GNU_regval_type:
30836 case DW_OP_GNU_deref_type:
30837 return valx1->v.val_int == valy1->v.val_int
30838 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
30839 case DW_OP_convert:
30840 case DW_OP_reinterpret:
30841 case DW_OP_GNU_convert:
30842 case DW_OP_GNU_reinterpret:
30843 if (valx1->val_class != valy1->val_class)
30844 return false;
30845 if (valx1->val_class == dw_val_class_unsigned_const)
30846 return valx1->v.val_unsigned == valy1->v.val_unsigned;
30847 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30848 case DW_OP_GNU_parameter_ref:
30849 return valx1->val_class == dw_val_class_die_ref
30850 && valx1->val_class == valy1->val_class
30851 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30852 default:
30853 /* Other codes have no operands. */
30854 return true;
30855 }
30856 }
30857
30858 /* Return true if DWARF location expressions X and Y are the same. */
30859
30860 static inline bool
30861 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
30862 {
30863 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
30864 if (x->dw_loc_opc != y->dw_loc_opc
30865 || x->dtprel != y->dtprel
30866 || !compare_loc_operands (x, y))
30867 break;
30868 return x == NULL && y == NULL;
30869 }
30870
30871 /* Hashtable helpers. */
30872
30873 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
30874 {
30875 static inline hashval_t hash (const dw_loc_list_struct *);
30876 static inline bool equal (const dw_loc_list_struct *,
30877 const dw_loc_list_struct *);
30878 };
30879
30880 /* Return precomputed hash of location list X. */
30881
30882 inline hashval_t
30883 loc_list_hasher::hash (const dw_loc_list_struct *x)
30884 {
30885 return x->hash;
30886 }
30887
30888 /* Return true if location lists A and B are the same. */
30889
30890 inline bool
30891 loc_list_hasher::equal (const dw_loc_list_struct *a,
30892 const dw_loc_list_struct *b)
30893 {
30894 if (a == b)
30895 return true;
30896 if (a->hash != b->hash)
30897 return false;
30898 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
30899 if (strcmp (a->begin, b->begin) != 0
30900 || strcmp (a->end, b->end) != 0
30901 || (a->section == NULL) != (b->section == NULL)
30902 || (a->section && strcmp (a->section, b->section) != 0)
30903 || a->vbegin != b->vbegin || a->vend != b->vend
30904 || !compare_locs (a->expr, b->expr))
30905 break;
30906 return a == NULL && b == NULL;
30907 }
30908
30909 typedef hash_table<loc_list_hasher> loc_list_hash_type;
30910
30911
30912 /* Recursively optimize location lists referenced from DIE
30913 children and share them whenever possible. */
30914
30915 static void
30916 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
30917 {
30918 dw_die_ref c;
30919 dw_attr_node *a;
30920 unsigned ix;
30921 dw_loc_list_struct **slot;
30922 bool drop_locviews = false;
30923 bool has_locviews = false;
30924
30925 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30926 if (AT_class (a) == dw_val_class_loc_list)
30927 {
30928 dw_loc_list_ref list = AT_loc_list (a);
30929 /* TODO: perform some optimizations here, before hashing
30930 it and storing into the hash table. */
30931 hash_loc_list (list);
30932 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
30933 if (*slot == NULL)
30934 {
30935 *slot = list;
30936 if (loc_list_has_views (list))
30937 gcc_assert (list->vl_symbol);
30938 else if (list->vl_symbol)
30939 {
30940 drop_locviews = true;
30941 list->vl_symbol = NULL;
30942 }
30943 }
30944 else
30945 {
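/* An identical location list is already in the hash table: share it.
   If this copy carried location views but the shared one does not,
   the views must be dropped below.  */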
30946 if (list->vl_symbol && !(*slot)->vl_symbol)
30947 drop_locviews = true;
30948 a->dw_attr_val.v.val_loc_list = *slot;
30949 }
30950 }
30951 else if (AT_class (a) == dw_val_class_view_list)
30952 {
30953 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30954 has_locviews = true;
30955 }
30956
30957
30958 if (drop_locviews && has_locviews)
30959 remove_AT (die, DW_AT_GNU_locviews);
30960
30961 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
30962 }
30963
30964
30965 /* Recursively assign each location list a unique index into the debug_addr
30966 section. */
30967
30968 static void
30969 index_location_lists (dw_die_ref die)
30970 {
30971 dw_die_ref c;
30972 dw_attr_node *a;
30973 unsigned ix;
30974
30975 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30976 if (AT_class (a) == dw_val_class_loc_list)
30977 {
30978 dw_loc_list_ref list = AT_loc_list (a);
30979 dw_loc_list_ref curr;
30980 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
30981 {
30982 /* Don't index an entry that has already been indexed
30983 or won't be output. Make sure skip_loc_list_entry doesn't
30984 call size_of_locs, because that might cause a circular
30985 dependency: index_location_lists requires address table
30986 indexes to be computed while it is still adding new indexes
30987 through add_addr_table_entry, and address table index
30988 computation requires no new additions to the hash table.
30989 In the rare case of a DWARF[234] location expression >= 64KB,
30990 we'll just waste an unused address table entry for it. */
30991 if (curr->begin_entry != NULL
30992 || skip_loc_list_entry (curr))
30993 continue;
30994
30995 curr->begin_entry
30996 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
30997 }
30998 }
30999
31000 FOR_EACH_CHILD (die, c, index_location_lists (c));
31001 }
31002
31003 /* Optimize location lists referenced from DIE
31004 children and share them whenever possible. */
31005
31006 static void
31007 optimize_location_lists (dw_die_ref die)
31008 {
31009 loc_list_hash_type htab (500);
31010 optimize_location_lists_1 (die, &htab);
31011 }
31012 \f
31013 /* Traverse the limbo die list, and add parent/child links. The only
31014 dies without parents that should be here are concrete instances of
31015 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
31016 For concrete instances, we can get the parent die from the abstract
31017 instance. */
31018
31019 static void
31020 flush_limbo_die_list (void)
31021 {
31022 limbo_die_node *node;
31023
31024 /* get_context_die calls force_decl_die, which can put new DIEs on the
31025 limbo list in LTO mode when nested functions are put in a different
31026 partition than that of their parent function. */
31027 while ((node = limbo_die_list))
31028 {
31029 dw_die_ref die = node->die;
31030 limbo_die_list = node->next;
31031
31032 if (die->die_parent == NULL)
31033 {
31034 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31035
31036 if (origin && origin->die_parent)
31037 add_child_die (origin->die_parent, die);
31038 else if (is_cu_die (die))
31039 ;
31040 else if (seen_error ())
31041 /* It's OK to be confused by errors in the input. */
31042 add_child_die (comp_unit_die (), die);
31043 else
31044 {
31045 /* In certain situations, the lexical block containing a
31046 nested function can be optimized away, which results
31047 in the nested function die being orphaned. Likewise
31048 with the return type of that nested function. Force
31049 this to be a child of the containing function.
31050
31051 It may happen that even the containing function was fully
31052 inlined and optimized out. In that case we are lost and the
31053 DIE ends up under whatever context DIE we can still create.
31054 This should not be a big issue, as the function is likely unreachable too. */
31055 gcc_assert (node->created_for);
31056
31057 if (DECL_P (node->created_for))
31058 origin = get_context_die (DECL_CONTEXT (node->created_for));
31059 else if (TYPE_P (node->created_for))
31060 origin = scope_die_for (node->created_for, comp_unit_die ());
31061 else
31062 origin = comp_unit_die ();
31063
31064 add_child_die (origin, die);
31065 }
31066 }
31067 }
31068 }
31069
31070 /* Reset DIEs so we can output them again. */
31071
31072 static void
31073 reset_dies (dw_die_ref die)
31074 {
31075 dw_die_ref c;
31076
31077 /* Remove stuff we re-generate. */
31078 die->die_mark = 0;
31079 die->die_offset = 0;
31080 die->die_abbrev = 0;
31081 remove_AT (die, DW_AT_sibling);
31082
31083 FOR_EACH_CHILD (die, c, reset_dies (c));
31084 }
31085
31086 /* Output stuff that dwarf requires at the end of every file,
31087 and generate the DWARF-2 debugging info. */
31088
31089 static void
31090 dwarf2out_finish (const char *filename)
31091 {
31092 comdat_type_node *ctnode;
31093 dw_die_ref main_comp_unit_die;
31094 unsigned char checksum[16];
31095 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31096
31097 /* Flush out any latecomers to the limbo party. */
31098 flush_limbo_die_list ();
31099
31100 if (inline_entry_data_table)
31101 gcc_assert (inline_entry_data_table->elements () == 0);
31102
31103 if (flag_checking)
31104 {
31105 verify_die (comp_unit_die ());
31106 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31107 verify_die (node->die);
31108 }
31109
31110 /* We shouldn't have any symbols with delayed asm names for
31111 DIEs generated after early finish. */
31112 gcc_assert (deferred_asm_name == NULL);
31113
31114 gen_remaining_tmpl_value_param_die_attribute ();
31115
31116 if (flag_generate_lto || flag_generate_offload)
31117 {
31118 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31119
31120 /* Prune stuff so that dwarf2out_finish runs successfully
31121 for the fat part of the object. */
31122 reset_dies (comp_unit_die ());
31123 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31124 reset_dies (node->die);
31125
31126 hash_table<comdat_type_hasher> comdat_type_table (100);
31127 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31128 {
31129 comdat_type_node **slot
31130 = comdat_type_table.find_slot (ctnode, INSERT);
31131
31132 /* Don't reset types twice. */
31133 if (*slot != HTAB_EMPTY_ENTRY)
31134 continue;
31135
31136 /* Remove the pointer to the line table. */
31137 remove_AT (ctnode->root_die, DW_AT_stmt_list);
31138
31139 if (debug_info_level >= DINFO_LEVEL_TERSE)
31140 reset_dies (ctnode->root_die);
31141
31142 *slot = ctnode;
31143 }
31144
31145 /* Reset the CU DIE symbol so we don't output it twice. */
31146 comp_unit_die ()->die_id.die_symbol = NULL;
31147
31148 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31149 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31150 if (have_macinfo)
31151 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31152
31153 /* Remove indirect string decisions. */
31154 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31155 if (debug_line_str_hash)
31156 {
31157 debug_line_str_hash->traverse<void *, reset_indirect_string> (NULL);
31158 debug_line_str_hash = NULL;
31159 }
31160 }
31161
31162 #if ENABLE_ASSERT_CHECKING
31163 {
31164 dw_die_ref die = comp_unit_die (), c;
31165 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31166 }
31167 #endif
31168 resolve_addr (comp_unit_die ());
31169 move_marked_base_types ();
31170
31171 if (dump_file)
31172 {
31173 fprintf (dump_file, "DWARF for %s\n", filename);
31174 print_die (comp_unit_die (), dump_file);
31175 }
31176
31177 /* Initialize sections and labels used for actual assembler output. */
31178 unsigned generation = init_sections_and_labels (false);
31179
31180 /* Traverse the DIE's and add sibling attributes to those DIE's that
31181 have children. */
31182 add_sibling_attributes (comp_unit_die ());
31183 limbo_die_node *node;
31184 for (node = cu_die_list; node; node = node->next)
31185 add_sibling_attributes (node->die);
31186 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31187 add_sibling_attributes (ctnode->root_die);
31188
31189 /* When splitting DWARF info, we put some attributes in the
31190 skeleton compile_unit DIE that remains in the .o, while
31191 most attributes go in the DWO compile_unit_die. */
31192 if (dwarf_split_debug_info)
31193 {
31194 limbo_die_node *cu;
31195 main_comp_unit_die = gen_compile_unit_die (NULL);
31196 if (dwarf_version >= 5)
31197 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
31198 cu = limbo_die_list;
31199 gcc_assert (cu->die == main_comp_unit_die);
31200 limbo_die_list = limbo_die_list->next;
31201 cu->next = cu_die_list;
31202 cu_die_list = cu;
31203 }
31204 else
31205 main_comp_unit_die = comp_unit_die ();
31206
31207 /* Output a terminator label for the .text section. */
31208 switch_to_section (text_section);
31209 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31210 if (cold_text_section)
31211 {
31212 switch_to_section (cold_text_section);
31213 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31214 }
31215
31216 /* We can only use the low/high_pc attributes if all of the code was
31217 in .text. */
31218 if (!have_multiple_function_sections
31219 || (dwarf_version < 3 && dwarf_strict))
31220 {
31221 /* Don't add if the CU has no associated code. */
31222 if (text_section_used)
31223 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31224 text_end_label, true);
31225 }
31226 else
31227 {
31228 unsigned fde_idx;
31229 dw_fde_ref fde;
31230 bool range_list_added = false;
31231
31232 if (text_section_used)
31233 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31234 text_end_label, &range_list_added, true);
31235 if (cold_text_section_used)
31236 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31237 cold_end_label, &range_list_added, true);
31238
31239 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31240 {
31241 if (DECL_IGNORED_P (fde->decl))
31242 continue;
31243 if (!fde->in_std_section)
31244 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31245 fde->dw_fde_end, &range_list_added,
31246 true);
31247 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31248 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31249 fde->dw_fde_second_end, &range_list_added,
31250 true);
31251 }
31252
31253 if (range_list_added)
31254 {
31255 /* We need to give .debug_loc and .debug_ranges an appropriate
31256 "base address". Use zero so that these addresses become
31257 absolute. Historically, we've emitted the unexpected
31258 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31259 Emit both to give time for other tools to adapt. */
31260 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31261 if (! dwarf_strict && dwarf_version < 4)
31262 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31263
31264 add_ranges (NULL);
31265 }
31266 }
31267
31268 /* AIX Assembler inserts the length, so adjust the reference to match the
31269 offset expected by debuggers. */
31270 strcpy (dl_section_ref, debug_line_section_label);
31271 if (XCOFF_DEBUGGING_INFO)
31272 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31273
31274 if (debug_info_level >= DINFO_LEVEL_TERSE)
31275 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31276 dl_section_ref);
31277
31278 if (have_macinfo)
31279 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31280 macinfo_section_label);
31281
31282 if (dwarf_split_debug_info)
31283 {
31284 if (have_location_lists)
31285 {
31286 /* Since we generate the loclists in the split DWARF .dwo
31287 file itself, we don't need to generate a loclists_base
31288 attribute for the split compile unit DIE. That attribute
31289 (and using relocatable sec_offset FORMs) isn't allowed
31290 for a split compile unit. Only if the .debug_loclists
31291 section was in the main file, would we need to generate a
31292 loclists_base attribute here (for the full or skeleton
31293 unit DIE). */
31294
31295 /* optimize_location_lists calculates the size of the lists,
31296 so index them first, and assign indices to the entries.
31297 Although optimize_location_lists will remove entries from
31298 the table, it only does so for duplicates, and therefore
31299 only reduces ref_counts to 1. */
31300 index_location_lists (comp_unit_die ());
31301 }
31302
31303 if (addr_index_table != NULL)
31304 {
31305 unsigned int index = 0;
31306 addr_index_table
31307 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31308 (&index);
31309 }
31310 }
31311
31312 loc_list_idx = 0;
31313 if (have_location_lists)
31314 {
31315 optimize_location_lists (comp_unit_die ());
31316 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31317 if (dwarf_version >= 5 && dwarf_split_debug_info)
31318 assign_location_list_indexes (comp_unit_die ());
31319 }
31320
31321 save_macinfo_strings ();
31322
31323 if (dwarf_split_debug_info)
31324 {
31325 unsigned int index = 0;
31326
31327 /* Add attributes common to skeleton compile_units and
31328 type_units. Because these attributes include strings, it
31329 must be done before freezing the string table. Top-level
31330 skeleton die attrs are added when the skeleton type unit is
31331 created, so ensure it is created by this point. */
31332 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31333 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31334 }
31335
31336 /* Output all of the compilation units. We put the main one last so that
31337 the offsets are available to output_pubnames. */
31338 for (node = cu_die_list; node; node = node->next)
31339 output_comp_unit (node->die, 0, NULL);
31340
31341 hash_table<comdat_type_hasher> comdat_type_table (100);
31342 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31343 {
31344 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31345
31346 /* Don't output duplicate types. */
31347 if (*slot != HTAB_EMPTY_ENTRY)
31348 continue;
31349
31350 /* Add a pointer to the line table for the main compilation unit
31351 so that the debugger can make sense of DW_AT_decl_file
31352 attributes. */
31353 if (debug_info_level >= DINFO_LEVEL_TERSE)
31354 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31355 (!dwarf_split_debug_info
31356 ? dl_section_ref
31357 : debug_skeleton_line_section_label));
31358
31359 output_comdat_type_unit (ctnode);
31360 *slot = ctnode;
31361 }
31362
31363 if (dwarf_split_debug_info)
31364 {
31365 int mark;
31366 struct md5_ctx ctx;
31367
31368 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31369 index_rnglists ();
31370
31371 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31372 md5_init_ctx (&ctx);
31373 mark = 0;
31374 die_checksum (comp_unit_die (), &ctx, &mark);
31375 unmark_all_dies (comp_unit_die ());
31376 md5_finish_ctx (&ctx, checksum);
31377
31378 if (dwarf_version < 5)
31379 {
31380 /* Use the first 8 bytes of the checksum as the dwo_id,
31381 and add it to both comp-unit DIEs. */
31382 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31383 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31384 }
31385
31386 /* Add the base offset of the ranges table to the skeleton
31387 comp-unit DIE. */
31388 if (!vec_safe_is_empty (ranges_table))
31389 {
31390 if (dwarf_version >= 5)
31391 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31392 ranges_base_label);
31393 else
31394 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31395 ranges_section_label);
31396 }
31397
31398 switch_to_section (debug_addr_section);
31399 /* GNU DebugFission (https://gcc.gnu.org/wiki/DebugFission),
31400 which GCC uses to implement -gsplit-dwarf as a GNU DWARF
31401 extension before DWARF5, didn't have a header for .debug_addr
31402 units. DWARF5 specifies a small header when address tables are used. */
31403 if (dwarf_version >= 5)
31404 {
31405 unsigned int last_idx = 0;
31406 unsigned long addrs_length;
31407
31408 addr_index_table->traverse_noresize
31409 <unsigned int *, count_index_addrs> (&last_idx);
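/* The unit length excludes the initial length field itself but
   covers the 2-byte version, 1-byte address size and 1-byte segment
   selector size emitted below, hence the extra 4 bytes on top of
   the address entries.  */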
31410 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
31411
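/* In 64-bit DWARF the initial length is a 4-byte 0xffffffff escape
   followed by an 8-byte length, so DWARF_INITIAL_LENGTH_SIZE exceeds
   DWARF_OFFSET_SIZE by exactly 4 in that case.  */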
31412 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31413 dw2_asm_output_data (4, 0xffffffff,
31414 "Escape value for 64-bit DWARF extension");
31415 dw2_asm_output_data (DWARF_OFFSET_SIZE, addrs_length,
31416 "Length of Address Unit");
31417 dw2_asm_output_data (2, 5, "DWARF addr version");
31418 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
31419 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
31420 }
31421 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
31422 output_addr_table ();
31423 }
31424
31425 /* Output the main compilation unit if non-empty or if .debug_macinfo
31426 or .debug_macro will be emitted. */
31427 output_comp_unit (comp_unit_die (), have_macinfo,
31428 dwarf_split_debug_info ? checksum : NULL);
31429
31430 if (dwarf_split_debug_info && info_section_emitted)
31431 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31432
31433 /* Output the abbreviation table. */
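/* Entry 0 of the abbrev table is allocated but never used; a length
   of 1 therefore means no abbreviations were created and the
   section can be skipped.  */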
31434 if (vec_safe_length (abbrev_die_table) != 1)
31435 {
31436 switch_to_section (debug_abbrev_section);
31437 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31438 output_abbrev_section ();
31439 }
31440
31441 /* Output location list section if necessary. */
31442 if (have_location_lists)
31443 {
31444 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31445 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31446 /* Output the location lists info. */
31447 switch_to_section (debug_loc_section);
31448 if (dwarf_version >= 5)
31449 {
31450 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 2);
31451 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 3);
31452 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31453 dw2_asm_output_data (4, 0xffffffff,
31454 "Initial length escape value indicating "
31455 "64-bit DWARF extension");
31456 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31457 "Length of Location Lists");
31458 ASM_OUTPUT_LABEL (asm_out_file, l1);
31459 output_dwarf_version ();
31460 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31461 dw2_asm_output_data (1, 0, "Segment Size");
31462 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31463 "Offset Entry Count");
31464 }
31465 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31466 if (dwarf_version >= 5 && dwarf_split_debug_info)
31467 {
31468 unsigned int save_loc_list_idx = loc_list_idx;
31469 loc_list_idx = 0;
31470 output_loclists_offsets (comp_unit_die ());
31471 gcc_assert (save_loc_list_idx == loc_list_idx);
31472 }
31473 output_location_lists (comp_unit_die ());
31474 if (dwarf_version >= 5)
31475 ASM_OUTPUT_LABEL (asm_out_file, l2);
31476 }
31477
31478 output_pubtables ();
31479
31480 /* Output the address range information if a CU (.debug_info section)
31481 was emitted. We output an empty table even if we had no functions
31482 to put in it. This is because the consumer has no way to tell the
31483 difference between an empty table that we omitted and failure to
31484 generate a table that would have contained data. */
31485 if (info_section_emitted)
31486 {
31487 switch_to_section (debug_aranges_section);
31488 output_aranges ();
31489 }
31490
31491 /* Output ranges section if necessary. */
31492 if (!vec_safe_is_empty (ranges_table))
31493 {
31494 if (dwarf_version >= 5)
31495 output_rnglists (generation);
31496 else
31497 output_ranges ();
31498 }
31499
31500 /* Have to end the macro section. */
31501 if (have_macinfo)
31502 {
31503 switch_to_section (debug_macinfo_section);
31504 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31505 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31506 : debug_skeleton_line_section_label, false);
31507 dw2_asm_output_data (1, 0, "End compilation unit");
31508 }
31509
31510 /* Output the source line correspondence table. We must do this
31511 even if there is no line information. Otherwise, on an empty
31512 translation unit, we will generate a present, but empty,
31513 .debug_info section. IRIX 6.5 `nm' will then complain when
31514 examining the file. This is done late so that any filenames
31515 used by the debug_info section are marked as 'used'. */
31516 switch_to_section (debug_line_section);
31517 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31518 if (! output_asm_line_debug_info ())
31519 output_line_info (false);
31520
31521 if (dwarf_split_debug_info && info_section_emitted)
31522 {
31523 switch_to_section (debug_skeleton_line_section);
31524 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31525 output_line_info (true);
31526 }
31527
31528 /* If we emitted any indirect strings, output the string table too. */
31529 if (debug_str_hash || skeleton_debug_str_hash)
31530 output_indirect_strings ();
31531 if (debug_line_str_hash)
31532 {
31533 switch_to_section (debug_line_str_section);
31534 const enum dwarf_form form = DW_FORM_line_strp;
31535 debug_line_str_hash->traverse<enum dwarf_form,
31536 output_indirect_string> (form);
31537 }
31538
31539 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31540 symview_upper_bound = 0;
31541 if (zero_view_p)
31542 bitmap_clear (zero_view_p);
31543 }
31544
31545 /* Returns a hash value for X (which really is a variable_value_struct). */
31546
31547 inline hashval_t
31548 variable_value_hasher::hash (variable_value_struct *x)
31549 {
31550 return (hashval_t) x->decl_id;
31551 }
31552
31553 /* Return nonzero if decl_id of variable_value_struct X is the same as
31554 UID of decl Y. */
31555
31556 inline bool
31557 variable_value_hasher::equal (variable_value_struct *x, tree y)
31558 {
31559 return x->decl_id == DECL_UID (y);
31560 }
31561
31562 /* Helper function for resolve_variable_value, handle
31563 DW_OP_GNU_variable_value in one location expression.
31564 Return true if exprloc has been changed into loclist. */
31565
31566 static bool
31567 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31568 {
31569 dw_loc_descr_ref next;
31570 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31571 {
31572 next = loc->dw_loc_next;
31573 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31574 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31575 continue;
31576
31577 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31578 if (DECL_CONTEXT (decl) != current_function_decl)
31579 continue;
31580
31581 dw_die_ref ref = lookup_decl_die (decl);
31582 if (ref)
31583 {
31584 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31585 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31586 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31587 continue;
31588 }
31589 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31590 if (l == NULL)
31591 continue;
31592 if (l->dw_loc_next)
31593 {
31594 if (AT_class (a) != dw_val_class_loc)
31595 continue;
31596 switch (a->dw_attr)
31597 {
31598 /* The following attributes allow both exprloc and loclist
31599 classes, so we can change them into a loclist. */
31600 case DW_AT_location:
31601 case DW_AT_string_length:
31602 case DW_AT_return_addr:
31603 case DW_AT_data_member_location:
31604 case DW_AT_frame_base:
31605 case DW_AT_segment:
31606 case DW_AT_static_link:
31607 case DW_AT_use_location:
31608 case DW_AT_vtable_elem_location:
31609 if (prev)
31610 {
31611 prev->dw_loc_next = NULL;
31612 prepend_loc_descr_to_each (l, AT_loc (a));
31613 }
31614 if (next)
31615 add_loc_descr_to_each (l, next);
31616 a->dw_attr_val.val_class = dw_val_class_loc_list;
31617 a->dw_attr_val.val_entry = NULL;
31618 a->dw_attr_val.v.val_loc_list = l;
31619 have_location_lists = true;
31620 return true;
31621 /* The following attributes allow both exprloc and reference,
31622 so if the whole expression is DW_OP_GNU_variable_value alone
31623 we could transform it into a reference. */
31624 case DW_AT_byte_size:
31625 case DW_AT_bit_size:
31626 case DW_AT_lower_bound:
31627 case DW_AT_upper_bound:
31628 case DW_AT_bit_stride:
31629 case DW_AT_count:
31630 case DW_AT_allocated:
31631 case DW_AT_associated:
31632 case DW_AT_byte_stride:
31633 if (prev == NULL && next == NULL)
31634 break;
31635 /* FALLTHRU */
31636 default:
31637 if (dwarf_strict)
31638 continue;
31639 break;
31640 }
31641 /* Create DW_TAG_variable that we can refer to. */
31642 gen_decl_die (decl, NULL_TREE, NULL,
31643 lookup_decl_die (current_function_decl));
31644 ref = lookup_decl_die (decl);
31645 if (ref)
31646 {
31647 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31648 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31649 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31650 }
31651 continue;
31652 }
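/* The location list has a single entry: splice its expression into
   the current expression in place of the DW_OP_GNU_variable_value
   operation.  */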
31653 if (prev)
31654 {
31655 prev->dw_loc_next = l->expr;
31656 add_loc_descr (&prev->dw_loc_next, next);
31657 free_loc_descr (loc, NULL);
31658 next = prev->dw_loc_next;
31659 }
31660 else
31661 {
31662 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31663 add_loc_descr (&loc, next);
31664 next = loc;
31665 }
31666 loc = prev;
31667 }
31668 return false;
31669 }
31670
31671 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31672
31673 static void
31674 resolve_variable_value (dw_die_ref die)
31675 {
31676 dw_attr_node *a;
31677 dw_loc_list_ref loc;
31678 unsigned ix;
31679
31680 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31681 switch (AT_class (a))
31682 {
31683 case dw_val_class_loc:
31684 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31685 break;
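/* The exprloc was turned into a location list; reprocess it as
   such.  */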
31686 /* FALLTHRU */
31687 case dw_val_class_loc_list:
31688 loc = AT_loc_list (a);
31689 gcc_assert (loc);
31690 for (; loc; loc = loc->dw_loc_next)
31691 resolve_variable_value_in_expr (a, loc->expr);
31692 break;
31693 default:
31694 break;
31695 }
31696 }
31697
31698 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31699 temporaries in the current function. */
31700
31701 static void
31702 resolve_variable_values (void)
31703 {
31704 if (!variable_value_hash || !current_function_decl)
31705 return;
31706
31707 struct variable_value_struct *node
31708 = variable_value_hash->find_with_hash (current_function_decl,
31709 DECL_UID (current_function_decl));
31710
31711 if (node == NULL)
31712 return;
31713
31714 unsigned int i;
31715 dw_die_ref die;
31716 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31717 resolve_variable_value (die);
31718 }
31719
31720 /* Helper function for note_variable_value, handle one location
31721 expression. */
31722
31723 static void
31724 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31725 {
31726 for (; loc; loc = loc->dw_loc_next)
31727 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31728 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31729 {
31730 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31731 dw_die_ref ref = lookup_decl_die (decl);
31732 if (! ref && (flag_generate_lto || flag_generate_offload))
31733 {
31734 /* ??? This is somewhat of a hack because we do not create DIEs
31735 for variables not in BLOCK trees early, but when generating
31736 early LTO output we need the dw_val_class_decl_ref to be
31737 fully resolved. For fat LTO objects we'd also like to
31738 undo this after LTO dwarf output. */
31739 gcc_assert (DECL_CONTEXT (decl));
31740 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31741 gcc_assert (ctx != NULL);
31742 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31743 ref = lookup_decl_die (decl);
31744 gcc_assert (ref != NULL);
31745 }
31746 if (ref)
31747 {
31748 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31749 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31750 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31751 continue;
31752 }
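/* No DIE for the referenced variable yet.  Remember the referencing
   DIE in a per-containing-function table so that
   resolve_variable_values can retry while that function is being
   processed.  */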
31753 if (VAR_P (decl)
31754 && DECL_CONTEXT (decl)
31755 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31756 && lookup_decl_die (DECL_CONTEXT (decl)))
31757 {
31758 if (!variable_value_hash)
31759 variable_value_hash
31760 = hash_table<variable_value_hasher>::create_ggc (10);
31761
31762 tree fndecl = DECL_CONTEXT (decl);
31763 struct variable_value_struct *node;
31764 struct variable_value_struct **slot
31765 = variable_value_hash->find_slot_with_hash (fndecl,
31766 DECL_UID (fndecl),
31767 INSERT);
31768 if (*slot == NULL)
31769 {
31770 node = ggc_cleared_alloc<variable_value_struct> ();
31771 node->decl_id = DECL_UID (fndecl);
31772 *slot = node;
31773 }
31774 else
31775 node = *slot;
31776
31777 vec_safe_push (node->dies, die);
31778 }
31779 }
31780 }
31781
31782 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
31783 with dw_val_class_decl_ref operand. */
31784
31785 static void
31786 note_variable_value (dw_die_ref die)
31787 {
31788 dw_die_ref c;
31789 dw_attr_node *a;
31790 dw_loc_list_ref loc;
31791 unsigned ix;
31792
31793 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31794 switch (AT_class (a))
31795 {
31796 case dw_val_class_loc_list:
31797 loc = AT_loc_list (a);
31798 gcc_assert (loc);
31799 if (!loc->noted_variable_value)
31800 {
31801 loc->noted_variable_value = 1;
31802 for (; loc; loc = loc->dw_loc_next)
31803 note_variable_value_in_expr (die, loc->expr);
31804 }
31805 break;
31806 case dw_val_class_loc:
31807 note_variable_value_in_expr (die, AT_loc (a));
31808 break;
31809 default:
31810 break;
31811 }
31812
31813 /* Mark children. */
31814 FOR_EACH_CHILD (die, c, note_variable_value (c));
31815 }
31816
31817 /* Perform any cleanups needed after the early debug generation pass
31818 has run. */
31819
31820 static void
31821 dwarf2out_early_finish (const char *filename)
31822 {
31823 set_early_dwarf s;
31824 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31825
31826 /* PCH might result in DW_AT_producer string being restored from the
31827 header compilation, so always fill it with an empty string initially
31828 and overwrite only here. */
31829 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
31830 producer_string = gen_producer_string ();
31831 producer->dw_attr_val.v.val_str->refcount--;
31832 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
31833
31834 /* Add the name for the main input file now. We delayed this from
31835 dwarf2out_init to avoid complications with PCH. */
31836 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
31837 add_comp_dir_attribute (comp_unit_die ());
31838
31839 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
31840 DW_AT_comp_dir into .debug_line_str section. */
31841 if (!output_asm_line_debug_info ()
31842 && dwarf_version >= 5
31843 && DWARF5_USE_DEBUG_LINE_STR)
31844 {
31845 for (int i = 0; i < 2; i++)
31846 {
31847 dw_attr_node *a = get_AT (comp_unit_die (),
31848 i ? DW_AT_comp_dir : DW_AT_name);
31849 if (a == NULL
31850 || AT_class (a) != dw_val_class_str
31851 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
31852 continue;
31853
31854 if (! debug_line_str_hash)
31855 debug_line_str_hash
31856 = hash_table<indirect_string_hasher>::create_ggc (10);
31857
31858 struct indirect_string_node *node
31859 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
31860 set_indirect_string (node);
31861 node->form = DW_FORM_line_strp;
31862 a->dw_attr_val.v.val_str->refcount--;
31863 a->dw_attr_val.v.val_str = node;
31864 }
31865 }
31866
31867 /* With LTO, early dwarf was really finished at compile time, so make
31868 sure to adjust the phase after annotating the LTRANS CU DIE. */
31869 if (in_lto_p)
31870 {
31871 /* Force DW_TAG_imported_unit to be created now, otherwise
31872 we might end up without it, or with it ordered after a
31873 DW_TAG_inlined_subroutine referencing DIEs from it. */
31874 if (! flag_wpa && flag_incremental_link != INCREMENTAL_LINK_LTO)
31875 {
31876 unsigned i;
31877 tree tu;
31878 if (external_die_map)
31879 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, tu)
31880 if (sym_off_pair *desc = external_die_map->get (tu))
31881 {
31882 dw_die_ref import = new_die (DW_TAG_imported_unit,
31883 comp_unit_die (), NULL_TREE);
31884 add_AT_external_die_ref (import, DW_AT_import,
31885 desc->sym, desc->off);
31886 }
31887 }
31888
31889 early_dwarf_finished = true;
31890 if (dump_file)
31891 {
31892 fprintf (dump_file, "LTO EARLY DWARF for %s\n", filename);
31893 print_die (comp_unit_die (), dump_file);
31894 }
31895 return;
31896 }
31897
31898 /* Walk through the list of incomplete types again, trying once more to
31899 emit full debugging info for them. */
31900 retry_incomplete_types ();
31901
31902 /* The point here is to flush out the limbo list so that it is empty
31903 and we don't need to stream it for LTO. */
31904 flush_limbo_die_list ();
31905
31906 gen_scheduled_generic_parms_dies ();
31907 gen_remaining_tmpl_value_param_die_attribute ();
31908
31909 /* Add DW_AT_linkage_name for all deferred DIEs. */
31910 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
31911 {
31912 tree decl = node->created_for;
31913 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
31914 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
31915 ended up in deferred_asm_name before we knew it was
31916 constant and never written to disk. */
31917 && DECL_ASSEMBLER_NAME (decl))
31918 {
31919 add_linkage_attr (node->die, decl);
31920 move_linkage_attr (node->die);
31921 }
31922 }
31923 deferred_asm_name = NULL;
31924
31925 if (flag_eliminate_unused_debug_types)
31926 prune_unused_types ();
31927
31928 /* Generate separate COMDAT sections for type DIEs. */
31929 if (use_debug_types)
31930 {
31931 break_out_comdat_types (comp_unit_die ());
31932
31933 /* Each new type_unit DIE was added to the limbo die list when created.
31934 Since these have all been added to comdat_type_list, clear the
31935 limbo die list. */
31936 limbo_die_list = NULL;
31937
31938 /* For each new comdat type unit, copy declarations for incomplete
31939 types to make the new unit self-contained (i.e., no direct
31940 references to the main compile unit). */
31941 for (comdat_type_node *ctnode = comdat_type_list;
31942 ctnode != NULL; ctnode = ctnode->next)
31943 copy_decls_for_unworthy_types (ctnode->root_die);
31944 copy_decls_for_unworthy_types (comp_unit_die ());
31945
31946 /* In the process of copying declarations from one unit to another,
31947 we may have left some declarations behind that are no longer
31948 referenced. Prune them. */
31949 prune_unused_types ();
31950 }
31951
31952 /* Traverse the DIE's and note DIEs with DW_OP_GNU_variable_value still
31953 with dw_val_class_decl_ref operand. */
31954 note_variable_value (comp_unit_die ());
31955 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31956 note_variable_value (node->die);
31957 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
31958 ctnode = ctnode->next)
31959 note_variable_value (ctnode->root_die);
31960 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31961 note_variable_value (node->die);
31962
31963 /* The AT_pubnames attribute needs to go in all skeleton dies, including
31964 both the main_cu and all skeleton TUs. Making this call unconditional
31965 would end up either adding a second copy of the AT_pubnames attribute, or
31966 requiring a special case in add_top_level_skeleton_die_attrs. */
31967 if (!dwarf_split_debug_info)
31968 add_AT_pubnames (comp_unit_die ());
31969
31970 /* The early debug phase is now finished. */
31971 early_dwarf_finished = true;
31972 if (dump_file)
31973 {
31974 fprintf (dump_file, "EARLY DWARF for %s\n", filename);
31975 print_die (comp_unit_die (), dump_file);
31976 }
31977
31978 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
31979 if ((!flag_generate_lto && !flag_generate_offload)
31980 /* FIXME: Disable debug info generation for (PE-)COFF targets since the
31981 copy_lto_debug_sections operation of the simple object support in
31982 libiberty is not implemented for them yet. */
31983 || TARGET_PECOFF || TARGET_COFF)
31984 return;
31985
31986 /* Now that we are going to output for LTO, initialize sections and
31987 labels to the LTO variants. We don't need a random-seed postfix
31988 like other LTO sections, as linking the LTO debug sections into
31989 one in a partial link is fine. */
31990 init_sections_and_labels (true);
31991
31992 /* The output below is modeled after dwarf2out_finish with all
31993 location related output removed and some LTO specific changes.
31994 Some refactoring might make both smaller and easier to match up. */
31995
31996 /* Traverse the DIE's and add sibling attributes to those DIE's
31997 that have children. */
31998 add_sibling_attributes (comp_unit_die ());
31999 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32000 add_sibling_attributes (node->die);
32001 for (comdat_type_node *ctnode = comdat_type_list;
32002 ctnode != NULL; ctnode = ctnode->next)
32003 add_sibling_attributes (ctnode->root_die);
32004
32005 /* AIX Assembler inserts the length, so adjust the reference to match the
32006 offset expected by debuggers. */
32007 strcpy (dl_section_ref, debug_line_section_label);
32008 if (XCOFF_DEBUGGING_INFO)
32009 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
32010
32011 if (debug_info_level >= DINFO_LEVEL_TERSE)
32012 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
32013
32014 if (have_macinfo)
32015 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
32016 macinfo_section_label);
32017
32018 save_macinfo_strings ();
32019
32020 if (dwarf_split_debug_info)
32021 {
32022 unsigned int index = 0;
32023 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
32024 }
32025
32026 /* Output all of the compilation units. We put the main one last so that
32027 the offsets are available to output_pubnames. */
32028 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32029 output_comp_unit (node->die, 0, NULL);
32030
32031 hash_table<comdat_type_hasher> comdat_type_table (100);
32032 for (comdat_type_node *ctnode = comdat_type_list;
32033 ctnode != NULL; ctnode = ctnode->next)
32034 {
32035 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
32036
32037 /* Don't output duplicate types. */
32038 if (*slot != HTAB_EMPTY_ENTRY)
32039 continue;
32040
32041 /* Add a pointer to the line table for the main compilation unit
32042 so that the debugger can make sense of DW_AT_decl_file
32043 attributes. */
32044 if (debug_info_level >= DINFO_LEVEL_TERSE)
32045 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32046 (!dwarf_split_debug_info
32047 ? debug_line_section_label
32048 : debug_skeleton_line_section_label));
32049
32050 output_comdat_type_unit (ctnode);
32051 *slot = ctnode;
32052 }
32053
32054 /* Attach a unique symbol to the main debuginfo section. */
32055 compute_comp_unit_symbol (comp_unit_die ());
32056
32057 /* Output the main compilation unit. We always need it if only for
32058 the CU symbol. */
32059 output_comp_unit (comp_unit_die (), true, NULL);
32060
32061 /* Output the abbreviation table. */
32062 if (vec_safe_length (abbrev_die_table) != 1)
32063 {
32064 switch_to_section (debug_abbrev_section);
32065 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32066 output_abbrev_section ();
32067 }
32068
32069 /* Have to end the macro section. */
32070 if (have_macinfo)
32071 {
32072 /* We have to save macinfo state if we need to output it again
32073 for the FAT part of the object. */
32074 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32075 if (flag_fat_lto_objects)
32076 macinfo_table = macinfo_table->copy ();
32077
32078 switch_to_section (debug_macinfo_section);
32079 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32080 output_macinfo (debug_line_section_label, true);
32081 dw2_asm_output_data (1, 0, "End compilation unit");
32082
32083 if (flag_fat_lto_objects)
32084 {
32085 vec_free (macinfo_table);
32086 macinfo_table = saved_macinfo_table;
32087 }
32088 }
32089
32090 /* Emit a skeleton debug_line section. */
32091 switch_to_section (debug_line_section);
32092 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32093 output_line_info (true);
32094
32095 /* If we emitted any indirect strings, output the string table too. */
32096 if (debug_str_hash || skeleton_debug_str_hash)
32097 output_indirect_strings ();
32098 if (debug_line_str_hash)
32099 {
32100 switch_to_section (debug_line_str_section);
32101 const enum dwarf_form form = DW_FORM_line_strp;
32102 debug_line_str_hash->traverse<enum dwarf_form,
32103 output_indirect_string> (form);
32104 }
32105
32106 /* Switch back to the text section. */
32107 switch_to_section (text_section);
32108 }
32109
32110 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32111 within the same process. For use by toplev::finalize. */
32112
32113 void
32114 dwarf2out_c_finalize (void)
32115 {
32116 last_var_location_insn = NULL;
32117 cached_next_real_insn = NULL;
32118 used_rtx_array = NULL;
32119 incomplete_types = NULL;
32120 debug_info_section = NULL;
32121 debug_skeleton_info_section = NULL;
32122 debug_abbrev_section = NULL;
32123 debug_skeleton_abbrev_section = NULL;
32124 debug_aranges_section = NULL;
32125 debug_addr_section = NULL;
32126 debug_macinfo_section = NULL;
32127 debug_line_section = NULL;
32128 debug_skeleton_line_section = NULL;
32129 debug_loc_section = NULL;
32130 debug_pubnames_section = NULL;
32131 debug_pubtypes_section = NULL;
32132 debug_str_section = NULL;
32133 debug_line_str_section = NULL;
32134 debug_str_dwo_section = NULL;
32135 debug_str_offsets_section = NULL;
32136 debug_ranges_section = NULL;
32137 debug_frame_section = NULL;
32138 fde_vec = NULL;
32139 debug_str_hash = NULL;
32140 debug_line_str_hash = NULL;
32141 skeleton_debug_str_hash = NULL;
32142 dw2_string_counter = 0;
32143 have_multiple_function_sections = false;
32144 text_section_used = false;
32145 cold_text_section_used = false;
32146 cold_text_section = NULL;
32147 current_unit_personality = NULL;
32148
32149 early_dwarf = false;
32150 early_dwarf_finished = false;
32151
32152 next_die_offset = 0;
32153 single_comp_unit_die = NULL;
32154 comdat_type_list = NULL;
32155 limbo_die_list = NULL;
32156 file_table = NULL;
32157 decl_die_table = NULL;
32158 common_block_die_table = NULL;
32159 decl_loc_table = NULL;
32160 call_arg_locations = NULL;
32161 call_arg_loc_last = NULL;
32162 call_site_count = -1;
32163 tail_call_site_count = -1;
32164 cached_dw_loc_list_table = NULL;
32165 abbrev_die_table = NULL;
32166 delete dwarf_proc_stack_usage_map;
32167 dwarf_proc_stack_usage_map = NULL;
32168 line_info_label_num = 0;
32169 cur_line_info_table = NULL;
32170 text_section_line_info = NULL;
32171 cold_text_section_line_info = NULL;
32172 separate_line_info = NULL;
32173 info_section_emitted = false;
32174 pubname_table = NULL;
32175 pubtype_table = NULL;
32176 macinfo_table = NULL;
32177 ranges_table = NULL;
32178 ranges_by_label = NULL;
32179 rnglist_idx = 0;
32180 have_location_lists = false;
32181 loclabel_num = 0;
32182 poc_label_num = 0;
32183 last_emitted_file = NULL;
32184 label_num = 0;
32185 tmpl_value_parm_die_table = NULL;
32186 generic_type_instances = NULL;
32187 frame_pointer_fb_offset = 0;
32188 frame_pointer_fb_offset_valid = false;
32189 base_types.release ();
32190 XDELETEVEC (producer_string);
32191 producer_string = NULL;
32192 }
32193
32194 #include "gt-dwarf2out.h"