1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2018 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47      information that is common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
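/* Illustrative sketch, not target documentation: on a target whose call
   instruction pushes the return address, the callee's CFA is the caller's
   SP at the call site, so the initial unwind rule at function entry is
   typically "CFA = SP + <size of the pushed return address>"; later CFIs
   update the rule as the prologue adjusts SP or sets up a frame pointer.  */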
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105
106 #ifndef XCOFF_DEBUGGING_INFO
107 #define XCOFF_DEBUGGING_INFO 0
108 #endif
109
110 #ifndef HAVE_XCOFF_DWARF_EXTRAS
111 #define HAVE_XCOFF_DWARF_EXTRAS 0
112 #endif
113
114 #ifdef VMS_DEBUGGING_INFO
115 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
116
117 /* Define this macro to be a nonzero value if the directory specifications
118 which are output in the debug info should end with a separator. */
119 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
120 /* Define this macro to evaluate to a nonzero value if GCC should refrain
121 from generating indirect strings in DWARF2 debug information, for instance
122 if your target is stuck with an old version of GDB that is unable to
123 process them properly or uses VMS Debug. */
124 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
125 #else
126 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
127 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
128 #endif
129
130 /* ??? Poison these here until it can be done generically. They've been
131 totally replaced in this file; make sure it stays that way. */
132 #undef DWARF2_UNWIND_INFO
133 #undef DWARF2_FRAME_INFO
134 #if (GCC_VERSION >= 3000)
135 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
136 #endif
137
138 /* The size of the target's pointer type. */
139 #ifndef PTR_SIZE
140 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
141 #endif
142
143 /* Array of RTXes referenced by the debugging information, which therefore
144 must be kept around forever. */
145 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
146
147 /* A pointer to the base of a list of incomplete types which might be
148    completed at some later time.  incomplete_types needs to be a
149    vec<tree, va_gc> * because we want to tell the garbage collector
150    about it.  */
151 static GTY(()) vec<tree, va_gc> *incomplete_types;
152
153 /* A pointer to the base of a table of references to declaration
154 scopes. This table is a display which tracks the nesting
155 of declaration scopes at the current scope and containing
156 scopes. This table is used to find the proper place to
157 define type declaration DIE's. */
158 static GTY(()) vec<tree, va_gc> *decl_scope_table;
159
160 /* Pointers to various DWARF2 sections. */
161 static GTY(()) section *debug_info_section;
162 static GTY(()) section *debug_skeleton_info_section;
163 static GTY(()) section *debug_abbrev_section;
164 static GTY(()) section *debug_skeleton_abbrev_section;
165 static GTY(()) section *debug_aranges_section;
166 static GTY(()) section *debug_addr_section;
167 static GTY(()) section *debug_macinfo_section;
168 static const char *debug_macinfo_section_name;
169 static unsigned macinfo_label_base = 1;
170 static GTY(()) section *debug_line_section;
171 static GTY(()) section *debug_skeleton_line_section;
172 static GTY(()) section *debug_loc_section;
173 static GTY(()) section *debug_pubnames_section;
174 static GTY(()) section *debug_pubtypes_section;
175 static GTY(()) section *debug_str_section;
176 static GTY(()) section *debug_line_str_section;
177 static GTY(()) section *debug_str_dwo_section;
178 static GTY(()) section *debug_str_offsets_section;
179 static GTY(()) section *debug_ranges_section;
180 static GTY(()) section *debug_frame_section;
181
182 /* Maximum size (in bytes) of an artificially generated label. */
183 #define MAX_ARTIFICIAL_LABEL_BYTES 40
184
185 /* According to the (draft) DWARF 3 specification, the initial length
186 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
187 bytes are 0xffffffff, followed by the length stored in the next 8
188 bytes.
189
190 However, the SGI/MIPS ABI uses an initial length which is equal to
191 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
192
193 #ifndef DWARF_INITIAL_LENGTH_SIZE
194 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
195 #endif
196
197 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
198 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
199 #endif
200
201 /* Round SIZE up to the nearest BOUNDARY. */
202 #define DWARF_ROUND(SIZE,BOUNDARY) \
203 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
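/* For instance, with a 4-byte boundary:
     DWARF_ROUND (10, 4) == ((10 + 4 - 1) / 4) * 4 == 12
     DWARF_ROUND (12, 4) == ((12 + 4 - 1) / 4) * 4 == 12  */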
204
205 /* CIE identifier. */
206 #if HOST_BITS_PER_WIDE_INT >= 64
207 #define DWARF_CIE_ID \
208 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
209 #else
210 #define DWARF_CIE_ID DW_CIE_ID
211 #endif
212
213
214 /* A vector for a table that contains frame description
215 information for each routine. */
216 #define NOT_INDEXED (-1U)
217 #define NO_INDEX_ASSIGNED (-2U)
218
219 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
220
221 struct GTY((for_user)) indirect_string_node {
222 const char *str;
223 unsigned int refcount;
224 enum dwarf_form form;
225 char *label;
226 unsigned int index;
227 };
228
229 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
230 {
231 typedef const char *compare_type;
232
233 static hashval_t hash (indirect_string_node *);
234 static bool equal (indirect_string_node *, const char *);
235 };
236
237 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
238
239 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
240
241 /* With split_debug_info, both the comp_dir and dwo_name go in the
242 main object file, rather than the dwo, similar to the force_direct
243 parameter elsewhere but with additional complications:
244
245 1) The string is needed in both the main object file and the dwo.
246 That is, the comp_dir and dwo_name will appear in both places.
247
248 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
249 DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.
250
251 3) GCC chooses the form to use late, depending on the size and
252 reference count.
253
254    Rather than forcing all the debug string handling functions and
255 callers to deal with these complications, simply use a separate,
256 special-cased string table for any attribute that should go in the
257 main object file. This limits the complexity to just the places
258 that need it. */
259
260 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
261
262 static GTY(()) int dw2_string_counter;
263
264 /* True if the compilation unit places functions in more than one section. */
265 static GTY(()) bool have_multiple_function_sections = false;
266
267 /* Whether the default text and cold text sections have been used at all. */
268 static GTY(()) bool text_section_used = false;
269 static GTY(()) bool cold_text_section_used = false;
270
271 /* The default cold text section. */
272 static GTY(()) section *cold_text_section;
273
274 /* The DIE for C++14 'auto' in a function return type. */
275 static GTY(()) dw_die_ref auto_die;
276
277 /* The DIE for C++14 'decltype(auto)' in a function return type. */
278 static GTY(()) dw_die_ref decltype_auto_die;
279
280 /* Forward declarations for functions defined in this file. */
281
282 static void output_call_frame_info (int);
283 static void dwarf2out_note_section_used (void);
284
285 /* Personality decl of current unit. Used only when assembler does not support
286 personality CFI. */
287 static GTY(()) rtx current_unit_personality;
288
289 /* Whether an eh_frame section is required. */
290 static GTY(()) bool do_eh_frame = false;
291
292 /* .debug_rnglists next index. */
293 static unsigned int rnglist_idx;
294
295 /* Data and reference forms for relocatable data. */
296 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
297 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
298
299 #ifndef DEBUG_FRAME_SECTION
300 #define DEBUG_FRAME_SECTION ".debug_frame"
301 #endif
302
303 #ifndef FUNC_BEGIN_LABEL
304 #define FUNC_BEGIN_LABEL "LFB"
305 #endif
306
307 #ifndef FUNC_END_LABEL
308 #define FUNC_END_LABEL "LFE"
309 #endif
310
311 #ifndef PROLOGUE_END_LABEL
312 #define PROLOGUE_END_LABEL "LPE"
313 #endif
314
315 #ifndef EPILOGUE_BEGIN_LABEL
316 #define EPILOGUE_BEGIN_LABEL "LEB"
317 #endif
318
319 #ifndef FRAME_BEGIN_LABEL
320 #define FRAME_BEGIN_LABEL "Lframe"
321 #endif
322 #define CIE_AFTER_SIZE_LABEL "LSCIE"
323 #define CIE_END_LABEL "LECIE"
324 #define FDE_LABEL "LSFDE"
325 #define FDE_AFTER_SIZE_LABEL "LASFDE"
326 #define FDE_END_LABEL "LEFDE"
327 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
328 #define LINE_NUMBER_END_LABEL "LELT"
329 #define LN_PROLOG_AS_LABEL "LASLTP"
330 #define LN_PROLOG_END_LABEL "LELTP"
331 #define DIE_LABEL_PREFIX "DW"
332 \f
333 /* Match the base name of a file to the base name of a compilation unit. */
334
335 static int
336 matches_main_base (const char *path)
337 {
338 /* Cache the last query. */
339 static const char *last_path = NULL;
340 static int last_match = 0;
341 if (path != last_path)
342 {
343 const char *base;
344 int length = base_of_path (path, &base);
345 last_path = path;
346 last_match = (length == main_input_baselength
347 && memcmp (base, main_input_basename, length) == 0);
348 }
349 return last_match;
350 }
351
352 #ifdef DEBUG_DEBUG_STRUCT
353
354 static int
355 dump_struct_debug (tree type, enum debug_info_usage usage,
356 enum debug_struct_file criterion, int generic,
357 int matches, int result)
358 {
359 /* Find the type name. */
360 tree type_decl = TYPE_STUB_DECL (type);
361 tree t = type_decl;
362 const char *name = 0;
363 if (TREE_CODE (t) == TYPE_DECL)
364 t = DECL_NAME (t);
365 if (t)
366 name = IDENTIFIER_POINTER (t);
367
368 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
369 criterion,
370 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
371 matches ? "bas" : "hdr",
372 generic ? "gen" : "ord",
373 usage == DINFO_USAGE_DFN ? ";" :
374 usage == DINFO_USAGE_DIR_USE ? "." : "*",
375 result,
376 (void*) type_decl, name);
377 return result;
378 }
379 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
380 dump_struct_debug (type, usage, criterion, generic, matches, result)
381
382 #else
383
384 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
385 (result)
386
387 #endif
388
389 /* Get the number of HOST_WIDE_INTs needed to represent the precision
390 of the number. Some constants have a large uniform precision, so
391 we get the precision needed for the actual value of the number. */
392
393 static unsigned int
394 get_full_len (const wide_int &op)
395 {
396 int prec = wi::min_precision (op, UNSIGNED);
397 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
398 / HOST_BITS_PER_WIDE_INT);
399 }
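/* A small worked example, assuming a 64-bit HOST_WIDE_INT: a value whose
   minimum precision is 70 bits needs (70 + 63) / 64 == 2 HOST_WIDE_INTs,
   while anything that fits in 64 bits needs just one.  */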
400
401 static bool
402 should_emit_struct_debug (tree type, enum debug_info_usage usage)
403 {
404 enum debug_struct_file criterion;
405 tree type_decl;
406 bool generic = lang_hooks.types.generic_p (type);
407
408 if (generic)
409 criterion = debug_struct_generic[usage];
410 else
411 criterion = debug_struct_ordinary[usage];
412
413 if (criterion == DINFO_STRUCT_FILE_NONE)
414 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
415 if (criterion == DINFO_STRUCT_FILE_ANY)
416 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
417
418 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
419
420 if (type_decl != NULL)
421 {
422 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
423 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
424
425 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
426 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
427 }
428
429 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
430 }
431 \f
432 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
433 switch to the data section instead, and write out a synthetic start label
434 for collect2 the first time around. */
435
436 static void
437 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
438 {
439 if (eh_frame_section == 0)
440 {
441 int flags;
442
443 if (EH_TABLES_CAN_BE_READ_ONLY)
444 {
445 int fde_encoding;
446 int per_encoding;
447 int lsda_encoding;
448
449 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
450 /*global=*/0);
451 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
452 /*global=*/1);
453 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
454 /*global=*/0);
455 flags = ((! flag_pic
456 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
457 && (fde_encoding & 0x70) != DW_EH_PE_aligned
458 && (per_encoding & 0x70) != DW_EH_PE_absptr
459 && (per_encoding & 0x70) != DW_EH_PE_aligned
460 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
461 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
462 ? 0 : SECTION_WRITE);
463 }
464 else
465 flags = SECTION_WRITE;
466
467 #ifdef EH_FRAME_SECTION_NAME
468 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
469 #else
470 eh_frame_section = ((flags == SECTION_WRITE)
471 ? data_section : readonly_data_section);
472 #endif /* EH_FRAME_SECTION_NAME */
473 }
474
475 switch_to_section (eh_frame_section);
476
477 #ifdef EH_FRAME_THROUGH_COLLECT2
478 /* We have no special eh_frame section. Emit special labels to guide
479 collect2. */
480 if (!back)
481 {
482 tree label = get_file_function_name ("F");
483 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
484 targetm.asm_out.globalize_label (asm_out_file,
485 IDENTIFIER_POINTER (label));
486 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
487 }
488 #endif
489 }
490
491 /* Switch [BACK] to the eh or debug frame table section, depending on
492 FOR_EH. */
493
494 static void
495 switch_to_frame_table_section (int for_eh, bool back)
496 {
497 if (for_eh)
498 switch_to_eh_frame_section (back);
499 else
500 {
501 if (!debug_frame_section)
502 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
503 SECTION_DEBUG, NULL);
504 switch_to_section (debug_frame_section);
505 }
506 }
507
508 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
509
510 enum dw_cfi_oprnd_type
511 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
512 {
513 switch (cfi)
514 {
515 case DW_CFA_nop:
516 case DW_CFA_GNU_window_save:
517 case DW_CFA_remember_state:
518 case DW_CFA_restore_state:
519 return dw_cfi_oprnd_unused;
520
521 case DW_CFA_set_loc:
522 case DW_CFA_advance_loc1:
523 case DW_CFA_advance_loc2:
524 case DW_CFA_advance_loc4:
525 case DW_CFA_MIPS_advance_loc8:
526 return dw_cfi_oprnd_addr;
527
528 case DW_CFA_offset:
529 case DW_CFA_offset_extended:
530 case DW_CFA_def_cfa:
531 case DW_CFA_offset_extended_sf:
532 case DW_CFA_def_cfa_sf:
533 case DW_CFA_restore:
534 case DW_CFA_restore_extended:
535 case DW_CFA_undefined:
536 case DW_CFA_same_value:
537 case DW_CFA_def_cfa_register:
538 case DW_CFA_register:
539 case DW_CFA_expression:
540 case DW_CFA_val_expression:
541 return dw_cfi_oprnd_reg_num;
542
543 case DW_CFA_def_cfa_offset:
544 case DW_CFA_GNU_args_size:
545 case DW_CFA_def_cfa_offset_sf:
546 return dw_cfi_oprnd_offset;
547
548 case DW_CFA_def_cfa_expression:
549 return dw_cfi_oprnd_loc;
550
551 default:
552 gcc_unreachable ();
553 }
554 }
555
556 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
557
558 enum dw_cfi_oprnd_type
559 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
560 {
561 switch (cfi)
562 {
563 case DW_CFA_def_cfa:
564 case DW_CFA_def_cfa_sf:
565 case DW_CFA_offset:
566 case DW_CFA_offset_extended_sf:
567 case DW_CFA_offset_extended:
568 return dw_cfi_oprnd_offset;
569
570 case DW_CFA_register:
571 return dw_cfi_oprnd_reg_num;
572
573 case DW_CFA_expression:
574 case DW_CFA_val_expression:
575 return dw_cfi_oprnd_loc;
576
577 case DW_CFA_def_cfa_expression:
578 return dw_cfi_oprnd_cfa_loc;
579
580 default:
581 return dw_cfi_oprnd_unused;
582 }
583 }
584
585 /* Output one FDE. */
586
587 static void
588 output_fde (dw_fde_ref fde, bool for_eh, bool second,
589 char *section_start_label, int fde_encoding, char *augmentation,
590 bool any_lsda_needed, int lsda_encoding)
591 {
592 const char *begin, *end;
593 static unsigned int j;
594 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
595
596 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
597 /* empty */ 0);
598 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
599 for_eh + j);
600 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
601 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
602 if (!XCOFF_DEBUGGING_INFO || for_eh)
603 {
604 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
605 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
606 " indicating 64-bit DWARF extension");
607 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
608 "FDE Length");
609 }
610 ASM_OUTPUT_LABEL (asm_out_file, l1);
611
612 if (for_eh)
613 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
614 else
615 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
616 debug_frame_section, "FDE CIE offset");
617
618 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
619 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
620
621 if (for_eh)
622 {
623 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
624 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
625 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
626 "FDE initial location");
627 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
628 end, begin, "FDE address range");
629 }
630 else
631 {
632 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
633 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
634 }
635
636 if (augmentation[0])
637 {
638 if (any_lsda_needed)
639 {
640 int size = size_of_encoded_value (lsda_encoding);
641
642 if (lsda_encoding == DW_EH_PE_aligned)
643 {
644 int offset = ( 4 /* Length */
645 + 4 /* CIE offset */
646 + 2 * size_of_encoded_value (fde_encoding)
647 + 1 /* Augmentation size */ );
648 int pad = -offset & (PTR_SIZE - 1);
649
650 size += pad;
651 gcc_assert (size_of_uleb128 (size) == 1);
652 }
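	    /* Worked example, assuming 4-byte encoded FDE pointers and
	       PTR_SIZE == 8: offset == 4 + 4 + 8 + 1 == 17, so
	       pad == -17 & 7 == 7, which rounds the LSDA reference up to
	       the next 8-byte boundary.  */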
653
654 dw2_asm_output_data_uleb128 (size, "Augmentation size");
655
656 if (fde->uses_eh_lsda)
657 {
658 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
659 fde->funcdef_number);
660 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
661 gen_rtx_SYMBOL_REF (Pmode, l1),
662 false,
663 "Language Specific Data Area");
664 }
665 else
666 {
667 if (lsda_encoding == DW_EH_PE_aligned)
668 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
669 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
670 "Language Specific Data Area (none)");
671 }
672 }
673 else
674 dw2_asm_output_data_uleb128 (0, "Augmentation size");
675 }
676
677 /* Loop through the Call Frame Instructions associated with this FDE. */
678 fde->dw_fde_current_label = begin;
679 {
680 size_t from, until, i;
681
682 from = 0;
683 until = vec_safe_length (fde->dw_fde_cfi);
684
685 if (fde->dw_fde_second_begin == NULL)
686 ;
687 else if (!second)
688 until = fde->dw_fde_switch_cfi_index;
689 else
690 from = fde->dw_fde_switch_cfi_index;
691
692 for (i = from; i < until; i++)
693 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
694 }
695
696 /* If we are to emit a ref/link from function bodies to their frame tables,
697    do it now.  This is typically done to make sure that the tables
698    associated with functions are dragged along with them and not discarded
699    by link-time garbage collection.  We need to do this on a per-function
700    basis to cope with -ffunction-sections.  */
701
702 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
703 /* Switch to the function section, emit the ref to the tables, and
704 switch *back* into the table section. */
705 switch_to_section (function_section (fde->decl));
706 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
707 switch_to_frame_table_section (for_eh, true);
708 #endif
709
710 /* Pad the FDE out to an address sized boundary. */
711 ASM_OUTPUT_ALIGN (asm_out_file,
712 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
713 ASM_OUTPUT_LABEL (asm_out_file, l2);
714
715 j += 2;
716 }
717
718 /* Return true if frame description entry FDE is needed for EH. */
719
720 static bool
721 fde_needed_for_eh_p (dw_fde_ref fde)
722 {
723 if (flag_asynchronous_unwind_tables)
724 return true;
725
726 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
727 return true;
728
729 if (fde->uses_eh_lsda)
730 return true;
731
732 /* If exceptions are enabled, we have collected nothrow info. */
733 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
734 return false;
735
736 return true;
737 }
738
739 /* Output the call frame information, which records how the frame
740    pointer is calculated and where the saved registers are
741    located.  */
742
743 static void
744 output_call_frame_info (int for_eh)
745 {
746 unsigned int i;
747 dw_fde_ref fde;
748 dw_cfi_ref cfi;
749 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
750 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
751 bool any_lsda_needed = false;
752 char augmentation[6];
753 int augmentation_size;
754 int fde_encoding = DW_EH_PE_absptr;
755 int per_encoding = DW_EH_PE_absptr;
756 int lsda_encoding = DW_EH_PE_absptr;
757 int return_reg;
758 rtx personality = NULL;
759 int dw_cie_version;
760
761 /* Don't emit a CIE if there won't be any FDEs. */
762 if (!fde_vec)
763 return;
764
765 /* Nothing to do if the assembler's doing it all. */
766 if (dwarf2out_do_cfi_asm ())
767 return;
768
769 /* If we don't have any functions we'll want to unwind out of, don't emit
770 any EH unwind information. If we make FDEs linkonce, we may have to
771 emit an empty label for an FDE that wouldn't otherwise be emitted. We
772 want to avoid having an FDE kept around when the function it refers to
773 is discarded. Example where this matters: a primary function template
774 in C++ requires EH information, an explicit specialization doesn't. */
775 if (for_eh)
776 {
777 bool any_eh_needed = false;
778
779 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
780 {
781 if (fde->uses_eh_lsda)
782 any_eh_needed = any_lsda_needed = true;
783 else if (fde_needed_for_eh_p (fde))
784 any_eh_needed = true;
785 else if (TARGET_USES_WEAK_UNWIND_INFO)
786 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
787 }
788
789 if (!any_eh_needed)
790 return;
791 }
792
793 /* We're going to be generating comments, so turn on app. */
794 if (flag_debug_asm)
795 app_enable ();
796
797 /* Switch to the proper frame section, first time. */
798 switch_to_frame_table_section (for_eh, false);
799
800 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
801 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
802
803 /* Output the CIE. */
804 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
805 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
806 if (!XCOFF_DEBUGGING_INFO || for_eh)
807 {
808 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
809 dw2_asm_output_data (4, 0xffffffff,
810 "Initial length escape value indicating 64-bit DWARF extension");
811 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
812 "Length of Common Information Entry");
813 }
814 ASM_OUTPUT_LABEL (asm_out_file, l1);
815
816 /* Now that the CIE pointer is PC-relative for EH,
817 use 0 to identify the CIE. */
818 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
819 (for_eh ? 0 : DWARF_CIE_ID),
820 "CIE Identifier Tag");
821
822 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
823 use CIE version 1, unless that would produce incorrect results
824 due to overflowing the return register column. */
825 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
826 dw_cie_version = 1;
827 if (return_reg >= 256 || dwarf_version > 2)
828 dw_cie_version = 3;
829 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
830
831 augmentation[0] = 0;
832 augmentation_size = 0;
833
834 personality = current_unit_personality;
835 if (for_eh)
836 {
837 char *p;
838
839 /* Augmentation:
840 z Indicates that a uleb128 is present to size the
841 augmentation section.
842 L Indicates the encoding (and thus presence) of
843 an LSDA pointer in the FDE augmentation.
844 R Indicates a non-default pointer encoding for
845 FDE code pointers.
846 P Indicates the presence of an encoding + language
847 personality routine in the CIE augmentation. */
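      /* For example, a CIE for a unit with a personality routine, an LSDA
	 and a non-default FDE pointer encoding ends up with the augmentation
	 string "zPLR", assembled by the code below.  */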
848
849 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
850 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
851 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
852
853 p = augmentation + 1;
854 if (personality)
855 {
856 *p++ = 'P';
857 augmentation_size += 1 + size_of_encoded_value (per_encoding);
858 assemble_external_libcall (personality);
859 }
860 if (any_lsda_needed)
861 {
862 *p++ = 'L';
863 augmentation_size += 1;
864 }
865 if (fde_encoding != DW_EH_PE_absptr)
866 {
867 *p++ = 'R';
868 augmentation_size += 1;
869 }
870 if (p > augmentation + 1)
871 {
872 augmentation[0] = 'z';
873 *p = '\0';
874 }
875
876 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
877 if (personality && per_encoding == DW_EH_PE_aligned)
878 {
879 int offset = ( 4 /* Length */
880 + 4 /* CIE Id */
881 + 1 /* CIE version */
882 + strlen (augmentation) + 1 /* Augmentation */
883 + size_of_uleb128 (1) /* Code alignment */
884 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
885 + 1 /* RA column */
886 + 1 /* Augmentation size */
887 + 1 /* Personality encoding */ );
888 int pad = -offset & (PTR_SIZE - 1);
889
890 augmentation_size += pad;
891
892 /* Augmentations should be small, so there's scarce need to
893 iterate for a solution. Die if we exceed one uleb128 byte. */
894 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
895 }
896 }
897
898 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
899 if (dw_cie_version >= 4)
900 {
901 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
902 dw2_asm_output_data (1, 0, "CIE Segment Size");
903 }
904 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
905 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
906 "CIE Data Alignment Factor");
907
908 if (dw_cie_version == 1)
909 dw2_asm_output_data (1, return_reg, "CIE RA Column");
910 else
911 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
912
913 if (augmentation[0])
914 {
915 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
916 if (personality)
917 {
918 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
919 eh_data_format_name (per_encoding));
920 dw2_asm_output_encoded_addr_rtx (per_encoding,
921 personality,
922 true, NULL);
923 }
924
925 if (any_lsda_needed)
926 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
927 eh_data_format_name (lsda_encoding));
928
929 if (fde_encoding != DW_EH_PE_absptr)
930 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
931 eh_data_format_name (fde_encoding));
932 }
933
934 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
935 output_cfi (cfi, NULL, for_eh);
936
937 /* Pad the CIE out to an address sized boundary. */
938 ASM_OUTPUT_ALIGN (asm_out_file,
939 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
940 ASM_OUTPUT_LABEL (asm_out_file, l2);
941
942 /* Loop through all of the FDE's. */
943 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
944 {
945 unsigned int k;
946
947 /* Don't emit EH unwind info for leaf functions that don't need it. */
948 if (for_eh && !fde_needed_for_eh_p (fde))
949 continue;
950
951 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
952 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
953 augmentation, any_lsda_needed, lsda_encoding);
954 }
955
956 if (for_eh && targetm.terminate_dw2_eh_frame_info)
957 dw2_asm_output_data (4, 0, "End of Table");
958
959 /* Turn off app to make assembly quicker. */
960 if (flag_debug_asm)
961 app_disable ();
962 }
963
964 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
965
966 static void
967 dwarf2out_do_cfi_startproc (bool second)
968 {
969 int enc;
970 rtx ref;
971
972 fprintf (asm_out_file, "\t.cfi_startproc\n");
973
974 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
975 eh unwinders. */
976 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
977 return;
978
979 rtx personality = get_personality_function (current_function_decl);
980
981 if (personality)
982 {
983 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
984 ref = personality;
985
986 /* ??? The GAS support isn't entirely consistent. We have to
987 handle indirect support ourselves, but PC-relative is done
988 in the assembler. Further, the assembler can't handle any
989 of the weirder relocation types. */
990 if (enc & DW_EH_PE_indirect)
991 ref = dw2_force_const_mem (ref, true);
992
993 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
994 output_addr_const (asm_out_file, ref);
995 fputc ('\n', asm_out_file);
996 }
997
998 if (crtl->uses_eh_lsda)
999 {
1000 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
1001
1002 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1003 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1004 current_function_funcdef_no);
1005 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1006 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1007
1008 if (enc & DW_EH_PE_indirect)
1009 ref = dw2_force_const_mem (ref, true);
1010
1011 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1012 output_addr_const (asm_out_file, ref);
1013 fputc ('\n', asm_out_file);
1014 }
1015 }
1016
1017 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1018 this allocation may be done before pass_final. */
1019
1020 dw_fde_ref
1021 dwarf2out_alloc_current_fde (void)
1022 {
1023 dw_fde_ref fde;
1024
1025 fde = ggc_cleared_alloc<dw_fde_node> ();
1026 fde->decl = current_function_decl;
1027 fde->funcdef_number = current_function_funcdef_no;
1028 fde->fde_index = vec_safe_length (fde_vec);
1029 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1030 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1031 fde->nothrow = crtl->nothrow;
1032 fde->drap_reg = INVALID_REGNUM;
1033 fde->vdrap_reg = INVALID_REGNUM;
1034
1035 /* Record the FDE associated with this function. */
1036 cfun->fde = fde;
1037 vec_safe_push (fde_vec, fde);
1038
1039 return fde;
1040 }
1041
1042 /* Output a marker (i.e. a label) for the beginning of a function, before
1043 the prologue. */
1044
1045 void
1046 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1047 unsigned int column ATTRIBUTE_UNUSED,
1048 const char *file ATTRIBUTE_UNUSED)
1049 {
1050 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1051 char * dup_label;
1052 dw_fde_ref fde;
1053 section *fnsec;
1054 bool do_frame;
1055
1056 current_function_func_begin_label = NULL;
1057
1058 do_frame = dwarf2out_do_frame ();
1059
1060 /* ??? current_function_func_begin_label is also used by except.c for
1061 call-site information. We must emit this label if it might be used. */
1062 if (!do_frame
1063 && (!flag_exceptions
1064 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1065 return;
1066
1067 fnsec = function_section (current_function_decl);
1068 switch_to_section (fnsec);
1069 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1070 current_function_funcdef_no);
1071 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1072 current_function_funcdef_no);
1073 dup_label = xstrdup (label);
1074 current_function_func_begin_label = dup_label;
1075
1076 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1077 if (!do_frame)
1078 return;
1079
1080 /* Unlike the debug version, the EH version of frame unwind info is a per-
1081 function setting so we need to record whether we need it for the unit. */
1082 do_eh_frame |= dwarf2out_do_eh_frame ();
1083
1084 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1085 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1086 would include pass_dwarf2_frame. If we've not created the FDE yet,
1087 do so now. */
1088 fde = cfun->fde;
1089 if (fde == NULL)
1090 fde = dwarf2out_alloc_current_fde ();
1091
1092 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1093 fde->dw_fde_begin = dup_label;
1094 fde->dw_fde_current_label = dup_label;
1095 fde->in_std_section = (fnsec == text_section
1096 || (cold_text_section && fnsec == cold_text_section));
1097
1098 /* We only want to output line number information for the genuine dwarf2
1099 prologue case, not the eh frame case. */
1100 #ifdef DWARF2_DEBUGGING_INFO
1101 if (file)
1102 dwarf2out_source_line (line, column, file, 0, true);
1103 #endif
1104
1105 if (dwarf2out_do_cfi_asm ())
1106 dwarf2out_do_cfi_startproc (false);
1107 else
1108 {
1109 rtx personality = get_personality_function (current_function_decl);
1110 if (!current_unit_personality)
1111 current_unit_personality = personality;
1112
1113 /* We cannot keep a current personality per function as without CFI
1114 asm, at the point where we emit the CFI data, there is no current
1115 function anymore. */
1116 if (personality && current_unit_personality != personality)
1117 sorry ("multiple EH personalities are supported only with assemblers "
1118 "supporting .cfi_personality directive");
1119 }
1120 }
1121
1122 /* Output a marker (i.e. a label) for the end of the generated code
1123 for a function prologue. This gets called *after* the prologue code has
1124 been generated. */
1125
1126 void
1127 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1128 const char *file ATTRIBUTE_UNUSED)
1129 {
1130 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1131
1132   /* Output a label to mark the end of the prologue code generated for
1133      this function.  */
1134 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1135 current_function_funcdef_no);
1136 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1137 current_function_funcdef_no);
1138 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1139 }
1140
1141 /* Output a marker (i.e. a label) for the beginning of the generated code
1142    for a function epilogue.  This gets called *before* the epilogue code has
1143 been generated. */
1144
1145 void
1146 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1147 const char *file ATTRIBUTE_UNUSED)
1148 {
1149 dw_fde_ref fde = cfun->fde;
1150 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1151
1152 if (fde->dw_fde_vms_begin_epilogue)
1153 return;
1154
1155   /* Output a label to mark the point at which the epilogue code generated
1156      for this function begins.  */
1157 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1158 current_function_funcdef_no);
1159 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1160 current_function_funcdef_no);
1161 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1162 }
1163
1164 /* Output a marker (i.e. a label) for the absolute end of the generated code
1165 for a function definition. This gets called *after* the epilogue code has
1166 been generated. */
1167
1168 void
1169 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1170 const char *file ATTRIBUTE_UNUSED)
1171 {
1172 dw_fde_ref fde;
1173 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1174
1175 last_var_location_insn = NULL;
1176 cached_next_real_insn = NULL;
1177
1178 if (dwarf2out_do_cfi_asm ())
1179 fprintf (asm_out_file, "\t.cfi_endproc\n");
1180
1181 /* Output a label to mark the endpoint of the code generated for this
1182 function. */
1183 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1184 current_function_funcdef_no);
1185 ASM_OUTPUT_LABEL (asm_out_file, label);
1186 fde = cfun->fde;
1187 gcc_assert (fde != NULL);
1188 if (fde->dw_fde_second_begin == NULL)
1189 fde->dw_fde_end = xstrdup (label);
1190 }
1191
1192 void
1193 dwarf2out_frame_finish (void)
1194 {
1195 /* Output call frame information. */
1196 if (targetm.debug_unwind_info () == UI_DWARF2)
1197 output_call_frame_info (0);
1198
1199 /* Output another copy for the unwinder. */
1200 if (do_eh_frame)
1201 output_call_frame_info (1);
1202 }
1203
1204 /* Note that the current function section is being used for code. */
1205
1206 static void
1207 dwarf2out_note_section_used (void)
1208 {
1209 section *sec = current_function_section ();
1210 if (sec == text_section)
1211 text_section_used = true;
1212 else if (sec == cold_text_section)
1213 cold_text_section_used = true;
1214 }
1215
1216 static void var_location_switch_text_section (void);
1217 static void set_cur_line_info_table (section *);
1218
1219 void
1220 dwarf2out_switch_text_section (void)
1221 {
1222 section *sect;
1223 dw_fde_ref fde = cfun->fde;
1224
1225 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1226
1227 if (!in_cold_section_p)
1228 {
1229 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1230 fde->dw_fde_second_begin = crtl->subsections.hot_section_label;
1231 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1232 }
1233 else
1234 {
1235 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1236 fde->dw_fde_second_begin = crtl->subsections.cold_section_label;
1237 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1238 }
1239 have_multiple_function_sections = true;
1240
1241 /* There is no need to mark used sections when not debugging. */
1242 if (cold_text_section != NULL)
1243 dwarf2out_note_section_used ();
1244
1245 if (dwarf2out_do_cfi_asm ())
1246 fprintf (asm_out_file, "\t.cfi_endproc\n");
1247
1248 /* Now do the real section switch. */
1249 sect = current_function_section ();
1250 switch_to_section (sect);
1251
1252 fde->second_in_std_section
1253 = (sect == text_section
1254 || (cold_text_section && sect == cold_text_section));
1255
1256 if (dwarf2out_do_cfi_asm ())
1257 dwarf2out_do_cfi_startproc (true);
1258
1259 var_location_switch_text_section ();
1260
1261 if (cold_text_section != NULL)
1262 set_cur_line_info_table (sect);
1263 }
1264 \f
1265 /* And now, the subset of the debugging information support code necessary
1266 for emitting location expressions. */
1267
1268 /* Data about a single source file. */
1269 struct GTY((for_user)) dwarf_file_data {
1270 const char * filename;
1271 int emitted_number;
1272 };
1273
1274 /* Describe an entry into the .debug_addr section. */
1275
1276 enum ate_kind {
1277 ate_kind_rtx,
1278 ate_kind_rtx_dtprel,
1279 ate_kind_label
1280 };
1281
1282 struct GTY((for_user)) addr_table_entry {
1283 enum ate_kind kind;
1284 unsigned int refcount;
1285 unsigned int index;
1286 union addr_table_entry_struct_union
1287 {
1288 rtx GTY ((tag ("0"))) rtl;
1289 char * GTY ((tag ("1"))) label;
1290 }
1291 GTY ((desc ("%1.kind"))) addr;
1292 };
1293
1294 typedef unsigned int var_loc_view;
1295
1296 /* Location lists are ranges + location descriptions for that range,
1297 so you can track variables that are in different places over
1298 their entire life. */
1299 typedef struct GTY(()) dw_loc_list_struct {
1300 dw_loc_list_ref dw_loc_next;
1301 const char *begin; /* Label and addr_entry for start of range */
1302 addr_table_entry *begin_entry;
1303 const char *end; /* Label for end of range */
1304 char *ll_symbol; /* Label for beginning of location list.
1305 Only on head of list. */
1306 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1307 const char *section; /* Section this loclist is relative to */
1308 dw_loc_descr_ref expr;
1309 var_loc_view vbegin, vend;
1310 hashval_t hash;
1311 /* True if all addresses in this and subsequent lists are known to be
1312 resolved. */
1313 bool resolved_addr;
1314 /* True if this list has been replaced by dw_loc_next. */
1315 bool replaced;
1316 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1317 section. */
1318 unsigned char emitted : 1;
1319 /* True if hash field is index rather than hash value. */
1320 unsigned char num_assigned : 1;
1321 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1322 unsigned char offset_emitted : 1;
1323 /* True if note_variable_value_in_expr has been called on it. */
1324 unsigned char noted_variable_value : 1;
1325 /* True if the range should be emitted even if begin and end
1326 are the same. */
1327 bool force;
1328 } dw_loc_list_node;
1329
1330 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1331 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1332
1333 /* Convert a DWARF stack opcode into its string name. */
1334
1335 static const char *
1336 dwarf_stack_op_name (unsigned int op)
1337 {
1338 const char *name = get_DW_OP_name (op);
1339
1340 if (name != NULL)
1341 return name;
1342
1343 return "OP_<unknown>";
1344 }
1345
1346 /* Return TRUE iff we're to output location view lists as a separate
1347 attribute next to the location lists, as an extension compatible
1348 with DWARF 2 and above. */
1349
1350 static inline bool
1351 dwarf2out_locviews_in_attribute ()
1352 {
1353 return debug_variable_location_views == 1;
1354 }
1355
1356 /* Return TRUE iff we're to output location view lists as part of the
1357 location lists, as proposed for standardization after DWARF 5. */
1358
1359 static inline bool
1360 dwarf2out_locviews_in_loclist ()
1361 {
1362 #ifndef DW_LLE_view_pair
1363 return false;
1364 #else
1365 return debug_variable_location_views == -1;
1366 #endif
1367 }
1368
1369 /* Return a pointer to a newly allocated location description. Location
1370 descriptions are simple expression terms that can be strung
1371 together to form more complicated location (address) descriptions. */
1372
1373 static inline dw_loc_descr_ref
1374 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1375 unsigned HOST_WIDE_INT oprnd2)
1376 {
1377 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1378
1379 descr->dw_loc_opc = op;
1380 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1381 descr->dw_loc_oprnd1.val_entry = NULL;
1382 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1383 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1384 descr->dw_loc_oprnd2.val_entry = NULL;
1385 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1386
1387 return descr;
1388 }
1389
1390 /* Add a location description term to a location description expression. */
1391
1392 static inline void
1393 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1394 {
1395 dw_loc_descr_ref *d;
1396
1397 /* Find the end of the chain. */
1398 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1399 ;
1400
1401 *d = descr;
1402 }
1403
1404 /* Compare two location operands for exact equality. */
1405
1406 static bool
1407 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1408 {
1409 if (a->val_class != b->val_class)
1410 return false;
1411 switch (a->val_class)
1412 {
1413 case dw_val_class_none:
1414 return true;
1415 case dw_val_class_addr:
1416 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1417
1418 case dw_val_class_offset:
1419 case dw_val_class_unsigned_const:
1420 case dw_val_class_const:
1421 case dw_val_class_unsigned_const_implicit:
1422 case dw_val_class_const_implicit:
1423 case dw_val_class_range_list:
1424 /* These are all HOST_WIDE_INT, signed or unsigned. */
1425 return a->v.val_unsigned == b->v.val_unsigned;
1426
1427 case dw_val_class_loc:
1428 return a->v.val_loc == b->v.val_loc;
1429 case dw_val_class_loc_list:
1430 return a->v.val_loc_list == b->v.val_loc_list;
1431 case dw_val_class_view_list:
1432 return a->v.val_view_list == b->v.val_view_list;
1433 case dw_val_class_die_ref:
1434 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1435 case dw_val_class_fde_ref:
1436 return a->v.val_fde_index == b->v.val_fde_index;
1437 case dw_val_class_symview:
1438 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1439 case dw_val_class_lbl_id:
1440 case dw_val_class_lineptr:
1441 case dw_val_class_macptr:
1442 case dw_val_class_loclistsptr:
1443 case dw_val_class_high_pc:
1444 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1445 case dw_val_class_str:
1446 return a->v.val_str == b->v.val_str;
1447 case dw_val_class_flag:
1448 return a->v.val_flag == b->v.val_flag;
1449 case dw_val_class_file:
1450 case dw_val_class_file_implicit:
1451 return a->v.val_file == b->v.val_file;
1452 case dw_val_class_decl_ref:
1453 return a->v.val_decl_ref == b->v.val_decl_ref;
1454
1455 case dw_val_class_const_double:
1456 return (a->v.val_double.high == b->v.val_double.high
1457 && a->v.val_double.low == b->v.val_double.low);
1458
1459 case dw_val_class_wide_int:
1460 return *a->v.val_wide == *b->v.val_wide;
1461
1462 case dw_val_class_vec:
1463 {
1464 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1465 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1466
1467 return (a_len == b_len
1468 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1469 }
1470
1471 case dw_val_class_data8:
1472 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1473
1474 case dw_val_class_vms_delta:
1475 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1476                 && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1477
1478 case dw_val_class_discr_value:
1479 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1480 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1481 case dw_val_class_discr_list:
1482         /* It makes no sense to compare two discriminant value lists.  */
1483 return false;
1484 }
1485 gcc_unreachable ();
1486 }
1487
1488 /* Compare two location atoms for exact equality. */
1489
1490 static bool
1491 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1492 {
1493 if (a->dw_loc_opc != b->dw_loc_opc)
1494 return false;
1495
1496 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1497 address size, but since we always allocate cleared storage it
1498 should be zero for other types of locations. */
1499 if (a->dtprel != b->dtprel)
1500 return false;
1501
1502 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1503 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1504 }
1505
1506 /* Compare two complete location expressions for exact equality. */
1507
1508 bool
1509 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1510 {
1511 while (1)
1512 {
1513 if (a == b)
1514 return true;
1515 if (a == NULL || b == NULL)
1516 return false;
1517 if (!loc_descr_equal_p_1 (a, b))
1518 return false;
1519
1520 a = a->dw_loc_next;
1521 b = b->dw_loc_next;
1522 }
1523 }
1524
1525
1526 /* Add a constant POLY_OFFSET to a location expression. */
1527
1528 static void
1529 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1530 {
1531 dw_loc_descr_ref loc;
1532 HOST_WIDE_INT *p;
1533
1534 gcc_assert (*list_head != NULL);
1535
1536 if (known_eq (poly_offset, 0))
1537 return;
1538
1539 /* Find the end of the chain. */
1540 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1541 ;
1542
1543 HOST_WIDE_INT offset;
1544 if (!poly_offset.is_constant (&offset))
1545 {
1546 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1547 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1548 return;
1549 }
1550
1551 p = NULL;
1552 if (loc->dw_loc_opc == DW_OP_fbreg
1553 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1554 p = &loc->dw_loc_oprnd1.v.val_int;
1555 else if (loc->dw_loc_opc == DW_OP_bregx)
1556 p = &loc->dw_loc_oprnd2.v.val_int;
1557
1558 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1559      offset.  Don't optimize if a signed integer overflow would happen.  */
1560 if (p != NULL
1561 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1562 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1563 *p += offset;
1564
1565 else if (offset > 0)
1566 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1567
1568 else
1569 {
1570 loc->dw_loc_next
1571 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1572 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1573 }
1574 }
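/* For example (a sketch of the cases above): adding 16 to an expression
   ending in DW_OP_fbreg -8 simply rewrites it to DW_OP_fbreg 8; adding 16
   after an operation that cannot be folded (say DW_OP_deref) appends
   DW_OP_plus_uconst 16; and adding -16 there appends a 16 constant followed
   by DW_OP_minus.  */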
1575
1576 /* Return a pointer to a newly allocated location description for
1577 REG and OFFSET. */
1578
1579 static inline dw_loc_descr_ref
1580 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1581 {
1582 HOST_WIDE_INT const_offset;
1583 if (offset.is_constant (&const_offset))
1584 {
1585 if (reg <= 31)
1586 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1587 const_offset, 0);
1588 else
1589 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1590 }
1591 else
1592 {
1593 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1594 loc_descr_plus_const (&ret, offset);
1595 return ret;
1596 }
1597 }
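/* For instance, register 3 with a constant offset of 8 becomes
   DW_OP_breg3 8, while register 40 (above the breg0..breg31 range) becomes
   DW_OP_bregx 40, 8.  A non-constant poly_int offset is handled by starting
   from offset 0 and appending the offset with loc_descr_plus_const.  */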
1598
1599 /* Add a constant OFFSET to a location list. */
1600
1601 static void
1602 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1603 {
1604 dw_loc_list_ref d;
1605 for (d = list_head; d != NULL; d = d->dw_loc_next)
1606 loc_descr_plus_const (&d->expr, offset);
1607 }
1608
1609 #define DWARF_REF_SIZE \
1610 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1611
1612 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1613 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1614 DW_FORM_data16 with 128 bits. */
1615 #define DWARF_LARGEST_DATA_FORM_BITS \
1616 (dwarf_version >= 5 ? 128 : 64)
1617
1618 /* Utility inline function for construction of ops that were GNU extensions
1619 before DWARF 5. */
1620 static inline enum dwarf_location_atom
1621 dwarf_OP (enum dwarf_location_atom op)
1622 {
1623 switch (op)
1624 {
1625 case DW_OP_implicit_pointer:
1626 if (dwarf_version < 5)
1627 return DW_OP_GNU_implicit_pointer;
1628 break;
1629
1630 case DW_OP_entry_value:
1631 if (dwarf_version < 5)
1632 return DW_OP_GNU_entry_value;
1633 break;
1634
1635 case DW_OP_const_type:
1636 if (dwarf_version < 5)
1637 return DW_OP_GNU_const_type;
1638 break;
1639
1640 case DW_OP_regval_type:
1641 if (dwarf_version < 5)
1642 return DW_OP_GNU_regval_type;
1643 break;
1644
1645 case DW_OP_deref_type:
1646 if (dwarf_version < 5)
1647 return DW_OP_GNU_deref_type;
1648 break;
1649
1650 case DW_OP_convert:
1651 if (dwarf_version < 5)
1652 return DW_OP_GNU_convert;
1653 break;
1654
1655 case DW_OP_reinterpret:
1656 if (dwarf_version < 5)
1657 return DW_OP_GNU_reinterpret;
1658 break;
1659
1660 case DW_OP_addrx:
1661 if (dwarf_version < 5)
1662 return DW_OP_GNU_addr_index;
1663 break;
1664
1665 case DW_OP_constx:
1666 if (dwarf_version < 5)
1667 return DW_OP_GNU_const_index;
1668 break;
1669
1670 default:
1671 break;
1672 }
1673 return op;
1674 }
1675
1676 /* Similarly for attributes. */
1677 static inline enum dwarf_attribute
1678 dwarf_AT (enum dwarf_attribute at)
1679 {
1680 switch (at)
1681 {
1682 case DW_AT_call_return_pc:
1683 if (dwarf_version < 5)
1684 return DW_AT_low_pc;
1685 break;
1686
1687 case DW_AT_call_tail_call:
1688 if (dwarf_version < 5)
1689 return DW_AT_GNU_tail_call;
1690 break;
1691
1692 case DW_AT_call_origin:
1693 if (dwarf_version < 5)
1694 return DW_AT_abstract_origin;
1695 break;
1696
1697 case DW_AT_call_target:
1698 if (dwarf_version < 5)
1699 return DW_AT_GNU_call_site_target;
1700 break;
1701
1702 case DW_AT_call_target_clobbered:
1703 if (dwarf_version < 5)
1704 return DW_AT_GNU_call_site_target_clobbered;
1705 break;
1706
1707 case DW_AT_call_parameter:
1708 if (dwarf_version < 5)
1709 return DW_AT_abstract_origin;
1710 break;
1711
1712 case DW_AT_call_value:
1713 if (dwarf_version < 5)
1714 return DW_AT_GNU_call_site_value;
1715 break;
1716
1717 case DW_AT_call_data_value:
1718 if (dwarf_version < 5)
1719 return DW_AT_GNU_call_site_data_value;
1720 break;
1721
1722 case DW_AT_call_all_calls:
1723 if (dwarf_version < 5)
1724 return DW_AT_GNU_all_call_sites;
1725 break;
1726
1727 case DW_AT_call_all_tail_calls:
1728 if (dwarf_version < 5)
1729 return DW_AT_GNU_all_tail_call_sites;
1730 break;
1731
1732 case DW_AT_dwo_name:
1733 if (dwarf_version < 5)
1734 return DW_AT_GNU_dwo_name;
1735 break;
1736
1737 case DW_AT_addr_base:
1738 if (dwarf_version < 5)
1739 return DW_AT_GNU_addr_base;
1740 break;
1741
1742 default:
1743 break;
1744 }
1745 return at;
1746 }
1747
1748 /* And similarly for tags. */
1749 static inline enum dwarf_tag
1750 dwarf_TAG (enum dwarf_tag tag)
1751 {
1752 switch (tag)
1753 {
1754 case DW_TAG_call_site:
1755 if (dwarf_version < 5)
1756 return DW_TAG_GNU_call_site;
1757 break;
1758
1759 case DW_TAG_call_site_parameter:
1760 if (dwarf_version < 5)
1761 return DW_TAG_GNU_call_site_parameter;
1762 break;
1763
1764 default:
1765 break;
1766 }
1767 return tag;
1768 }
1769
1770 /* And similarly for forms. */
1771 static inline enum dwarf_form
1772 dwarf_FORM (enum dwarf_form form)
1773 {
1774 switch (form)
1775 {
1776 case DW_FORM_addrx:
1777 if (dwarf_version < 5)
1778 return DW_FORM_GNU_addr_index;
1779 break;
1780
1781 case DW_FORM_strx:
1782 if (dwarf_version < 5)
1783 return DW_FORM_GNU_str_index;
1784 break;
1785
1786 default:
1787 break;
1788 }
1789 return form;
1790 }
1791
1792 static unsigned long int get_base_type_offset (dw_die_ref);
1793
1794 /* Return the size of a location descriptor. */
1795
1796 static unsigned long
1797 size_of_loc_descr (dw_loc_descr_ref loc)
1798 {
1799 unsigned long size = 1;
1800
1801 switch (loc->dw_loc_opc)
1802 {
1803 case DW_OP_addr:
1804 size += DWARF2_ADDR_SIZE;
1805 break;
1806 case DW_OP_GNU_addr_index:
1807 case DW_OP_addrx:
1808 case DW_OP_GNU_const_index:
1809 case DW_OP_constx:
1810 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1811 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1812 break;
1813 case DW_OP_const1u:
1814 case DW_OP_const1s:
1815 size += 1;
1816 break;
1817 case DW_OP_const2u:
1818 case DW_OP_const2s:
1819 size += 2;
1820 break;
1821 case DW_OP_const4u:
1822 case DW_OP_const4s:
1823 size += 4;
1824 break;
1825 case DW_OP_const8u:
1826 case DW_OP_const8s:
1827 size += 8;
1828 break;
1829 case DW_OP_constu:
1830 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1831 break;
1832 case DW_OP_consts:
1833 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1834 break;
1835 case DW_OP_pick:
1836 size += 1;
1837 break;
1838 case DW_OP_plus_uconst:
1839 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1840 break;
1841 case DW_OP_skip:
1842 case DW_OP_bra:
1843 size += 2;
1844 break;
1845 case DW_OP_breg0:
1846 case DW_OP_breg1:
1847 case DW_OP_breg2:
1848 case DW_OP_breg3:
1849 case DW_OP_breg4:
1850 case DW_OP_breg5:
1851 case DW_OP_breg6:
1852 case DW_OP_breg7:
1853 case DW_OP_breg8:
1854 case DW_OP_breg9:
1855 case DW_OP_breg10:
1856 case DW_OP_breg11:
1857 case DW_OP_breg12:
1858 case DW_OP_breg13:
1859 case DW_OP_breg14:
1860 case DW_OP_breg15:
1861 case DW_OP_breg16:
1862 case DW_OP_breg17:
1863 case DW_OP_breg18:
1864 case DW_OP_breg19:
1865 case DW_OP_breg20:
1866 case DW_OP_breg21:
1867 case DW_OP_breg22:
1868 case DW_OP_breg23:
1869 case DW_OP_breg24:
1870 case DW_OP_breg25:
1871 case DW_OP_breg26:
1872 case DW_OP_breg27:
1873 case DW_OP_breg28:
1874 case DW_OP_breg29:
1875 case DW_OP_breg30:
1876 case DW_OP_breg31:
1877 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1878 break;
1879 case DW_OP_regx:
1880 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1881 break;
1882 case DW_OP_fbreg:
1883 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1884 break;
1885 case DW_OP_bregx:
1886 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1887 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1888 break;
1889 case DW_OP_piece:
1890 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1891 break;
1892 case DW_OP_bit_piece:
1893 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1894 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1895 break;
1896 case DW_OP_deref_size:
1897 case DW_OP_xderef_size:
1898 size += 1;
1899 break;
1900 case DW_OP_call2:
1901 size += 2;
1902 break;
1903 case DW_OP_call4:
1904 size += 4;
1905 break;
1906 case DW_OP_call_ref:
1907 case DW_OP_GNU_variable_value:
1908 size += DWARF_REF_SIZE;
1909 break;
1910 case DW_OP_implicit_value:
1911 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1912 + loc->dw_loc_oprnd1.v.val_unsigned;
1913 break;
1914 case DW_OP_implicit_pointer:
1915 case DW_OP_GNU_implicit_pointer:
1916 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1917 break;
1918 case DW_OP_entry_value:
1919 case DW_OP_GNU_entry_value:
1920 {
1921 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1922 size += size_of_uleb128 (op_size) + op_size;
1923 break;
1924 }
1925 case DW_OP_const_type:
1926 case DW_OP_GNU_const_type:
1927 {
1928 unsigned long o
1929 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1930 size += size_of_uleb128 (o) + 1;
1931 switch (loc->dw_loc_oprnd2.val_class)
1932 {
1933 case dw_val_class_vec:
1934 size += loc->dw_loc_oprnd2.v.val_vec.length
1935 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1936 break;
1937 case dw_val_class_const:
1938 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1939 break;
1940 case dw_val_class_const_double:
1941 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1942 break;
1943 case dw_val_class_wide_int:
1944 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1945 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1946 break;
1947 default:
1948 gcc_unreachable ();
1949 }
1950 break;
1951 }
1952 case DW_OP_regval_type:
1953 case DW_OP_GNU_regval_type:
1954 {
1955 unsigned long o
1956 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1957 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1958 + size_of_uleb128 (o);
1959 }
1960 break;
1961 case DW_OP_deref_type:
1962 case DW_OP_GNU_deref_type:
1963 {
1964 unsigned long o
1965 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1966 size += 1 + size_of_uleb128 (o);
1967 }
1968 break;
1969 case DW_OP_convert:
1970 case DW_OP_reinterpret:
1971 case DW_OP_GNU_convert:
1972 case DW_OP_GNU_reinterpret:
1973 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1974 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1975 else
1976 {
1977 unsigned long o
1978 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1979 size += size_of_uleb128 (o);
1980 }
1981 break;
1982 case DW_OP_GNU_parameter_ref:
1983 size += 4;
1984 break;
1985 default:
1986 break;
1987 }
1988
1989 return size;
1990 }
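
/* The LEB128 sizes used above depend only on the operand's magnitude:
   each ULEB128 byte holds seven value bits, so a sketch of the size
   computation (the real helper is defined elsewhere) is simply

     unsigned int size = 0;
     do
       {
         value >>= 7;
         size++;
       }
     while (value != 0);

   For example, the DWARF specification's sample value 624485 encodes
   as 0xe5 0x8e 0x26 and so contributes three bytes.  */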
1991
1992 /* Return the size of a series of location descriptors. */
1993
1994 unsigned long
1995 size_of_locs (dw_loc_descr_ref loc)
1996 {
1997 dw_loc_descr_ref l;
1998 unsigned long size;
1999
2000 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
2001 field, to avoid writing to a PCH file. */
2002 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2003 {
2004 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
2005 break;
2006 size += size_of_loc_descr (l);
2007 }
2008 if (! l)
2009 return size;
2010
2011 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2012 {
2013 l->dw_loc_addr = size;
2014 size += size_of_loc_descr (l);
2015 }
2016
2017 return size;
2018 }
2019
2020 /* Return the size of the value in a DW_AT_discr_value attribute. */
2021
2022 static int
2023 size_of_discr_value (dw_discr_value *discr_value)
2024 {
2025 if (discr_value->pos)
2026 return size_of_uleb128 (discr_value->v.uval);
2027 else
2028 return size_of_sleb128 (discr_value->v.sval);
2029 }
2030
2031 /* Return the size of the value in a DW_AT_discr_list attribute. */
2032
2033 static int
2034 size_of_discr_list (dw_discr_list_ref discr_list)
2035 {
2036 int size = 0;
2037
2038 for (dw_discr_list_ref list = discr_list;
2039 list != NULL;
2040 list = list->dw_discr_next)
2041 {
2042 /* One byte for the discriminant value descriptor, and then one or two
2043 LEB128 numbers, depending on whether it's a single case label or a
2044 range label. */
2045 size += 1;
2046 size += size_of_discr_value (&list->dw_discr_lower_bound);
2047 if (list->dw_discr_range != 0)
2048 size += size_of_discr_value (&list->dw_discr_upper_bound);
2049 }
2050 return size;
2051 }
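
/* For instance, a discriminant list holding the single case label 3
   followed by the range 10 .. 20 (all unsigned) occupies
   (1 + size_of_uleb128 (3)) + (1 + size_of_uleb128 (10)
   + size_of_uleb128 (20)) = 2 + 3 = 5 bytes.  */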
2052
2053 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2054 static void get_ref_die_offset_label (char *, dw_die_ref);
2055 static unsigned long int get_ref_die_offset (dw_die_ref);
2056
2057 /* Output location description stack opcode's operands (if any).
2058 The for_eh_or_skip parameter controls whether register numbers are
2059 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2060 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2061 info). This should be suppressed for the cases that have not been converted
2062 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2063
2064 static void
2065 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2066 {
2067 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2068 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2069
2070 switch (loc->dw_loc_opc)
2071 {
2072 #ifdef DWARF2_DEBUGGING_INFO
2073 case DW_OP_const2u:
2074 case DW_OP_const2s:
2075 dw2_asm_output_data (2, val1->v.val_int, NULL);
2076 break;
2077 case DW_OP_const4u:
2078 if (loc->dtprel)
2079 {
2080 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2081 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2082 val1->v.val_addr);
2083 fputc ('\n', asm_out_file);
2084 break;
2085 }
2086 /* FALLTHRU */
2087 case DW_OP_const4s:
2088 dw2_asm_output_data (4, val1->v.val_int, NULL);
2089 break;
2090 case DW_OP_const8u:
2091 if (loc->dtprel)
2092 {
2093 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2094 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2095 val1->v.val_addr);
2096 fputc ('\n', asm_out_file);
2097 break;
2098 }
2099 /* FALLTHRU */
2100 case DW_OP_const8s:
2101 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2102 dw2_asm_output_data (8, val1->v.val_int, NULL);
2103 break;
2104 case DW_OP_skip:
2105 case DW_OP_bra:
2106 {
2107 int offset;
2108
2109 gcc_assert (val1->val_class == dw_val_class_loc);
2110 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2111
2112 dw2_asm_output_data (2, offset, NULL);
2113 }
2114 break;
2115 case DW_OP_implicit_value:
2116 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2117 switch (val2->val_class)
2118 {
2119 case dw_val_class_const:
2120 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2121 break;
2122 case dw_val_class_vec:
2123 {
2124 unsigned int elt_size = val2->v.val_vec.elt_size;
2125 unsigned int len = val2->v.val_vec.length;
2126 unsigned int i;
2127 unsigned char *p;
2128
2129 if (elt_size > sizeof (HOST_WIDE_INT))
2130 {
2131 elt_size /= 2;
2132 len *= 2;
2133 }
2134 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2135 i < len;
2136 i++, p += elt_size)
2137 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2138 "fp or vector constant word %u", i);
2139 }
2140 break;
2141 case dw_val_class_const_double:
2142 {
2143 unsigned HOST_WIDE_INT first, second;
2144
2145 if (WORDS_BIG_ENDIAN)
2146 {
2147 first = val2->v.val_double.high;
2148 second = val2->v.val_double.low;
2149 }
2150 else
2151 {
2152 first = val2->v.val_double.low;
2153 second = val2->v.val_double.high;
2154 }
2155 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2156 first, NULL);
2157 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2158 second, NULL);
2159 }
2160 break;
2161 case dw_val_class_wide_int:
2162 {
2163 int i;
2164 int len = get_full_len (*val2->v.val_wide);
2165 if (WORDS_BIG_ENDIAN)
2166 for (i = len - 1; i >= 0; --i)
2167 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2168 val2->v.val_wide->elt (i), NULL);
2169 else
2170 for (i = 0; i < len; ++i)
2171 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2172 val2->v.val_wide->elt (i), NULL);
2173 }
2174 break;
2175 case dw_val_class_addr:
2176 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2177 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2178 break;
2179 default:
2180 gcc_unreachable ();
2181 }
2182 break;
2183 #else
2184 case DW_OP_const2u:
2185 case DW_OP_const2s:
2186 case DW_OP_const4u:
2187 case DW_OP_const4s:
2188 case DW_OP_const8u:
2189 case DW_OP_const8s:
2190 case DW_OP_skip:
2191 case DW_OP_bra:
2192 case DW_OP_implicit_value:
2193 /* We currently don't make any attempt to make sure these are
2194 aligned properly like we do for the main unwind info, so
2195 don't support emitting things larger than a byte if we're
2196 only doing unwinding. */
2197 gcc_unreachable ();
2198 #endif
2199 case DW_OP_const1u:
2200 case DW_OP_const1s:
2201 dw2_asm_output_data (1, val1->v.val_int, NULL);
2202 break;
2203 case DW_OP_constu:
2204 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2205 break;
2206 case DW_OP_consts:
2207 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2208 break;
2209 case DW_OP_pick:
2210 dw2_asm_output_data (1, val1->v.val_int, NULL);
2211 break;
2212 case DW_OP_plus_uconst:
2213 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2214 break;
2215 case DW_OP_breg0:
2216 case DW_OP_breg1:
2217 case DW_OP_breg2:
2218 case DW_OP_breg3:
2219 case DW_OP_breg4:
2220 case DW_OP_breg5:
2221 case DW_OP_breg6:
2222 case DW_OP_breg7:
2223 case DW_OP_breg8:
2224 case DW_OP_breg9:
2225 case DW_OP_breg10:
2226 case DW_OP_breg11:
2227 case DW_OP_breg12:
2228 case DW_OP_breg13:
2229 case DW_OP_breg14:
2230 case DW_OP_breg15:
2231 case DW_OP_breg16:
2232 case DW_OP_breg17:
2233 case DW_OP_breg18:
2234 case DW_OP_breg19:
2235 case DW_OP_breg20:
2236 case DW_OP_breg21:
2237 case DW_OP_breg22:
2238 case DW_OP_breg23:
2239 case DW_OP_breg24:
2240 case DW_OP_breg25:
2241 case DW_OP_breg26:
2242 case DW_OP_breg27:
2243 case DW_OP_breg28:
2244 case DW_OP_breg29:
2245 case DW_OP_breg30:
2246 case DW_OP_breg31:
2247 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2248 break;
2249 case DW_OP_regx:
2250 {
2251 unsigned r = val1->v.val_unsigned;
2252 if (for_eh_or_skip >= 0)
2253 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2254 gcc_assert (size_of_uleb128 (r)
2255 == size_of_uleb128 (val1->v.val_unsigned));
2256 dw2_asm_output_data_uleb128 (r, NULL);
2257 }
2258 break;
2259 case DW_OP_fbreg:
2260 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2261 break;
2262 case DW_OP_bregx:
2263 {
2264 unsigned r = val1->v.val_unsigned;
2265 if (for_eh_or_skip >= 0)
2266 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2267 gcc_assert (size_of_uleb128 (r)
2268 == size_of_uleb128 (val1->v.val_unsigned));
2269 dw2_asm_output_data_uleb128 (r, NULL);
2270 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2271 }
2272 break;
2273 case DW_OP_piece:
2274 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2275 break;
2276 case DW_OP_bit_piece:
2277 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2278 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2279 break;
2280 case DW_OP_deref_size:
2281 case DW_OP_xderef_size:
2282 dw2_asm_output_data (1, val1->v.val_int, NULL);
2283 break;
2284
2285 case DW_OP_addr:
2286 if (loc->dtprel)
2287 {
2288 if (targetm.asm_out.output_dwarf_dtprel)
2289 {
2290 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2291 DWARF2_ADDR_SIZE,
2292 val1->v.val_addr);
2293 fputc ('\n', asm_out_file);
2294 }
2295 else
2296 gcc_unreachable ();
2297 }
2298 else
2299 {
2300 #ifdef DWARF2_DEBUGGING_INFO
2301 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2302 #else
2303 gcc_unreachable ();
2304 #endif
2305 }
2306 break;
2307
2308 case DW_OP_GNU_addr_index:
2309 case DW_OP_addrx:
2310 case DW_OP_GNU_const_index:
2311 case DW_OP_constx:
2312 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2313 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2314 "(index into .debug_addr)");
2315 break;
2316
2317 case DW_OP_call2:
2318 case DW_OP_call4:
2319 {
2320 unsigned long die_offset
2321 = get_ref_die_offset (val1->v.val_die_ref.die);
2322 /* Make sure the offset has been computed and that we can encode it as
2323 an operand. */
2324 gcc_assert (die_offset > 0
2325 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2326 ? 0xffff
2327 : 0xffffffff));
2328 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2329 die_offset, NULL);
2330 }
2331 break;
2332
2333 case DW_OP_call_ref:
2334 case DW_OP_GNU_variable_value:
2335 {
2336 char label[MAX_ARTIFICIAL_LABEL_BYTES
2337 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2338 gcc_assert (val1->val_class == dw_val_class_die_ref);
2339 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2340 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2341 }
2342 break;
2343
2344 case DW_OP_implicit_pointer:
2345 case DW_OP_GNU_implicit_pointer:
2346 {
2347 char label[MAX_ARTIFICIAL_LABEL_BYTES
2348 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2349 gcc_assert (val1->val_class == dw_val_class_die_ref);
2350 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2351 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2352 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2353 }
2354 break;
2355
2356 case DW_OP_entry_value:
2357 case DW_OP_GNU_entry_value:
2358 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2359 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2360 break;
2361
2362 case DW_OP_const_type:
2363 case DW_OP_GNU_const_type:
2364 {
2365 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2366 gcc_assert (o);
2367 dw2_asm_output_data_uleb128 (o, NULL);
2368 switch (val2->val_class)
2369 {
2370 case dw_val_class_const:
2371 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2372 dw2_asm_output_data (1, l, NULL);
2373 dw2_asm_output_data (l, val2->v.val_int, NULL);
2374 break;
2375 case dw_val_class_vec:
2376 {
2377 unsigned int elt_size = val2->v.val_vec.elt_size;
2378 unsigned int len = val2->v.val_vec.length;
2379 unsigned int i;
2380 unsigned char *p;
2381
2382 l = len * elt_size;
2383 dw2_asm_output_data (1, l, NULL);
2384 if (elt_size > sizeof (HOST_WIDE_INT))
2385 {
2386 elt_size /= 2;
2387 len *= 2;
2388 }
2389 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2390 i < len;
2391 i++, p += elt_size)
2392 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2393 "fp or vector constant word %u", i);
2394 }
2395 break;
2396 case dw_val_class_const_double:
2397 {
2398 unsigned HOST_WIDE_INT first, second;
2399 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2400
2401 dw2_asm_output_data (1, 2 * l, NULL);
2402 if (WORDS_BIG_ENDIAN)
2403 {
2404 first = val2->v.val_double.high;
2405 second = val2->v.val_double.low;
2406 }
2407 else
2408 {
2409 first = val2->v.val_double.low;
2410 second = val2->v.val_double.high;
2411 }
2412 dw2_asm_output_data (l, first, NULL);
2413 dw2_asm_output_data (l, second, NULL);
2414 }
2415 break;
2416 case dw_val_class_wide_int:
2417 {
2418 int i;
2419 int len = get_full_len (*val2->v.val_wide);
2420 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2421
2422 dw2_asm_output_data (1, len * l, NULL);
2423 if (WORDS_BIG_ENDIAN)
2424 for (i = len - 1; i >= 0; --i)
2425 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2426 else
2427 for (i = 0; i < len; ++i)
2428 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2429 }
2430 break;
2431 default:
2432 gcc_unreachable ();
2433 }
2434 }
2435 break;
2436 case DW_OP_regval_type:
2437 case DW_OP_GNU_regval_type:
2438 {
2439 unsigned r = val1->v.val_unsigned;
2440 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2441 gcc_assert (o);
2442 if (for_eh_or_skip >= 0)
2443 {
2444 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2445 gcc_assert (size_of_uleb128 (r)
2446 == size_of_uleb128 (val1->v.val_unsigned));
2447 }
2448 dw2_asm_output_data_uleb128 (r, NULL);
2449 dw2_asm_output_data_uleb128 (o, NULL);
2450 }
2451 break;
2452 case DW_OP_deref_type:
2453 case DW_OP_GNU_deref_type:
2454 {
2455 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2456 gcc_assert (o);
2457 dw2_asm_output_data (1, val1->v.val_int, NULL);
2458 dw2_asm_output_data_uleb128 (o, NULL);
2459 }
2460 break;
2461 case DW_OP_convert:
2462 case DW_OP_reinterpret:
2463 case DW_OP_GNU_convert:
2464 case DW_OP_GNU_reinterpret:
2465 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2466 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2467 else
2468 {
2469 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2470 gcc_assert (o);
2471 dw2_asm_output_data_uleb128 (o, NULL);
2472 }
2473 break;
2474
2475 case DW_OP_GNU_parameter_ref:
2476 {
2477 unsigned long o;
2478 gcc_assert (val1->val_class == dw_val_class_die_ref);
2479 o = get_ref_die_offset (val1->v.val_die_ref.die);
2480 dw2_asm_output_data (4, o, NULL);
2481 }
2482 break;
2483
2484 default:
2485 /* Other codes have no operands. */
2486 break;
2487 }
2488 }
2489
2490 /* Output a sequence of location operations.
2491 The for_eh_or_skip parameter controls whether register numbers are
2492 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2493 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2494 info). This should be suppressed for the cases that have not been converted
2495 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2496
2497 void
2498 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2499 {
2500 for (; loc != NULL; loc = loc->dw_loc_next)
2501 {
2502 enum dwarf_location_atom opc = loc->dw_loc_opc;
2503 /* Output the opcode. */
2504 if (for_eh_or_skip >= 0
2505 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2506 {
2507 unsigned r = (opc - DW_OP_breg0);
2508 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2509 gcc_assert (r <= 31);
2510 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2511 }
2512 else if (for_eh_or_skip >= 0
2513 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2514 {
2515 unsigned r = (opc - DW_OP_reg0);
2516 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2517 gcc_assert (r <= 31);
2518 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2519 }
2520
2521 dw2_asm_output_data (1, opc,
2522 "%s", dwarf_stack_op_name (opc));
2523
2524 /* Output the operand(s) (if any). */
2525 output_loc_operands (loc, for_eh_or_skip);
2526 }
2527 }
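
/* As an illustration of the byte stream this produces, the one-operand
   expression DW_OP_fbreg -16 comes out as the opcode byte 0x91 followed
   by the single SLEB128 byte 0x70, while an operand-less opcode such as
   DW_OP_reg5 is emitted as the lone byte 0x55.  */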
2528
2529 /* Output location description stack opcode's operands (if any).
2530 The output is single bytes on a line, suitable for .cfi_escape. */
2531
2532 static void
2533 output_loc_operands_raw (dw_loc_descr_ref loc)
2534 {
2535 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2536 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2537
2538 switch (loc->dw_loc_opc)
2539 {
2540 case DW_OP_addr:
2541 case DW_OP_GNU_addr_index:
2542 case DW_OP_addrx:
2543 case DW_OP_GNU_const_index:
2544 case DW_OP_constx:
2545 case DW_OP_implicit_value:
2546 /* We cannot output addresses in .cfi_escape, only bytes. */
2547 gcc_unreachable ();
2548
2549 case DW_OP_const1u:
2550 case DW_OP_const1s:
2551 case DW_OP_pick:
2552 case DW_OP_deref_size:
2553 case DW_OP_xderef_size:
2554 fputc (',', asm_out_file);
2555 dw2_asm_output_data_raw (1, val1->v.val_int);
2556 break;
2557
2558 case DW_OP_const2u:
2559 case DW_OP_const2s:
2560 fputc (',', asm_out_file);
2561 dw2_asm_output_data_raw (2, val1->v.val_int);
2562 break;
2563
2564 case DW_OP_const4u:
2565 case DW_OP_const4s:
2566 fputc (',', asm_out_file);
2567 dw2_asm_output_data_raw (4, val1->v.val_int);
2568 break;
2569
2570 case DW_OP_const8u:
2571 case DW_OP_const8s:
2572 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2573 fputc (',', asm_out_file);
2574 dw2_asm_output_data_raw (8, val1->v.val_int);
2575 break;
2576
2577 case DW_OP_skip:
2578 case DW_OP_bra:
2579 {
2580 int offset;
2581
2582 gcc_assert (val1->val_class == dw_val_class_loc);
2583 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2584
2585 fputc (',', asm_out_file);
2586 dw2_asm_output_data_raw (2, offset);
2587 }
2588 break;
2589
2590 case DW_OP_regx:
2591 {
2592 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2593 gcc_assert (size_of_uleb128 (r)
2594 == size_of_uleb128 (val1->v.val_unsigned));
2595 fputc (',', asm_out_file);
2596 dw2_asm_output_data_uleb128_raw (r);
2597 }
2598 break;
2599
2600 case DW_OP_constu:
2601 case DW_OP_plus_uconst:
2602 case DW_OP_piece:
2603 fputc (',', asm_out_file);
2604 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2605 break;
2606
2607 case DW_OP_bit_piece:
2608 fputc (',', asm_out_file);
2609 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2610 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2611 break;
2612
2613 case DW_OP_consts:
2614 case DW_OP_breg0:
2615 case DW_OP_breg1:
2616 case DW_OP_breg2:
2617 case DW_OP_breg3:
2618 case DW_OP_breg4:
2619 case DW_OP_breg5:
2620 case DW_OP_breg6:
2621 case DW_OP_breg7:
2622 case DW_OP_breg8:
2623 case DW_OP_breg9:
2624 case DW_OP_breg10:
2625 case DW_OP_breg11:
2626 case DW_OP_breg12:
2627 case DW_OP_breg13:
2628 case DW_OP_breg14:
2629 case DW_OP_breg15:
2630 case DW_OP_breg16:
2631 case DW_OP_breg17:
2632 case DW_OP_breg18:
2633 case DW_OP_breg19:
2634 case DW_OP_breg20:
2635 case DW_OP_breg21:
2636 case DW_OP_breg22:
2637 case DW_OP_breg23:
2638 case DW_OP_breg24:
2639 case DW_OP_breg25:
2640 case DW_OP_breg26:
2641 case DW_OP_breg27:
2642 case DW_OP_breg28:
2643 case DW_OP_breg29:
2644 case DW_OP_breg30:
2645 case DW_OP_breg31:
2646 case DW_OP_fbreg:
2647 fputc (',', asm_out_file);
2648 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2649 break;
2650
2651 case DW_OP_bregx:
2652 {
2653 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2654 gcc_assert (size_of_uleb128 (r)
2655 == size_of_uleb128 (val1->v.val_unsigned));
2656 fputc (',', asm_out_file);
2657 dw2_asm_output_data_uleb128_raw (r);
2658 fputc (',', asm_out_file);
2659 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2660 }
2661 break;
2662
2663 case DW_OP_implicit_pointer:
2664 case DW_OP_entry_value:
2665 case DW_OP_const_type:
2666 case DW_OP_regval_type:
2667 case DW_OP_deref_type:
2668 case DW_OP_convert:
2669 case DW_OP_reinterpret:
2670 case DW_OP_GNU_implicit_pointer:
2671 case DW_OP_GNU_entry_value:
2672 case DW_OP_GNU_const_type:
2673 case DW_OP_GNU_regval_type:
2674 case DW_OP_GNU_deref_type:
2675 case DW_OP_GNU_convert:
2676 case DW_OP_GNU_reinterpret:
2677 case DW_OP_GNU_parameter_ref:
2678 gcc_unreachable ();
2679 break;
2680
2681 default:
2682 /* Other codes have no operands. */
2683 break;
2684 }
2685 }
2686
2687 void
2688 output_loc_sequence_raw (dw_loc_descr_ref loc)
2689 {
2690 while (1)
2691 {
2692 enum dwarf_location_atom opc = loc->dw_loc_opc;
2693 /* Output the opcode. */
2694 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2695 {
2696 unsigned r = (opc - DW_OP_breg0);
2697 r = DWARF2_FRAME_REG_OUT (r, 1);
2698 gcc_assert (r <= 31);
2699 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2700 }
2701 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2702 {
2703 unsigned r = (opc - DW_OP_reg0);
2704 r = DWARF2_FRAME_REG_OUT (r, 1);
2705 gcc_assert (r <= 31);
2706 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2707 }
2708 /* Output the opcode. */
2709 fprintf (asm_out_file, "%#x", opc);
2710 output_loc_operands_raw (loc);
2711
2712 if (!loc->dw_loc_next)
2713 break;
2714 loc = loc->dw_loc_next;
2715
2716 fputc (',', asm_out_file);
2717 }
2718 }
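
/* The raw form is what ends up inside assembler directives such as
   .cfi_escape.  For example (assuming no frame-register remapping by
   DWARF2_FRAME_REG_OUT), the expression DW_OP_breg6 16; DW_OP_deref
   would be printed roughly as

     0x76,0x10,0x6

   i.e. each opcode in %#x form with its operand bytes appended after
   commas.  */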
2719
2720 /* This function builds a dwarf location descriptor sequence from a
2721 dw_cfa_location, adding the given OFFSET to the result of the
2722 expression. */
2723
2724 struct dw_loc_descr_node *
2725 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2726 {
2727 struct dw_loc_descr_node *head, *tmp;
2728
2729 offset += cfa->offset;
2730
2731 if (cfa->indirect)
2732 {
2733 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2734 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2735 head->dw_loc_oprnd1.val_entry = NULL;
2736 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2737 add_loc_descr (&head, tmp);
2738 loc_descr_plus_const (&head, offset);
2739 }
2740 else
2741 head = new_reg_loc_descr (cfa->reg, offset);
2742
2743 return head;
2744 }
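
/* As an example (a sketch, not an exhaustive description): for a
   direct CFA of "register 6 + 16" the result is the single descriptor
   DW_OP_breg6 16, whereas an indirect CFA with base_offset 0 and the
   same total offset becomes DW_OP_breg6 0; DW_OP_deref followed by
   whatever loc_descr_plus_const appends for the offset
   (DW_OP_plus_uconst 16 for a positive 16).  */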
2745
2746 /* This function builds a dwarf location descriptor sequence for
2747    the address at OFFSET from the CFA when the stack is aligned to
2748    ALIGNMENT bytes.  */
2749
2750 struct dw_loc_descr_node *
2751 build_cfa_aligned_loc (dw_cfa_location *cfa,
2752 poly_int64 offset, HOST_WIDE_INT alignment)
2753 {
2754 struct dw_loc_descr_node *head;
2755 unsigned int dwarf_fp
2756 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2757
2758 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2759 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2760 {
2761 head = new_reg_loc_descr (dwarf_fp, 0);
2762 add_loc_descr (&head, int_loc_descriptor (alignment));
2763 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2764 loc_descr_plus_const (&head, offset);
2765 }
2766 else
2767 head = new_reg_loc_descr (dwarf_fp, offset);
2768 return head;
2769 }
2770 \f
2771 /* And now, the support for symbolic debugging information. */
2772
2773 /* .debug_str support. */
2774
2775 static void dwarf2out_init (const char *);
2776 static void dwarf2out_finish (const char *);
2777 static void dwarf2out_early_finish (const char *);
2778 static void dwarf2out_assembly_start (void);
2779 static void dwarf2out_define (unsigned int, const char *);
2780 static void dwarf2out_undef (unsigned int, const char *);
2781 static void dwarf2out_start_source_file (unsigned, const char *);
2782 static void dwarf2out_end_source_file (unsigned);
2783 static void dwarf2out_function_decl (tree);
2784 static void dwarf2out_begin_block (unsigned, unsigned);
2785 static void dwarf2out_end_block (unsigned, unsigned);
2786 static bool dwarf2out_ignore_block (const_tree);
2787 static void dwarf2out_early_global_decl (tree);
2788 static void dwarf2out_late_global_decl (tree);
2789 static void dwarf2out_type_decl (tree, int);
2790 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2791 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2792 dw_die_ref);
2793 static void dwarf2out_abstract_function (tree);
2794 static void dwarf2out_var_location (rtx_insn *);
2795 static void dwarf2out_inline_entry (tree);
2796 static void dwarf2out_size_function (tree);
2797 static void dwarf2out_begin_function (tree);
2798 static void dwarf2out_end_function (unsigned int);
2799 static void dwarf2out_register_main_translation_unit (tree unit);
2800 static void dwarf2out_set_name (tree, tree);
2801 static void dwarf2out_register_external_die (tree decl, const char *sym,
2802 unsigned HOST_WIDE_INT off);
2803 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2804 unsigned HOST_WIDE_INT *off);
2805
2806 /* The debug hooks structure. */
2807
2808 const struct gcc_debug_hooks dwarf2_debug_hooks =
2809 {
2810 dwarf2out_init,
2811 dwarf2out_finish,
2812 dwarf2out_early_finish,
2813 dwarf2out_assembly_start,
2814 dwarf2out_define,
2815 dwarf2out_undef,
2816 dwarf2out_start_source_file,
2817 dwarf2out_end_source_file,
2818 dwarf2out_begin_block,
2819 dwarf2out_end_block,
2820 dwarf2out_ignore_block,
2821 dwarf2out_source_line,
2822 dwarf2out_begin_prologue,
2823 #if VMS_DEBUGGING_INFO
2824 dwarf2out_vms_end_prologue,
2825 dwarf2out_vms_begin_epilogue,
2826 #else
2827 debug_nothing_int_charstar,
2828 debug_nothing_int_charstar,
2829 #endif
2830 dwarf2out_end_epilogue,
2831 dwarf2out_begin_function,
2832 dwarf2out_end_function, /* end_function */
2833 dwarf2out_register_main_translation_unit,
2834 dwarf2out_function_decl, /* function_decl */
2835 dwarf2out_early_global_decl,
2836 dwarf2out_late_global_decl,
2837 dwarf2out_type_decl, /* type_decl */
2838 dwarf2out_imported_module_or_decl,
2839 dwarf2out_die_ref_for_decl,
2840 dwarf2out_register_external_die,
2841 debug_nothing_tree, /* deferred_inline_function */
2842 /* The DWARF 2 backend tries to reduce debugging bloat by not
2843 emitting the abstract description of inline functions until
2844 something tries to reference them. */
2845 dwarf2out_abstract_function, /* outlining_inline_function */
2846 debug_nothing_rtx_code_label, /* label */
2847 debug_nothing_int, /* handle_pch */
2848 dwarf2out_var_location,
2849 dwarf2out_inline_entry, /* inline_entry */
2850 dwarf2out_size_function, /* size_function */
2851 dwarf2out_switch_text_section,
2852 dwarf2out_set_name,
2853 1, /* start_end_main_source_file */
2854 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2855 };
2856
2857 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2858 {
2859 dwarf2out_init,
2860 debug_nothing_charstar,
2861 debug_nothing_charstar,
2862 dwarf2out_assembly_start,
2863 debug_nothing_int_charstar,
2864 debug_nothing_int_charstar,
2865 debug_nothing_int_charstar,
2866 debug_nothing_int,
2867 debug_nothing_int_int, /* begin_block */
2868 debug_nothing_int_int, /* end_block */
2869 debug_true_const_tree, /* ignore_block */
2870 dwarf2out_source_line, /* source_line */
2871 debug_nothing_int_int_charstar, /* begin_prologue */
2872 debug_nothing_int_charstar, /* end_prologue */
2873 debug_nothing_int_charstar, /* begin_epilogue */
2874 debug_nothing_int_charstar, /* end_epilogue */
2875 debug_nothing_tree, /* begin_function */
2876 debug_nothing_int, /* end_function */
2877 debug_nothing_tree, /* register_main_translation_unit */
2878 debug_nothing_tree, /* function_decl */
2879 debug_nothing_tree, /* early_global_decl */
2880 debug_nothing_tree, /* late_global_decl */
2881 debug_nothing_tree_int, /* type_decl */
2882 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2883 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2884 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2885 debug_nothing_tree, /* deferred_inline_function */
2886 debug_nothing_tree, /* outlining_inline_function */
2887 debug_nothing_rtx_code_label, /* label */
2888 debug_nothing_int, /* handle_pch */
2889 debug_nothing_rtx_insn, /* var_location */
2890 debug_nothing_tree, /* inline_entry */
2891 debug_nothing_tree, /* size_function */
2892 debug_nothing_void, /* switch_text_section */
2893 debug_nothing_tree_tree, /* set_name */
2894 0, /* start_end_main_source_file */
2895 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2896 };
2897 \f
2898 /* NOTE: In the comments in this file, many references are made to
2899 "Debugging Information Entries". This term is abbreviated as `DIE'
2900 throughout the remainder of this file. */
2901
2902 /* An internal representation of the DWARF output is built, and then
2903 walked to generate the DWARF debugging info. The walk of the internal
2904 representation is done after the entire program has been compiled.
2905 The types below are used to describe the internal representation. */
2906
2907 /* Whether to put type DIEs into their own section .debug_types instead
2908    of making them part of the .debug_info section.  This is only
2909    supported for DWARF version 4 or higher, and only when the user has
2910    not disabled it with -fno-debug-types-section.  It is more efficient
2911    to put them in separate comdat sections since the linker will then
2912    be able to remove duplicates.  But not all tools support .debug_types
2913    sections yet.  For DWARF version 5 or higher .debug_types no longer
2914    exists; such units instead use the DW_UT_type unit type in .debug_info.  */
2915
2916 #define use_debug_types (dwarf_version >= 4 && flag_debug_types_section)
2917
2918 /* Various DIE's use offsets relative to the beginning of the
2919 .debug_info section to refer to each other. */
2920
2921 typedef long int dw_offset;
2922
2923 struct comdat_type_node;
2924
2925 /* The entries in the line_info table more-or-less mirror the opcodes
2926 that are used in the real dwarf line table. Arrays of these entries
2927 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2928 supported. */
2929
2930 enum dw_line_info_opcode {
2931 /* Emit DW_LNE_set_address; the operand is the label index. */
2932 LI_set_address,
2933
2934 /* Emit a row to the matrix with the given line. This may be done
2935 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2936 special opcodes. */
2937 LI_set_line,
2938
2939 /* Emit a DW_LNS_set_file. */
2940 LI_set_file,
2941
2942 /* Emit a DW_LNS_set_column. */
2943 LI_set_column,
2944
2945 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2946 LI_negate_stmt,
2947
2948 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2949 LI_set_prologue_end,
2950 LI_set_epilogue_begin,
2951
2952 /* Emit a DW_LNE_set_discriminator. */
2953 LI_set_discriminator,
2954
2955 /* Output a Fixed Advance PC; the target PC is the label index; the
2956 base PC is the previous LI_adv_address or LI_set_address entry.
2957 We only use this when emitting debug views without assembler
2958 support, at explicit user request. Ideally, we should only use
2959 it when the offset might be zero but we can't tell: it's the only
2960 way to maybe change the PC without resetting the view number. */
2961 LI_adv_address
2962 };
2963
2964 typedef struct GTY(()) dw_line_info_struct {
2965 enum dw_line_info_opcode opcode;
2966 unsigned int val;
2967 } dw_line_info_entry;
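
/* As a sketch of how these entries are used, a function placed at the
   label with index 3 whose first row is on line 42 would typically be
   recorded as { LI_set_address, 3 } followed by { LI_set_line, 42 },
   which the output phase later turns into DW_LNE_set_address plus a
   suitable line advance.  */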
2968
2969
2970 struct GTY(()) dw_line_info_table {
2971 /* The label that marks the end of this section. */
2972 const char *end_label;
2973
2974 /* The values for the last row of the matrix, as collected in the table.
2975 These are used to minimize the changes to the next row. */
2976 unsigned int file_num;
2977 unsigned int line_num;
2978 unsigned int column_num;
2979 int discrim_num;
2980 bool is_stmt;
2981 bool in_use;
2982
2983 /* This denotes the NEXT view number.
2984
2985 If it is 0, it is known that the NEXT view will be the first view
2986 at the given PC.
2987
2988 If it is -1, we're forcing the view number to be reset, e.g. at a
2989 function entry.
2990
2991 The meaning of other nonzero values depends on whether we're
2992 computing views internally or leaving it for the assembler to do
2993 so. If we're emitting them internally, view denotes the view
2994 number since the last known advance of PC. If we're leaving it
2995 for the assembler, it denotes the LVU label number that we're
2996 going to ask the assembler to assign. */
2997 var_loc_view view;
2998
2999 /* This counts the number of symbolic views emitted in this table
3000 since the latest view reset. Its max value, over all tables,
3001 sets symview_upper_bound. */
3002 var_loc_view symviews_since_reset;
3003
3004 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
3005 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
3006 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
3007 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
3008
3009 vec<dw_line_info_entry, va_gc> *entries;
3010 };
3011
3012 /* This is an upper bound for view numbers that the assembler may
3013 assign to symbolic views output in this translation. It is used to
3014 decide how big a field to use to represent view numbers in
3015 symview-classed attributes. */
3016
3017 static var_loc_view symview_upper_bound;
3018
3019 /* If we're keeping track of location views and their reset points, and
3020 INSN is a reset point (i.e., it necessarily advances the PC), mark
3021 the next view in TABLE as reset. */
3022
3023 static void
3024 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3025 {
3026 if (!debug_internal_reset_location_views)
3027 return;
3028
3029 /* Maybe turn (part of?) this test into a default target hook. */
3030 int reset = 0;
3031
3032 if (targetm.reset_location_view)
3033 reset = targetm.reset_location_view (insn);
3034
3035 if (reset)
3036 ;
3037 else if (JUMP_TABLE_DATA_P (insn))
3038 reset = 1;
3039 else if (GET_CODE (insn) == USE
3040 || GET_CODE (insn) == CLOBBER
3041 || GET_CODE (insn) == ASM_INPUT
3042 || asm_noperands (insn) >= 0)
3043 ;
3044 else if (get_attr_min_length (insn) > 0)
3045 reset = 1;
3046
3047 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3048 RESET_NEXT_VIEW (table->view);
3049 }
3050
3051 /* Each DIE attribute has a field specifying the attribute kind,
3052 a link to the next attribute in the chain, and an attribute value.
3053 Attributes are typically linked below the DIE they modify. */
3054
3055 typedef struct GTY(()) dw_attr_struct {
3056 enum dwarf_attribute dw_attr;
3057 dw_val_node dw_attr_val;
3058 }
3059 dw_attr_node;
3060
3061
3062 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3063 The children of each node form a circular list linked by
3064 die_sib. die_child points to the node *before* the "first" child node. */
3065
3066 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3067 union die_symbol_or_type_node
3068 {
3069 const char * GTY ((tag ("0"))) die_symbol;
3070 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3071 }
3072 GTY ((desc ("%0.comdat_type_p"))) die_id;
3073 vec<dw_attr_node, va_gc> *die_attr;
3074 dw_die_ref die_parent;
3075 dw_die_ref die_child;
3076 dw_die_ref die_sib;
3077 dw_die_ref die_definition; /* ref from a specification to its definition */
3078 dw_offset die_offset;
3079 unsigned long die_abbrev;
3080 int die_mark;
3081 unsigned int decl_id;
3082 enum dwarf_tag die_tag;
3083 /* Die is used and must not be pruned as unused. */
3084 BOOL_BITFIELD die_perennial_p : 1;
3085 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3086 /* For an external ref to die_symbol if die_offset contains an extra
3087 offset to that symbol. */
3088 BOOL_BITFIELD with_offset : 1;
3089 /* Whether this DIE was removed from the DIE tree, for example via
3090 prune_unused_types. We don't consider those present from the
3091 DIE lookup routines. */
3092 BOOL_BITFIELD removed : 1;
3093 /* Lots of spare bits. */
3094 }
3095 die_node;
3096
3097 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3098 static bool early_dwarf;
3099 static bool early_dwarf_finished;
3100 struct set_early_dwarf {
3101 bool saved;
3102 set_early_dwarf () : saved(early_dwarf)
3103 {
3104 gcc_assert (! early_dwarf_finished);
3105 early_dwarf = true;
3106 }
3107 ~set_early_dwarf () { early_dwarf = saved; }
3108 };
3109
3110 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3111 #define FOR_EACH_CHILD(die, c, expr) do { \
3112 c = die->die_child; \
3113 if (c) do { \
3114 c = c->die_sib; \
3115 expr; \
3116 } while (c != die->die_child); \
3117 } while (0)
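
/* For example, counting the children of DIE can be written as
   (a sketch):

     dw_die_ref c;
     unsigned int n = 0;
     FOR_EACH_CHILD (die, c, n++);

   EXPR is evaluated once per child; the macro copes with the circular
   die_sib links itself and does nothing for a childless DIE.  */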
3118
3119 /* The pubname structure */
3120
3121 typedef struct GTY(()) pubname_struct {
3122 dw_die_ref die;
3123 const char *name;
3124 }
3125 pubname_entry;
3126
3127
3128 struct GTY(()) dw_ranges {
3129 const char *label;
3130 /* If this is positive, it's a block number, otherwise it's a
3131 bitwise-negated index into dw_ranges_by_label. */
3132 int num;
3133 /* Index for the range list for DW_FORM_rnglistx. */
3134 unsigned int idx : 31;
3135   /* True if this range might possibly be in a different section
3136      from the previous entry.  */
3137 unsigned int maybe_new_sec : 1;
3138 };
3139
3140 /* A structure to hold a macinfo entry. */
3141
3142 typedef struct GTY(()) macinfo_struct {
3143 unsigned char code;
3144 unsigned HOST_WIDE_INT lineno;
3145 const char *info;
3146 }
3147 macinfo_entry;
3148
3149
3150 struct GTY(()) dw_ranges_by_label {
3151 const char *begin;
3152 const char *end;
3153 };
3154
3155 /* The comdat type node structure. */
3156 struct GTY(()) comdat_type_node
3157 {
3158 dw_die_ref root_die;
3159 dw_die_ref type_die;
3160 dw_die_ref skeleton_die;
3161 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3162 comdat_type_node *next;
3163 };
3164
3165 /* A list of DIEs for which we can't determine ancestry (parent_die
3166 field) just yet. Later in dwarf2out_finish we will fill in the
3167 missing bits. */
3168 typedef struct GTY(()) limbo_die_struct {
3169 dw_die_ref die;
3170 /* The tree for which this DIE was created. We use this to
3171 determine ancestry later. */
3172 tree created_for;
3173 struct limbo_die_struct *next;
3174 }
3175 limbo_die_node;
3176
3177 typedef struct skeleton_chain_struct
3178 {
3179 dw_die_ref old_die;
3180 dw_die_ref new_die;
3181 struct skeleton_chain_struct *parent;
3182 }
3183 skeleton_chain_node;
3184
3185 /* Define a macro which returns nonzero for a TYPE_DECL which was
3186 implicitly generated for a type.
3187
3188 Note that, unlike the C front-end (which generates a NULL named
3189 TYPE_DECL node for each complete tagged type, each array type,
3190 and each function type node created) the C++ front-end generates
3191 a _named_ TYPE_DECL node for each tagged type node created.
3192 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3193 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3194 front-end, but for each type, tagged or not. */
3195
3196 #define TYPE_DECL_IS_STUB(decl) \
3197 (DECL_NAME (decl) == NULL_TREE \
3198 || (DECL_ARTIFICIAL (decl) \
3199 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3200 /* This is necessary for stub decls that \
3201 appear in nested inline functions. */ \
3202 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3203 && (decl_ultimate_origin (decl) \
3204 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3205
3206 /* Information concerning the compilation unit's programming
3207 language, and compiler version. */
3208
3209 /* Fixed size portion of the DWARF compilation unit header. */
3210 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3211 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3212 + (dwarf_version >= 5 ? 4 : 3))
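
/* For 32-bit DWARF this works out to 4 (initial length) + 4 (abbrev
   table offset) + 3 = 11 bytes for versions 2-4 (2-byte version plus
   1-byte address size), and 12 bytes for version 5, which adds the
   unit_type byte.  */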
3213
3214 /* Fixed size portion of the DWARF comdat type unit header. */
3215 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3216 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3217 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3218
3219 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3220 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3221 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3222
3223 /* Fixed size portion of public names info. */
3224 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3225
3226 /* Fixed size portion of the address range info. */
3227 #define DWARF_ARANGES_HEADER_SIZE \
3228 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3229 DWARF2_ADDR_SIZE * 2) \
3230 - DWARF_INITIAL_LENGTH_SIZE)
3231
3232 /* Size of padding portion in the address range info. It must be
3233 aligned to twice the pointer size. */
3234 #define DWARF_ARANGES_PAD_SIZE \
3235 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3236 DWARF2_ADDR_SIZE * 2) \
3237 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
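
/* For example, with 32-bit DWARF and 8-byte addresses the header is
   rounded up to a 16-byte boundary: DWARF_ARANGES_HEADER_SIZE is
   DWARF_ROUND (4 + 4 + 4, 16) - 4 = 12, and DWARF_ARANGES_PAD_SIZE is
   16 - 12 = 4, so four bytes of padding precede the address/length
   pairs.  */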
3238
3239 /* Use assembler line directives if available. */
3240 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3241 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3242 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3243 #else
3244 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3245 #endif
3246 #endif
3247
3248 /* Use assembler views in line directives if available. */
3249 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3250 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3251 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3252 #else
3253 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3254 #endif
3255 #endif
3256
3257 /* Return true if GCC configure detected assembler support for .loc. */
3258
3259 bool
3260 dwarf2out_default_as_loc_support (void)
3261 {
3262 return DWARF2_ASM_LINE_DEBUG_INFO;
3263 #if (GCC_VERSION >= 3000)
3264 # undef DWARF2_ASM_LINE_DEBUG_INFO
3265 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3266 #endif
3267 }
3268
3269 /* Return true if GCC configure detected assembler support for views
3270 in .loc directives. */
3271
3272 bool
3273 dwarf2out_default_as_locview_support (void)
3274 {
3275 return DWARF2_ASM_VIEW_DEBUG_INFO;
3276 #if (GCC_VERSION >= 3000)
3277 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3278 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3279 #endif
3280 }
3281
3282 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3283 view computation, and it refers to a view identifier for which we
3284 will not emit a label because it is known to map to a view number
3285 zero. We won't allocate the bitmap if we're not using assembler
3286 support for location views, but we have to make the variable
3287 visible for GGC and for code that will be optimized out for lack of
3288 support but that's still parsed and compiled. We could abstract it
3289 out with macros, but it's not worth it. */
3290 static GTY(()) bitmap zero_view_p;
3291
3292 /* Evaluate to TRUE iff N is known to identify the first location view
3293    at its PC.  When not using assembler location view computation,
3294    that must be view number zero.  Otherwise, the zero_view_p bitmap
3295    is allocated and the view label numbers recorded in it are the
3296    ones known to be zero.  */
3297 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3298 || (N) == (var_loc_view)-1 \
3299 || (zero_view_p \
3300 && bitmap_bit_p (zero_view_p, (N))))
3301
3302 /* Return true iff we're to emit .loc directives for the assembler to
3303 generate line number sections.
3304
3305 When we're not emitting views, all we need from the assembler is
3306 support for .loc directives.
3307
3308 If we are emitting views, we can only use the assembler's .loc
3309 support if it also supports views.
3310
3311 When the compiler is emitting the line number programs and
3312 computing view numbers itself, it resets view numbers at known PC
3313 changes and counts from that, and then it emits view numbers as
3314 literal constants in locviewlists. There are cases in which the
3315 compiler is not sure about PC changes, e.g. when extra alignment is
3316 requested for a label. In these cases, the compiler may not reset
3317 the view counter, and the potential PC advance in the line number
3318 program will use an opcode that does not reset the view counter
3319 even if the PC actually changes, so that compiler and debug info
3320 consumer can keep view numbers in sync.
3321
3322 When the compiler defers view computation to the assembler, it
3323 emits symbolic view numbers in locviewlists, with the exception of
3324 views known to be zero (forced resets, or reset after
3325 compiler-visible PC changes): instead of emitting symbols for
3326 these, we emit literal zero and assert the assembler agrees with
3327 the compiler's assessment. We could use symbolic views everywhere,
3328 instead of special-casing zero views, but then we'd be unable to
3329 optimize out locviewlists that contain only zeros. */
3330
3331 static bool
3332 output_asm_line_debug_info (void)
3333 {
3334 return (dwarf2out_as_loc_support
3335 && (dwarf2out_as_locview_support
3336 || !debug_variable_location_views));
3337 }
3338
3339 /* Minimum line offset in a special line info. opcode.
3340 This value was chosen to give a reasonable range of values. */
3341 #define DWARF_LINE_BASE -10
3342
3343 /* First special line opcode - leave room for the standard opcodes. */
3344 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3345
3346 /* Range of line offsets in a special line info. opcode. */
3347 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
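
/* With these parameters DWARF_LINE_OPCODE_BASE is 13 and
   DWARF_LINE_RANGE is 242, so, following the DWARF line-number
   program rules, a special opcode for a line change of LINE_DELTA
   and an operation advance of ADDR_DELTA is

     opcode = (LINE_DELTA - DWARF_LINE_BASE)
              + DWARF_LINE_RANGE * ADDR_DELTA
              + DWARF_LINE_OPCODE_BASE

   and is usable whenever the result fits in a byte; e.g. advancing
   the line by 1 with no address advance gives opcode 24.  */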
3348
3349 /* Flag that indicates the initial value of the is_stmt_start flag.
3350 In the present implementation, we do not mark any lines as
3351 the beginning of a source statement, because that information
3352 is not made available by the GCC front-end. */
3353 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3354
3355 /* Maximum number of operations per instruction bundle. */
3356 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3357 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3358 #endif
3359
3360 /* This location is used by calc_die_sizes() to keep track of
3361 the offset of each DIE within the .debug_info section. */
3362 static unsigned long next_die_offset;
3363
3364 /* Record the root of the DIE's built for the current compilation unit. */
3365 static GTY(()) dw_die_ref single_comp_unit_die;
3366
3367 /* A list of type DIEs that have been separated into comdat sections. */
3368 static GTY(()) comdat_type_node *comdat_type_list;
3369
3370 /* A list of CU DIEs that have been separated. */
3371 static GTY(()) limbo_die_node *cu_die_list;
3372
3373 /* A list of DIEs with a NULL parent waiting to be relocated. */
3374 static GTY(()) limbo_die_node *limbo_die_list;
3375
3376 /* A list of DIEs for which we may have to generate
3377 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3378 static GTY(()) limbo_die_node *deferred_asm_name;
3379
3380 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3381 {
3382 typedef const char *compare_type;
3383
3384 static hashval_t hash (dwarf_file_data *);
3385 static bool equal (dwarf_file_data *, const char *);
3386 };
3387
3388 /* Filenames referenced by this compilation unit. */
3389 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3390
3391 struct decl_die_hasher : ggc_ptr_hash<die_node>
3392 {
3393 typedef tree compare_type;
3394
3395 static hashval_t hash (die_node *);
3396 static bool equal (die_node *, tree);
3397 };
3398 /* A hash table of references to DIE's that describe declarations.
3399 The key is a DECL_UID() which is a unique number identifying each decl. */
3400 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3401
3402 struct GTY ((for_user)) variable_value_struct {
3403 unsigned int decl_id;
3404 vec<dw_die_ref, va_gc> *dies;
3405 };
3406
3407 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3408 {
3409 typedef tree compare_type;
3410
3411 static hashval_t hash (variable_value_struct *);
3412 static bool equal (variable_value_struct *, tree);
3413 };
3414 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3415 dw_val_class_decl_ref class, indexed by FUNCTION_DECLs which is
3416 DECL_CONTEXT of the referenced VAR_DECLs. */
3417 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3418
3419 struct block_die_hasher : ggc_ptr_hash<die_struct>
3420 {
3421 static hashval_t hash (die_struct *);
3422 static bool equal (die_struct *, die_struct *);
3423 };
3424
3425 /* A hash table of references to DIE's that describe COMMON blocks.
3426 The key is DECL_UID() ^ die_parent. */
3427 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3428
3429 typedef struct GTY(()) die_arg_entry_struct {
3430 dw_die_ref die;
3431 tree arg;
3432 } die_arg_entry;
3433
3434
3435 /* Node of the variable location list. */
3436 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3437 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3438 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3439 in mode of the EXPR_LIST node and first EXPR_LIST operand
3440 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3441 location or NULL for padding. For larger bitsizes,
3442 mode is 0 and first operand is a CONCAT with bitsize
3443 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3444 NULL as second operand. */
3445 rtx GTY (()) loc;
3446 const char * GTY (()) label;
3447 struct var_loc_node * GTY (()) next;
3448 var_loc_view view;
3449 };
3450
3451 /* Variable location list. */
3452 struct GTY ((for_user)) var_loc_list_def {
3453 struct var_loc_node * GTY (()) first;
3454
3455   /* Pointer to the last or last-but-one element of the chained
3456      list.  If the list is empty, both first and last are NULL.
3457      If the list contains just one node, or the last node is
3458      certainly not redundant, it points to the last node;
3459      otherwise it points to the last but one.  Do not mark it
3460      for GC because it is marked through the chain.  */
3461 struct var_loc_node * GTY ((skip ("%h"))) last;
3462
3463 /* Pointer to the last element before section switch,
3464 if NULL, either sections weren't switched or first
3465 is after section switch. */
3466 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3467
3468 /* DECL_UID of the variable decl. */
3469 unsigned int decl_id;
3470 };
3471 typedef struct var_loc_list_def var_loc_list;
3472
3473 /* Call argument location list. */
3474 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3475 rtx GTY (()) call_arg_loc_note;
3476 const char * GTY (()) label;
3477 tree GTY (()) block;
3478 bool tail_call_p;
3479 rtx GTY (()) symbol_ref;
3480 struct call_arg_loc_node * GTY (()) next;
3481 };
3482
3483
3484 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3485 {
3486 typedef const_tree compare_type;
3487
3488 static hashval_t hash (var_loc_list *);
3489 static bool equal (var_loc_list *, const_tree);
3490 };
3491
3492 /* Table of decl location linked lists. */
3493 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3494
3495 /* Head and tail of call_arg_loc chain. */
3496 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3497 static struct call_arg_loc_node *call_arg_loc_last;
3498
3499 /* Number of call sites in the current function. */
3500 static int call_site_count = -1;
3501 /* Number of tail call sites in the current function. */
3502 static int tail_call_site_count = -1;
3503
3504 /* A cached location list. */
3505 struct GTY ((for_user)) cached_dw_loc_list_def {
3506 /* The DECL_UID of the decl that this entry describes. */
3507 unsigned int decl_id;
3508
3509 /* The cached location list. */
3510 dw_loc_list_ref loc_list;
3511 };
3512 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3513
3514 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3515 {
3516
3517 typedef const_tree compare_type;
3518
3519 static hashval_t hash (cached_dw_loc_list *);
3520 static bool equal (cached_dw_loc_list *, const_tree);
3521 };
3522
3523 /* Table of cached location lists. */
3524 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3525
3526 /* A vector of references to DIE's that are uniquely identified by their tag,
3527 presence/absence of children DIE's, and list of attribute/value pairs. */
3528 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3529
3530 /* A hash map to remember the stack usage for DWARF procedures. The value
3531 stored is the stack size difference between before the DWARF procedure
3532 invocation and after it returned. In other words, for a DWARF procedure
3533 that consumes N stack slots and pushes M new ones, this stores M - N. */
3534 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
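/* For example, a DWARF procedure that consumes two stack slots and pushes
   one in their place would have 1 - 2 = -1 stored in this map. */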
3535
3536 /* A global counter for generating labels for line number data. */
3537 static unsigned int line_info_label_num;
3538
3539 /* The current table to which we should emit line number information
3540 for the current function. This will be set up at the beginning of
3541 assembly for the function. */
3542 static GTY(()) dw_line_info_table *cur_line_info_table;
3543
3544 /* The two default tables of line number info. */
3545 static GTY(()) dw_line_info_table *text_section_line_info;
3546 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3547
3548 /* The set of all non-default tables of line number info. */
3549 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3550
3551 /* A flag to tell pubnames/types export if there is an info section to
3552 refer to. */
3553 static bool info_section_emitted;
3554
3555 /* A pointer to the base of a table that contains a list of publicly
3556 accessible names. */
3557 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3558
3559 /* A pointer to the base of a table that contains a list of publicly
3560 accessible types. */
3561 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3562
3563 /* A pointer to the base of a table that contains a list of macro
3564 defines/undefines (and file start/end markers). */
3565 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3566
3567 /* True if .debug_macinfo or .debug_macros section is going to be
3568 emitted. */
3569 #define have_macinfo \
3570 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3571 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3572 && !macinfo_table->is_empty ())
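/* Apart from the XCOFF guard, this is true only when verbose debug info
   was requested (e.g. -g3, which sets DINFO_LEVEL_VERBOSE) and at least
   one macro define/undefine or file marker was recorded in macinfo_table. */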
3573
3574 /* Vector of dies for which we should generate .debug_ranges info. */
3575 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3576
3577 /* Vector of pairs of labels referenced in ranges_table. */
3578 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3579
3580 /* Whether we have location lists that need outputting. */
3581 static GTY(()) bool have_location_lists;
3582
3583 /* Unique label counter. */
3584 static GTY(()) unsigned int loclabel_num;
3585
3586 /* Unique label counter for point-of-call tables. */
3587 static GTY(()) unsigned int poc_label_num;
3588
3589 /* The last file entry emitted by maybe_emit_file(). */
3590 static GTY(()) struct dwarf_file_data * last_emitted_file;
3591
3592 /* Number of internal labels generated by gen_internal_sym(). */
3593 static GTY(()) int label_num;
3594
3595 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3596
3597 /* Instances of generic types for which we need to generate debug
3598 info that describe their generic parameters and arguments. That
3599 generation needs to happen once all types are properly laid out so
3600 we do it at the end of compilation. */
3601 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3602
3603 /* Offset from the "steady-state frame pointer" to the frame base,
3604 within the current function. */
3605 static poly_int64 frame_pointer_fb_offset;
3606 static bool frame_pointer_fb_offset_valid;
3607
3608 static vec<dw_die_ref> base_types;
3609
3610 /* Flags to represent a set of attribute classes for attributes that represent
3611 a scalar value (bounds, pointers, ...). */
3612 enum dw_scalar_form
3613 {
3614 dw_scalar_form_constant = 0x01,
3615 dw_scalar_form_exprloc = 0x02,
3616 dw_scalar_form_reference = 0x04
3617 };
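/* These values are bit flags, so callers can OR them together to describe
   the set of acceptable forms; e.g. (dw_scalar_form_constant
   | dw_scalar_form_exprloc) accepts either a constant or a DWARF
   expression location. */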
3618
3619 /* Forward declarations for functions defined in this file. */
3620
3621 static int is_pseudo_reg (const_rtx);
3622 static tree type_main_variant (tree);
3623 static int is_tagged_type (const_tree);
3624 static const char *dwarf_tag_name (unsigned);
3625 static const char *dwarf_attr_name (unsigned);
3626 static const char *dwarf_form_name (unsigned);
3627 static tree decl_ultimate_origin (const_tree);
3628 static tree decl_class_context (tree);
3629 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3630 static inline enum dw_val_class AT_class (dw_attr_node *);
3631 static inline unsigned int AT_index (dw_attr_node *);
3632 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3633 static inline unsigned AT_flag (dw_attr_node *);
3634 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3635 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3636 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3637 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3638 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3639 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3640 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3641 unsigned int, unsigned char *);
3642 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3643 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3644 static inline const char *AT_string (dw_attr_node *);
3645 static enum dwarf_form AT_string_form (dw_attr_node *);
3646 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3647 static void add_AT_specification (dw_die_ref, dw_die_ref);
3648 static inline dw_die_ref AT_ref (dw_attr_node *);
3649 static inline int AT_ref_external (dw_attr_node *);
3650 static inline void set_AT_ref_external (dw_attr_node *, int);
3651 static void add_AT_fde_ref (dw_die_ref, enum dwarf_attribute, unsigned);
3652 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3653 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3654 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3655 dw_loc_list_ref);
3656 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3657 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3659 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3660 static void remove_addr_table_entry (addr_table_entry *);
3661 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3662 static inline rtx AT_addr (dw_attr_node *);
3663 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3664 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3665 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3666 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3667 static void add_AT_loclistsptr (dw_die_ref, enum dwarf_attribute,
3668 const char *);
3669 static void add_AT_offset (dw_die_ref, enum dwarf_attribute,
3670 unsigned HOST_WIDE_INT);
3671 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3672 unsigned long, bool);
3673 static inline const char *AT_lbl (dw_attr_node *);
3674 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3675 static const char *get_AT_low_pc (dw_die_ref);
3676 static const char *get_AT_hi_pc (dw_die_ref);
3677 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3678 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3679 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3680 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3681 static bool is_cxx (void);
3682 static bool is_cxx (const_tree);
3683 static bool is_fortran (void);
3684 static bool is_ada (void);
3685 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3686 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3687 static void add_child_die (dw_die_ref, dw_die_ref);
3688 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3689 static dw_die_ref lookup_type_die (tree);
3690 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3691 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3692 static void equate_type_number_to_die (tree, dw_die_ref);
3693 static dw_die_ref lookup_decl_die (tree);
3694 static var_loc_list *lookup_decl_loc (const_tree);
3695 static void equate_decl_number_to_die (tree, dw_die_ref);
3696 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3697 static void print_spaces (FILE *);
3698 static void print_die (dw_die_ref, FILE *);
3699 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3700 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3701 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3702 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3703 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3704 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3705 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3706 struct md5_ctx *, int *);
3707 struct checksum_attributes;
3708 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3709 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3710 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3711 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3712 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3713 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3714 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3715 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3716 static int is_type_die (dw_die_ref);
3717 static int is_comdat_die (dw_die_ref);
3718 static inline bool is_template_instantiation (dw_die_ref);
3719 static int is_declaration_die (dw_die_ref);
3720 static int should_move_die_to_comdat (dw_die_ref);
3721 static dw_die_ref clone_as_declaration (dw_die_ref);
3722 static dw_die_ref clone_die (dw_die_ref);
3723 static dw_die_ref clone_tree (dw_die_ref);
3724 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3725 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3726 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3727 static dw_die_ref generate_skeleton (dw_die_ref);
3728 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3729 dw_die_ref,
3730 dw_die_ref);
3731 static void break_out_comdat_types (dw_die_ref);
3732 static void copy_decls_for_unworthy_types (dw_die_ref);
3733
3734 static void add_sibling_attributes (dw_die_ref);
3735 static void output_location_lists (dw_die_ref);
3736 static int constant_size (unsigned HOST_WIDE_INT);
3737 static unsigned long size_of_die (dw_die_ref);
3738 static void calc_die_sizes (dw_die_ref);
3739 static void calc_base_type_die_sizes (void);
3740 static void mark_dies (dw_die_ref);
3741 static void unmark_dies (dw_die_ref);
3742 static void unmark_all_dies (dw_die_ref);
3743 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3744 static unsigned long size_of_aranges (void);
3745 static enum dwarf_form value_format (dw_attr_node *);
3746 static void output_value_format (dw_attr_node *);
3747 static void output_abbrev_section (void);
3748 static void output_die_abbrevs (unsigned long, dw_die_ref);
3749 static void output_die (dw_die_ref);
3750 static void output_compilation_unit_header (enum dwarf_unit_type);
3751 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3752 static void output_comdat_type_unit (comdat_type_node *);
3753 static const char *dwarf2_name (tree, int);
3754 static void add_pubname (tree, dw_die_ref);
3755 static void add_enumerator_pubname (const char *, dw_die_ref);
3756 static void add_pubname_string (const char *, dw_die_ref);
3757 static void add_pubtype (tree, dw_die_ref);
3758 static void output_pubnames (vec<pubname_entry, va_gc> *);
3759 static void output_aranges (void);
3760 static unsigned int add_ranges (const_tree, bool = false);
3761 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3762 bool *, bool);
3763 static void output_ranges (void);
3764 static dw_line_info_table *new_line_info_table (void);
3765 static void output_line_info (bool);
3766 static void output_file_names (void);
3767 static dw_die_ref base_type_die (tree, bool);
3768 static int is_base_type (tree);
3769 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3770 static int decl_quals (const_tree);
3771 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3772 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3773 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3774 static int type_is_enum (const_tree);
3775 static unsigned int dbx_reg_number (const_rtx);
3776 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3777 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3778 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3779 enum var_init_status);
3780 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3781 enum var_init_status);
3782 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3783 enum var_init_status);
3784 static int is_based_loc (const_rtx);
3785 static bool resolve_one_addr (rtx *);
3786 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3787 enum var_init_status);
3788 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3789 enum var_init_status);
3790 struct loc_descr_context;
3791 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3792 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3793 static dw_loc_list_ref loc_list_from_tree (tree, int,
3794 struct loc_descr_context *);
3795 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3796 struct loc_descr_context *);
3797 static HOST_WIDE_INT ceiling (HOST_WIDE_INT, unsigned int);
3798 static tree field_type (const_tree);
3799 static unsigned int simple_type_align_in_bits (const_tree);
3800 static unsigned int simple_decl_align_in_bits (const_tree);
3801 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3802 struct vlr_context;
3803 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3804 HOST_WIDE_INT *);
3805 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3806 dw_loc_list_ref);
3807 static void add_data_member_location_attribute (dw_die_ref, tree,
3808 struct vlr_context *);
3809 static bool add_const_value_attribute (dw_die_ref, rtx);
3810 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3811 static void insert_wide_int (const wide_int &, unsigned char *, int);
3812 static void insert_float (const_rtx, unsigned char *);
3813 static rtx rtl_for_decl_location (tree);
3814 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3815 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3816 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3817 static void add_name_attribute (dw_die_ref, const char *);
3818 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3819 static void add_comp_dir_attribute (dw_die_ref);
3820 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3821 struct loc_descr_context *);
3822 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3823 struct loc_descr_context *);
3824 static void add_subscript_info (dw_die_ref, tree, bool);
3825 static void add_byte_size_attribute (dw_die_ref, tree);
3826 static void add_alignment_attribute (dw_die_ref, tree);
3827 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3828 struct vlr_context *);
3829 static void add_bit_size_attribute (dw_die_ref, tree);
3830 static void add_prototyped_attribute (dw_die_ref, tree);
3831 static dw_die_ref add_abstract_origin_attribute (dw_die_ref, tree);
3832 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3833 static void add_src_coords_attributes (dw_die_ref, tree);
3834 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3835 static void add_discr_value (dw_die_ref, dw_discr_value *);
3836 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3837 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3838 static void push_decl_scope (tree);
3839 static void pop_decl_scope (void);
3840 static dw_die_ref scope_die_for (tree, dw_die_ref);
3841 static inline int local_scope_p (dw_die_ref);
3842 static inline int class_scope_p (dw_die_ref);
3843 static inline int class_or_namespace_scope_p (dw_die_ref);
3844 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3845 static void add_calling_convention_attribute (dw_die_ref, tree);
3846 static const char *type_tag (const_tree);
3847 static tree member_declared_type (const_tree);
3848 #if 0
3849 static const char *decl_start_label (tree);
3850 #endif
3851 static void gen_array_type_die (tree, dw_die_ref);
3852 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3853 #if 0
3854 static void gen_entry_point_die (tree, dw_die_ref);
3855 #endif
3856 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3857 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3858 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3859 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3860 static void gen_formal_types_die (tree, dw_die_ref);
3861 static void gen_subprogram_die (tree, dw_die_ref);
3862 static void gen_variable_die (tree, tree, dw_die_ref);
3863 static void gen_const_die (tree, dw_die_ref);
3864 static void gen_label_die (tree, dw_die_ref);
3865 static void gen_lexical_block_die (tree, dw_die_ref);
3866 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3867 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3868 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3869 static dw_die_ref gen_compile_unit_die (const char *);
3870 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3871 static void gen_member_die (tree, dw_die_ref);
3872 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3873 enum debug_info_usage);
3874 static void gen_subroutine_type_die (tree, dw_die_ref);
3875 static void gen_typedef_die (tree, dw_die_ref);
3876 static void gen_type_die (tree, dw_die_ref);
3877 static void gen_block_die (tree, dw_die_ref);
3878 static void decls_for_scope (tree, dw_die_ref);
3879 static bool is_naming_typedef_decl (const_tree);
3880 static inline dw_die_ref get_context_die (tree);
3881 static void gen_namespace_die (tree, dw_die_ref);
3882 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3883 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3884 static dw_die_ref force_decl_die (tree);
3885 static dw_die_ref force_type_die (tree);
3886 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3887 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3888 static struct dwarf_file_data * lookup_filename (const char *);
3889 static void retry_incomplete_types (void);
3890 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3891 static void gen_generic_params_dies (tree);
3892 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3893 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3894 static void splice_child_die (dw_die_ref, dw_die_ref);
3895 static int file_info_cmp (const void *, const void *);
3896 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3897 const char *, var_loc_view, const char *);
3898 static void output_loc_list (dw_loc_list_ref);
3899 static char *gen_internal_sym (const char *);
3900 static bool want_pubnames (void);
3901
3902 static void prune_unmark_dies (dw_die_ref);
3903 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3904 static void prune_unused_types_mark (dw_die_ref, int);
3905 static void prune_unused_types_walk (dw_die_ref);
3906 static void prune_unused_types_walk_attribs (dw_die_ref);
3907 static void prune_unused_types_prune (dw_die_ref);
3908 static void prune_unused_types (void);
3909 static int maybe_emit_file (struct dwarf_file_data *fd);
3910 static inline const char *AT_vms_delta1 (dw_attr_node *);
3911 static inline const char *AT_vms_delta2 (dw_attr_node *);
3912 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3913 const char *, const char *);
3914 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3915 static void gen_remaining_tmpl_value_param_die_attribute (void);
3916 static bool generic_type_p (tree);
3917 static void schedule_generic_params_dies_gen (tree t);
3918 static void gen_scheduled_generic_parms_dies (void);
3919 static void resolve_variable_values (void);
3920
3921 static const char *comp_dir_string (void);
3922
3923 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3924
3925 /* enum for tracking thread-local variables whose address is really an offset
3926 relative to the TLS pointer, which will need link-time relocation, but will
3927 not need relocation by the DWARF consumer. */
3928
3929 enum dtprel_bool
3930 {
3931 dtprel_false = 0,
3932 dtprel_true = 1
3933 };
3934
3935 /* Return the operator to use for an address of a variable. For dtprel_true, we
3936 use DW_OP_const*. For regular variables, which need both link-time
3937 relocation and consumer-level relocation (e.g., to account for shared objects
3938 loaded at a random address), we use DW_OP_addr*. */
3939
3940 static inline enum dwarf_location_atom
3941 dw_addr_op (enum dtprel_bool dtprel)
3942 {
3943 if (dtprel == dtprel_true)
3944 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3945 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3946 else
3947 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3948 }
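/* For instance, with -gsplit-dwarf a TLS offset is emitted via DW_OP_constx
   (or the corresponding GNU extension before DWARF 5) and an ordinary
   address via DW_OP_addrx; without split debug info, a target with 8-byte
   addresses uses DW_OP_const8u for the TLS case and DW_OP_addr otherwise. */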
3949
3950 /* Return a pointer to a newly allocated address location description. If
3951 dwarf_split_debug_info is true, then record the address with the appropriate
3952 relocation. */
3953 static inline dw_loc_descr_ref
3954 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3955 {
3956 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3957
3958 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3959 ref->dw_loc_oprnd1.v.val_addr = addr;
3960 ref->dtprel = dtprel;
3961 if (dwarf_split_debug_info)
3962 ref->dw_loc_oprnd1.val_entry
3963 = add_addr_table_entry (addr,
3964 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3965 else
3966 ref->dw_loc_oprnd1.val_entry = NULL;
3967
3968 return ref;
3969 }
3970
3971 /* Section names used to hold DWARF debugging information. */
3972
3973 #ifndef DEBUG_INFO_SECTION
3974 #define DEBUG_INFO_SECTION ".debug_info"
3975 #endif
3976 #ifndef DEBUG_DWO_INFO_SECTION
3977 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3978 #endif
3979 #ifndef DEBUG_LTO_INFO_SECTION
3980 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3981 #endif
3982 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3983 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3984 #endif
3985 #ifndef DEBUG_ABBREV_SECTION
3986 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3987 #endif
3988 #ifndef DEBUG_LTO_ABBREV_SECTION
3989 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3990 #endif
3991 #ifndef DEBUG_DWO_ABBREV_SECTION
3992 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3993 #endif
3994 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3995 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3996 #endif
3997 #ifndef DEBUG_ARANGES_SECTION
3998 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3999 #endif
4000 #ifndef DEBUG_ADDR_SECTION
4001 #define DEBUG_ADDR_SECTION ".debug_addr"
4002 #endif
4003 #ifndef DEBUG_MACINFO_SECTION
4004 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
4005 #endif
4006 #ifndef DEBUG_LTO_MACINFO_SECTION
4007 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
4008 #endif
4009 #ifndef DEBUG_DWO_MACINFO_SECTION
4010 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
4011 #endif
4012 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
4013 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
4014 #endif
4015 #ifndef DEBUG_MACRO_SECTION
4016 #define DEBUG_MACRO_SECTION ".debug_macro"
4017 #endif
4018 #ifndef DEBUG_LTO_MACRO_SECTION
4019 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
4020 #endif
4021 #ifndef DEBUG_DWO_MACRO_SECTION
4022 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4023 #endif
4024 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4025 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4026 #endif
4027 #ifndef DEBUG_LINE_SECTION
4028 #define DEBUG_LINE_SECTION ".debug_line"
4029 #endif
4030 #ifndef DEBUG_LTO_LINE_SECTION
4031 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4032 #endif
4033 #ifndef DEBUG_DWO_LINE_SECTION
4034 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4035 #endif
4036 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4037 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4038 #endif
4039 #ifndef DEBUG_LOC_SECTION
4040 #define DEBUG_LOC_SECTION ".debug_loc"
4041 #endif
4042 #ifndef DEBUG_DWO_LOC_SECTION
4043 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4044 #endif
4045 #ifndef DEBUG_LOCLISTS_SECTION
4046 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4047 #endif
4048 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4049 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4050 #endif
4051 #ifndef DEBUG_PUBNAMES_SECTION
4052 #define DEBUG_PUBNAMES_SECTION \
4053 ((debug_generate_pub_sections == 2) \
4054 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4055 #endif
4056 #ifndef DEBUG_PUBTYPES_SECTION
4057 #define DEBUG_PUBTYPES_SECTION \
4058 ((debug_generate_pub_sections == 2) \
4059 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4060 #endif
4061 #ifndef DEBUG_STR_OFFSETS_SECTION
4062 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4063 #endif
4064 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4065 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4066 #endif
4067 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4068 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4069 #endif
4070 #ifndef DEBUG_STR_SECTION
4071 #define DEBUG_STR_SECTION ".debug_str"
4072 #endif
4073 #ifndef DEBUG_LTO_STR_SECTION
4074 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4075 #endif
4076 #ifndef DEBUG_STR_DWO_SECTION
4077 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4078 #endif
4079 #ifndef DEBUG_LTO_STR_DWO_SECTION
4080 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4081 #endif
4082 #ifndef DEBUG_RANGES_SECTION
4083 #define DEBUG_RANGES_SECTION ".debug_ranges"
4084 #endif
4085 #ifndef DEBUG_RNGLISTS_SECTION
4086 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4087 #endif
4088 #ifndef DEBUG_LINE_STR_SECTION
4089 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4090 #endif
4091 #ifndef DEBUG_LTO_LINE_STR_SECTION
4092 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4093 #endif
4094
4095 /* Standard ELF section names for compiled code and data. */
4096 #ifndef TEXT_SECTION_NAME
4097 #define TEXT_SECTION_NAME ".text"
4098 #endif
4099
4100 /* Section flags for .debug_str section. */
4101 #define DEBUG_STR_SECTION_FLAGS \
4102 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4103 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4104 : SECTION_DEBUG)
4105
4106 /* Section flags for .debug_str.dwo section. */
4107 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4108
4109 /* Attribute used to refer to the macro section. */
4110 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4111 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
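/* That is: DW_AT_macros when targeting DWARF 5 or later, DW_AT_macro_info
   when strict DWARF is requested on older versions, and the GNU extension
   DW_AT_GNU_macros otherwise. */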
4112
4113 /* Labels we insert at the beginning of sections so we can reference them
4114 instead of the section names themselves. */
4115
4116 #ifndef TEXT_SECTION_LABEL
4117 #define TEXT_SECTION_LABEL "Ltext"
4118 #endif
4119 #ifndef COLD_TEXT_SECTION_LABEL
4120 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4121 #endif
4122 #ifndef DEBUG_LINE_SECTION_LABEL
4123 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4124 #endif
4125 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4126 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4127 #endif
4128 #ifndef DEBUG_INFO_SECTION_LABEL
4129 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4130 #endif
4131 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4132 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4133 #endif
4134 #ifndef DEBUG_ABBREV_SECTION_LABEL
4135 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4136 #endif
4137 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4138 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4139 #endif
4140 #ifndef DEBUG_ADDR_SECTION_LABEL
4141 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4142 #endif
4143 #ifndef DEBUG_LOC_SECTION_LABEL
4144 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4145 #endif
4146 #ifndef DEBUG_RANGES_SECTION_LABEL
4147 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4148 #endif
4149 #ifndef DEBUG_MACINFO_SECTION_LABEL
4150 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4151 #endif
4152 #ifndef DEBUG_MACRO_SECTION_LABEL
4153 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4154 #endif
4155 #define SKELETON_COMP_DIE_ABBREV 1
4156 #define SKELETON_TYPE_DIE_ABBREV 2
4157
4158 /* Definitions of defaults for formats and names of various special
4159 (artificial) labels which may be generated within this file (when the -g
4160 option is used and DWARF2_DEBUGGING_INFO is in effect).
4161 If necessary, these may be overridden from within the tm.h file, but
4162 typically, overriding these defaults is unnecessary. */
4163
4164 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4165 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4166 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4167 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4168 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4169 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4170 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4171 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4172 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4173 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4174 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4175 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4176 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4177 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4178 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4179
4180 #ifndef TEXT_END_LABEL
4181 #define TEXT_END_LABEL "Letext"
4182 #endif
4183 #ifndef COLD_END_LABEL
4184 #define COLD_END_LABEL "Letext_cold"
4185 #endif
4186 #ifndef BLOCK_BEGIN_LABEL
4187 #define BLOCK_BEGIN_LABEL "LBB"
4188 #endif
4189 #ifndef BLOCK_INLINE_ENTRY_LABEL
4190 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4191 #endif
4192 #ifndef BLOCK_END_LABEL
4193 #define BLOCK_END_LABEL "LBE"
4194 #endif
4195 #ifndef LINE_CODE_LABEL
4196 #define LINE_CODE_LABEL "LM"
4197 #endif
4198
4199 \f
4200 /* Return the root of the DIE's built for the current compilation unit. */
4201 static dw_die_ref
4202 comp_unit_die (void)
4203 {
4204 if (!single_comp_unit_die)
4205 single_comp_unit_die = gen_compile_unit_die (NULL);
4206 return single_comp_unit_die;
4207 }
4208
4209 /* We allow a language front-end to designate a function that is to be
4210 called to "demangle" any name before it is put into a DIE. */
4211
4212 static const char *(*demangle_name_func) (const char *);
4213
4214 void
4215 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4216 {
4217 demangle_name_func = func;
4218 }
4219
4220 /* Test if rtl node points to a pseudo register. */
4221
4222 static inline int
4223 is_pseudo_reg (const_rtx rtl)
4224 {
4225 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4226 || (GET_CODE (rtl) == SUBREG
4227 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4228 }
4229
4230 /* Return a reference to a type, with its const and volatile qualifiers
4231 removed. */
4232
4233 static inline tree
4234 type_main_variant (tree type)
4235 {
4236 type = TYPE_MAIN_VARIANT (type);
4237
4238 /* ??? There really should be only one main variant among any group of
4239 variants of a given type (and all of the MAIN_VARIANT values for all
4240 members of the group should point to that one type) but sometimes the C
4241 front-end messes this up for array types, so we work around that bug
4242 here. */
4243 if (TREE_CODE (type) == ARRAY_TYPE)
4244 while (type != TYPE_MAIN_VARIANT (type))
4245 type = TYPE_MAIN_VARIANT (type);
4246
4247 return type;
4248 }
4249
4250 /* Return nonzero if the given type node represents a tagged type. */
4251
4252 static inline int
4253 is_tagged_type (const_tree type)
4254 {
4255 enum tree_code code = TREE_CODE (type);
4256
4257 return (code == RECORD_TYPE || code == UNION_TYPE
4258 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4259 }
4260
4261 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4262
4263 static void
4264 get_ref_die_offset_label (char *label, dw_die_ref ref)
4265 {
4266 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4267 }
4268
4269 /* Return die_offset of a DIE reference to a base type. */
4270
4271 static unsigned long int
4272 get_base_type_offset (dw_die_ref ref)
4273 {
4274 if (ref->die_offset)
4275 return ref->die_offset;
4276 if (comp_unit_die ()->die_abbrev)
4277 {
4278 calc_base_type_die_sizes ();
4279 gcc_assert (ref->die_offset);
4280 }
4281 return ref->die_offset;
4282 }
4283
4284 /* Return die_offset of a DIE reference other than base type. */
4285
4286 static unsigned long int
4287 get_ref_die_offset (dw_die_ref ref)
4288 {
4289 gcc_assert (ref->die_offset);
4290 return ref->die_offset;
4291 }
4292
4293 /* Convert a DIE tag into its string name. */
4294
4295 static const char *
4296 dwarf_tag_name (unsigned int tag)
4297 {
4298 const char *name = get_DW_TAG_name (tag);
4299
4300 if (name != NULL)
4301 return name;
4302
4303 return "DW_TAG_<unknown>";
4304 }
4305
4306 /* Convert a DWARF attribute code into its string name. */
4307
4308 static const char *
4309 dwarf_attr_name (unsigned int attr)
4310 {
4311 const char *name;
4312
4313 switch (attr)
4314 {
4315 #if VMS_DEBUGGING_INFO
4316 case DW_AT_HP_prologue:
4317 return "DW_AT_HP_prologue";
4318 #else
4319 case DW_AT_MIPS_loop_unroll_factor:
4320 return "DW_AT_MIPS_loop_unroll_factor";
4321 #endif
4322
4323 #if VMS_DEBUGGING_INFO
4324 case DW_AT_HP_epilogue:
4325 return "DW_AT_HP_epilogue";
4326 #else
4327 case DW_AT_MIPS_stride:
4328 return "DW_AT_MIPS_stride";
4329 #endif
4330 }
4331
4332 name = get_DW_AT_name (attr);
4333
4334 if (name != NULL)
4335 return name;
4336
4337 return "DW_AT_<unknown>";
4338 }
4339
4340 /* Convert a DWARF value form code into its string name. */
4341
4342 static const char *
4343 dwarf_form_name (unsigned int form)
4344 {
4345 const char *name = get_DW_FORM_name (form);
4346
4347 if (name != NULL)
4348 return name;
4349
4350 return "DW_FORM_<unknown>";
4351 }
4352 \f
4353 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4354 instance of an inlined instance of a decl which is local to an inline
4355 function, so we have to trace all of the way back through the origin chain
4356 to find out what sort of node actually served as the original seed for the
4357 given block. */
4358
4359 static tree
4360 decl_ultimate_origin (const_tree decl)
4361 {
4362 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4363 return NULL_TREE;
4364
4365 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4366 we're trying to output the abstract instance of this function. */
4367 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4368 return NULL_TREE;
4369
4370 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4371 most distant ancestor, this should never happen. */
4372 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4373
4374 return DECL_ABSTRACT_ORIGIN (decl);
4375 }
4376
4377 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4378 of a virtual function may refer to a base class, so we check the 'this'
4379 parameter. */
4380
4381 static tree
4382 decl_class_context (tree decl)
4383 {
4384 tree context = NULL_TREE;
4385
4386 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4387 context = DECL_CONTEXT (decl);
4388 else
4389 context = TYPE_MAIN_VARIANT
4390 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4391
4392 if (context && !TYPE_P (context))
4393 context = NULL_TREE;
4394
4395 return context;
4396 }
4397 \f
4398 /* Add an attribute/value pair to a DIE. */
4399
4400 static inline void
4401 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4402 {
4403 /* Maybe this should be an assert? */
4404 if (die == NULL)
4405 return;
4406
4407 if (flag_checking)
4408 {
4409 /* Check we do not add duplicate attrs. Can't use get_AT here
4410 because that recurses to the specification/abstract origin DIE. */
4411 dw_attr_node *a;
4412 unsigned ix;
4413 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4414 gcc_assert (a->dw_attr != attr->dw_attr);
4415 }
4416
4417 vec_safe_reserve (die->die_attr, 1);
4418 vec_safe_push (die->die_attr, *attr);
4419 }
4420
4421 static inline enum dw_val_class
4422 AT_class (dw_attr_node *a)
4423 {
4424 return a->dw_attr_val.val_class;
4425 }
4426
4427 /* Return the index for any attribute that will be referenced with a
4428 DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String
4429 indices are stored in dw_attr_val.v.val_str for reference counting
4430 pruning. */
4431
4432 static inline unsigned int
4433 AT_index (dw_attr_node *a)
4434 {
4435 if (AT_class (a) == dw_val_class_str)
4436 return a->dw_attr_val.v.val_str->index;
4437 else if (a->dw_attr_val.val_entry != NULL)
4438 return a->dw_attr_val.val_entry->index;
4439 return NOT_INDEXED;
4440 }
4441
4442 /* Add a flag value attribute to a DIE. */
4443
4444 static inline void
4445 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4446 {
4447 dw_attr_node attr;
4448
4449 attr.dw_attr = attr_kind;
4450 attr.dw_attr_val.val_class = dw_val_class_flag;
4451 attr.dw_attr_val.val_entry = NULL;
4452 attr.dw_attr_val.v.val_flag = flag;
4453 add_dwarf_attr (die, &attr);
4454 }
4455
4456 static inline unsigned
4457 AT_flag (dw_attr_node *a)
4458 {
4459 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4460 return a->dw_attr_val.v.val_flag;
4461 }
4462
4463 /* Add a signed integer attribute value to a DIE. */
4464
4465 static inline void
4466 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4467 {
4468 dw_attr_node attr;
4469
4470 attr.dw_attr = attr_kind;
4471 attr.dw_attr_val.val_class = dw_val_class_const;
4472 attr.dw_attr_val.val_entry = NULL;
4473 attr.dw_attr_val.v.val_int = int_val;
4474 add_dwarf_attr (die, &attr);
4475 }
4476
4477 static inline HOST_WIDE_INT
4478 AT_int (dw_attr_node *a)
4479 {
4480 gcc_assert (a && (AT_class (a) == dw_val_class_const
4481 || AT_class (a) == dw_val_class_const_implicit));
4482 return a->dw_attr_val.v.val_int;
4483 }
4484
4485 /* Add an unsigned integer attribute value to a DIE. */
4486
4487 static inline void
4488 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4489 unsigned HOST_WIDE_INT unsigned_val)
4490 {
4491 dw_attr_node attr;
4492
4493 attr.dw_attr = attr_kind;
4494 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4495 attr.dw_attr_val.val_entry = NULL;
4496 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4497 add_dwarf_attr (die, &attr);
4498 }
4499
4500 static inline unsigned HOST_WIDE_INT
4501 AT_unsigned (dw_attr_node *a)
4502 {
4503 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4504 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4505 return a->dw_attr_val.v.val_unsigned;
4506 }
4507
4508 /* Add an unsigned wide integer attribute value to a DIE. */
4509
4510 static inline void
4511 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4512 const wide_int& w)
4513 {
4514 dw_attr_node attr;
4515
4516 attr.dw_attr = attr_kind;
4517 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4518 attr.dw_attr_val.val_entry = NULL;
4519 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4520 *attr.dw_attr_val.v.val_wide = w;
4521 add_dwarf_attr (die, &attr);
4522 }
4523
4524 /* Add an unsigned double integer attribute value to a DIE. */
4525
4526 static inline void
4527 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4528 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4529 {
4530 dw_attr_node attr;
4531
4532 attr.dw_attr = attr_kind;
4533 attr.dw_attr_val.val_class = dw_val_class_const_double;
4534 attr.dw_attr_val.val_entry = NULL;
4535 attr.dw_attr_val.v.val_double.high = high;
4536 attr.dw_attr_val.v.val_double.low = low;
4537 add_dwarf_attr (die, &attr);
4538 }
4539
4540 /* Add a vector (e.g. floating point constant) attribute value to a DIE. */
4541
4542 static inline void
4543 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4544 unsigned int length, unsigned int elt_size, unsigned char *array)
4545 {
4546 dw_attr_node attr;
4547
4548 attr.dw_attr = attr_kind;
4549 attr.dw_attr_val.val_class = dw_val_class_vec;
4550 attr.dw_attr_val.val_entry = NULL;
4551 attr.dw_attr_val.v.val_vec.length = length;
4552 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4553 attr.dw_attr_val.v.val_vec.array = array;
4554 add_dwarf_attr (die, &attr);
4555 }
4556
4557 /* Add an 8-byte data attribute value to a DIE. */
4558
4559 static inline void
4560 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4561 unsigned char data8[8])
4562 {
4563 dw_attr_node attr;
4564
4565 attr.dw_attr = attr_kind;
4566 attr.dw_attr_val.val_class = dw_val_class_data8;
4567 attr.dw_attr_val.val_entry = NULL;
4568 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4569 add_dwarf_attr (die, &attr);
4570 }
4571
4572 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4573 dwarf_split_debug_info, address attributes in dies destined for the
4574 final executable have force_direct set to avoid using indexed
4575 references. */
4576
4577 static inline void
4578 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4579 bool force_direct)
4580 {
4581 dw_attr_node attr;
4582 char * lbl_id;
4583
4584 lbl_id = xstrdup (lbl_low);
4585 attr.dw_attr = DW_AT_low_pc;
4586 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4587 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4588 if (dwarf_split_debug_info && !force_direct)
4589 attr.dw_attr_val.val_entry
4590 = add_addr_table_entry (lbl_id, ate_kind_label);
4591 else
4592 attr.dw_attr_val.val_entry = NULL;
4593 add_dwarf_attr (die, &attr);
4594
4595 attr.dw_attr = DW_AT_high_pc;
4596 if (dwarf_version < 4)
4597 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4598 else
4599 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4600 lbl_id = xstrdup (lbl_high);
4601 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4602 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4603 && dwarf_split_debug_info && !force_direct)
4604 attr.dw_attr_val.val_entry
4605 = add_addr_table_entry (lbl_id, ate_kind_label);
4606 else
4607 attr.dw_attr_val.val_entry = NULL;
4608 add_dwarf_attr (die, &attr);
4609 }
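/* Note that for DWARF 4 and later the high PC gets class
   dw_val_class_high_pc, which allows the output code to emit it as an
   offset from DW_AT_low_pc instead of a second absolute address; older
   versions keep it as a plain label. */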
4610
4611 /* Hash and equality functions for debug_str_hash. */
4612
4613 hashval_t
4614 indirect_string_hasher::hash (indirect_string_node *x)
4615 {
4616 return htab_hash_string (x->str);
4617 }
4618
4619 bool
4620 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4621 {
4622 return strcmp (x1->str, x2) == 0;
4623 }
4624
4625 /* Add STR to the given string hash table. */
4626
4627 static struct indirect_string_node *
4628 find_AT_string_in_table (const char *str,
4629 hash_table<indirect_string_hasher> *table)
4630 {
4631 struct indirect_string_node *node;
4632
4633 indirect_string_node **slot
4634 = table->find_slot_with_hash (str, htab_hash_string (str), INSERT);
4635 if (*slot == NULL)
4636 {
4637 node = ggc_cleared_alloc<indirect_string_node> ();
4638 node->str = ggc_strdup (str);
4639 *slot = node;
4640 }
4641 else
4642 node = *slot;
4643
4644 node->refcount++;
4645 return node;
4646 }
4647
4648 /* Add STR to the indirect string hash table. */
4649
4650 static struct indirect_string_node *
4651 find_AT_string (const char *str)
4652 {
4653 if (! debug_str_hash)
4654 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4655
4656 return find_AT_string_in_table (str, debug_str_hash);
4657 }
4658
4659 /* Add a string attribute value to a DIE. */
4660
4661 static inline void
4662 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4663 {
4664 dw_attr_node attr;
4665 struct indirect_string_node *node;
4666
4667 node = find_AT_string (str);
4668
4669 attr.dw_attr = attr_kind;
4670 attr.dw_attr_val.val_class = dw_val_class_str;
4671 attr.dw_attr_val.val_entry = NULL;
4672 attr.dw_attr_val.v.val_str = node;
4673 add_dwarf_attr (die, &attr);
4674 }
4675
4676 static inline const char *
4677 AT_string (dw_attr_node *a)
4678 {
4679 gcc_assert (a && AT_class (a) == dw_val_class_str);
4680 return a->dw_attr_val.v.val_str->str;
4681 }
4682
4683 /* Call this function directly to bypass AT_string_form's logic to put
4684 the string inline in the die. */
4685
4686 static void
4687 set_indirect_string (struct indirect_string_node *node)
4688 {
4689 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4690 /* Already indirect is a no op. */
4691 if (node->form == DW_FORM_strp
4692 || node->form == DW_FORM_line_strp
4693 || node->form == dwarf_FORM (DW_FORM_strx))
4694 {
4695 gcc_assert (node->label);
4696 return;
4697 }
4698 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4699 ++dw2_string_counter;
4700 node->label = xstrdup (label);
4701
4702 if (!dwarf_split_debug_info)
4703 {
4704 node->form = DW_FORM_strp;
4705 node->index = NOT_INDEXED;
4706 }
4707 else
4708 {
4709 node->form = dwarf_FORM (DW_FORM_strx);
4710 node->index = NO_INDEX_ASSIGNED;
4711 }
4712 }
4713
4714 /* A helper function for dwarf2out_finish, called to reset indirect
4715 string decisions done for early LTO dwarf output before fat object
4716 dwarf output. */
4717
4718 int
4719 reset_indirect_string (indirect_string_node **h, void *)
4720 {
4721 struct indirect_string_node *node = *h;
4722 if (node->form == DW_FORM_strp || node->form == dwarf_FORM (DW_FORM_strx))
4723 {
4724 free (node->label);
4725 node->label = NULL;
4726 node->form = (dwarf_form) 0;
4727 node->index = 0;
4728 }
4729 return 1;
4730 }
4731
4732 /* Find out whether a string should be output inline in DIE
4733 or out-of-line in .debug_str section. */
4734
4735 static enum dwarf_form
4736 find_string_form (struct indirect_string_node *node)
4737 {
4738 unsigned int len;
4739
4740 if (node->form)
4741 return node->form;
4742
4743 len = strlen (node->str) + 1;
4744
4745 /* If the string is shorter than or equal to the size of the reference,
4746 it is always better to put it inline. */
4747 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4748 return node->form = DW_FORM_string;
4749
4750 /* If we cannot expect the linker to merge strings in .debug_str
4751 section, only put it into .debug_str if it is worth even in this
4752 single module. */
4753 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4754 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4755 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4756 return node->form = DW_FORM_string;
4757
4758 set_indirect_string (node);
4759
4760 return node->form;
4761 }
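/* A worked example, assuming a 4-byte DWARF offset size and no string
   merging in the linker: a string whose length including the terminating
   NUL is 9 stays inline when referenced once, since (9 - 4) * 1 <= 9, but
   moves to .debug_str when referenced twice, since (9 - 4) * 2 > 9. */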
4762
4763 /* Find out whether the string referenced from the attribute should be
4764 output inline in DIE or out-of-line in .debug_str section. */
4765
4766 static enum dwarf_form
4767 AT_string_form (dw_attr_node *a)
4768 {
4769 gcc_assert (a && AT_class (a) == dw_val_class_str);
4770 return find_string_form (a->dw_attr_val.v.val_str);
4771 }
4772
4773 /* Add a DIE reference attribute value to a DIE. */
4774
4775 static inline void
4776 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4777 {
4778 dw_attr_node attr;
4779 gcc_checking_assert (targ_die != NULL);
4780
4781 /* With LTO we can end up trying to reference something we didn't create
4782 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4783 if (targ_die == NULL)
4784 return;
4785
4786 attr.dw_attr = attr_kind;
4787 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4788 attr.dw_attr_val.val_entry = NULL;
4789 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4790 attr.dw_attr_val.v.val_die_ref.external = 0;
4791 add_dwarf_attr (die, &attr);
4792 }
4793
4794 /* Change DIE reference REF to point to NEW_DIE instead. */
4795
4796 static inline void
4797 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4798 {
4799 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4800 ref->dw_attr_val.v.val_die_ref.die = new_die;
4801 ref->dw_attr_val.v.val_die_ref.external = 0;
4802 }
4803
4804 /* Add an AT_specification attribute to a DIE, and also make the back
4805 pointer from the specification to the definition. */
4806
4807 static inline void
4808 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4809 {
4810 add_AT_die_ref (die, DW_AT_specification, targ_die);
4811 gcc_assert (!targ_die->die_definition);
4812 targ_die->die_definition = die;
4813 }
4814
4815 static inline dw_die_ref
4816 AT_ref (dw_attr_node *a)
4817 {
4818 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4819 return a->dw_attr_val.v.val_die_ref.die;
4820 }
4821
4822 static inline int
4823 AT_ref_external (dw_attr_node *a)
4824 {
4825 if (a && AT_class (a) == dw_val_class_die_ref)
4826 return a->dw_attr_val.v.val_die_ref.external;
4827
4828 return 0;
4829 }
4830
4831 static inline void
4832 set_AT_ref_external (dw_attr_node *a, int i)
4833 {
4834 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4835 a->dw_attr_val.v.val_die_ref.external = i;
4836 }
4837
4838 /* Add an FDE reference attribute value to a DIE. */
4839
4840 static inline void
4841 add_AT_fde_ref (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int targ_fde)
4842 {
4843 dw_attr_node attr;
4844
4845 attr.dw_attr = attr_kind;
4846 attr.dw_attr_val.val_class = dw_val_class_fde_ref;
4847 attr.dw_attr_val.val_entry = NULL;
4848 attr.dw_attr_val.v.val_fde_index = targ_fde;
4849 add_dwarf_attr (die, &attr);
4850 }
4851
4852 /* Add a location description attribute value to a DIE. */
4853
4854 static inline void
4855 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4856 {
4857 dw_attr_node attr;
4858
4859 attr.dw_attr = attr_kind;
4860 attr.dw_attr_val.val_class = dw_val_class_loc;
4861 attr.dw_attr_val.val_entry = NULL;
4862 attr.dw_attr_val.v.val_loc = loc;
4863 add_dwarf_attr (die, &attr);
4864 }
4865
4866 static inline dw_loc_descr_ref
4867 AT_loc (dw_attr_node *a)
4868 {
4869 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4870 return a->dw_attr_val.v.val_loc;
4871 }
4872
4873 static inline void
4874 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4875 {
4876 dw_attr_node attr;
4877
4878 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4879 return;
4880
4881 attr.dw_attr = attr_kind;
4882 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4883 attr.dw_attr_val.val_entry = NULL;
4884 attr.dw_attr_val.v.val_loc_list = loc_list;
4885 add_dwarf_attr (die, &attr);
4886 have_location_lists = true;
4887 }
4888
4889 static inline dw_loc_list_ref
4890 AT_loc_list (dw_attr_node *a)
4891 {
4892 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4893 return a->dw_attr_val.v.val_loc_list;
4894 }
4895
4896 /* Add a view list attribute to DIE. It must have a DW_AT_location
4897 attribute, because the view list complements the location list. */
4898
4899 static inline void
4900 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4901 {
4902 dw_attr_node attr;
4903
4904 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4905 return;
4906
4907 attr.dw_attr = attr_kind;
4908 attr.dw_attr_val.val_class = dw_val_class_view_list;
4909 attr.dw_attr_val.val_entry = NULL;
4910 attr.dw_attr_val.v.val_view_list = die;
4911 add_dwarf_attr (die, &attr);
4912 gcc_checking_assert (get_AT (die, DW_AT_location));
4913 gcc_assert (have_location_lists);
4914 }
4915
4916 /* Return a pointer to the location list referenced by the attribute.
4917 If the named attribute is a view list, look up the corresponding
4918 DW_AT_location attribute and return its location list. */
4919
4920 static inline dw_loc_list_ref *
4921 AT_loc_list_ptr (dw_attr_node *a)
4922 {
4923 gcc_assert (a);
4924 switch (AT_class (a))
4925 {
4926 case dw_val_class_loc_list:
4927 return &a->dw_attr_val.v.val_loc_list;
4928 case dw_val_class_view_list:
4929 {
4930 dw_attr_node *l;
4931 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4932 if (!l)
4933 return NULL;
4934 gcc_checking_assert (l + 1 == a);
4935 return AT_loc_list_ptr (l);
4936 }
4937 default:
4938 gcc_unreachable ();
4939 }
4940 }
4941
4942 /* Return the location attribute value associated with a view list
4943 attribute value. */
4944
4945 static inline dw_val_node *
4946 view_list_to_loc_list_val_node (dw_val_node *val)
4947 {
4948 gcc_assert (val->val_class == dw_val_class_view_list);
4949 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4950 if (!loc)
4951 return NULL;
4952 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4953 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4954 return &loc->dw_attr_val;
4955 }
4956
4957 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4958 {
4959 static hashval_t hash (addr_table_entry *);
4960 static bool equal (addr_table_entry *, addr_table_entry *);
4961 };
4962
4963 /* Table of entries into the .debug_addr section. */
4964
4965 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4966
4967 /* Hash an address_table_entry. */
4968
4969 hashval_t
4970 addr_hasher::hash (addr_table_entry *a)
4971 {
4972 inchash::hash hstate;
4973 switch (a->kind)
4974 {
4975 case ate_kind_rtx:
4976 hstate.add_int (0);
4977 break;
4978 case ate_kind_rtx_dtprel:
4979 hstate.add_int (1);
4980 break;
4981 case ate_kind_label:
4982 return htab_hash_string (a->addr.label);
4983 default:
4984 gcc_unreachable ();
4985 }
4986 inchash::add_rtx (a->addr.rtl, hstate);
4987 return hstate.end ();
4988 }
4989
4990 /* Determine equality for two address_table_entries. */
4991
4992 bool
4993 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4994 {
4995 if (a1->kind != a2->kind)
4996 return 0;
4997 switch (a1->kind)
4998 {
4999 case ate_kind_rtx:
5000 case ate_kind_rtx_dtprel:
5001 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
5002 case ate_kind_label:
5003 return strcmp (a1->addr.label, a2->addr.label) == 0;
5004 default:
5005 gcc_unreachable ();
5006 }
5007 }
5008
5009 /* Initialize an addr_table_entry. */
5010
5011 void
5012 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
5013 {
5014 e->kind = kind;
5015 switch (kind)
5016 {
5017 case ate_kind_rtx:
5018 case ate_kind_rtx_dtprel:
5019 e->addr.rtl = (rtx) addr;
5020 break;
5021 case ate_kind_label:
5022 e->addr.label = (char *) addr;
5023 break;
5024 }
5025 e->refcount = 0;
5026 e->index = NO_INDEX_ASSIGNED;
5027 }
5028
5029 /* Add an entry for ADDR of kind KIND to the address table. Defer setting
5030 an index until output time. */
5031
5032 static addr_table_entry *
5033 add_addr_table_entry (void *addr, enum ate_kind kind)
5034 {
5035 addr_table_entry *node;
5036 addr_table_entry finder;
5037
5038 gcc_assert (dwarf_split_debug_info);
5039 if (! addr_index_table)
5040 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5041 init_addr_table_entry (&finder, kind, addr);
5042 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5043
5044 if (*slot == HTAB_EMPTY_ENTRY)
5045 {
5046 node = ggc_cleared_alloc<addr_table_entry> ();
5047 init_addr_table_entry (node, kind, addr);
5048 *slot = node;
5049 }
5050 else
5051 node = *slot;
5052
5053 node->refcount++;
5054 return node;
5055 }
5056
5057 /* Remove an entry from the addr table by decrementing its refcount.
5058 Strictly, decrementing the refcount would be enough, but the
5059 assertion that the entry is actually in the table has found
5060 bugs. */
5061
5062 static void
5063 remove_addr_table_entry (addr_table_entry *entry)
5064 {
5065 gcc_assert (dwarf_split_debug_info && addr_index_table);
5066 /* After an index is assigned, the table is frozen. */
5067 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5068 entry->refcount--;
5069 }
5070
5071 /* Given a location list, remove all addresses it refers to from the
5072 address_table. */
5073
5074 static void
5075 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5076 {
5077 for (; descr; descr = descr->dw_loc_next)
5078 if (descr->dw_loc_oprnd1.val_entry != NULL)
5079 {
5080 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5081 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5082 }
5083 }
5084
5085 /* A helper function for dwarf2out_finish called through
5086 htab_traverse. Assign an addr_table_entry its index. All entries
5087 must be collected into the table when this function is called,
5088 because the indexing code relies on htab_traverse to traverse nodes
5089 in the same order for each run. */
5090
5091 int
5092 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5093 {
5094 addr_table_entry *node = *h;
5095
5096 /* Don't index unreferenced nodes. */
5097 if (node->refcount == 0)
5098 return 1;
5099
5100 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5101 node->index = *index;
5102 *index += 1;
5103
5104 return 1;
5105 }
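/* A minimal usage sketch for the address table above (illustrative only;
   the condition below is hypothetical):

     addr_table_entry *e = add_addr_table_entry (rtl, ate_kind_rtx);
     ...
     if (attribute_was_dropped)   <-- hypothetical condition
       remove_addr_table_entry (e);

   Indices are handed out only at output time by traversing the table with
   index_addr_table_entry, and only for entries whose refcount is still
   positive; until then e->index stays NO_INDEX_ASSIGNED.  */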
5106
5107 /* Add an address constant attribute value to a DIE. When using
5108 dwarf_split_debug_info, address attributes in dies destined for the
5109 final executable should be direct references--setting the parameter
5110 force_direct ensures this behavior. */
5111
5112 static inline void
5113 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5114 bool force_direct)
5115 {
5116 dw_attr_node attr;
5117
5118 attr.dw_attr = attr_kind;
5119 attr.dw_attr_val.val_class = dw_val_class_addr;
5120 attr.dw_attr_val.v.val_addr = addr;
5121 if (dwarf_split_debug_info && !force_direct)
5122 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5123 else
5124 attr.dw_attr_val.val_entry = NULL;
5125 add_dwarf_attr (die, &attr);
5126 }
5127
5128 /* Get the RTX from an address DIE attribute. */
5129
5130 static inline rtx
5131 AT_addr (dw_attr_node *a)
5132 {
5133 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5134 return a->dw_attr_val.v.val_addr;
5135 }
5136
5137 /* Add a file attribute value to a DIE. */
5138
5139 static inline void
5140 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5141 struct dwarf_file_data *fd)
5142 {
5143 dw_attr_node attr;
5144
5145 attr.dw_attr = attr_kind;
5146 attr.dw_attr_val.val_class = dw_val_class_file;
5147 attr.dw_attr_val.val_entry = NULL;
5148 attr.dw_attr_val.v.val_file = fd;
5149 add_dwarf_attr (die, &attr);
5150 }
5151
5152 /* Get the dwarf_file_data from a file DIE attribute. */
5153
5154 static inline struct dwarf_file_data *
5155 AT_file (dw_attr_node *a)
5156 {
5157 gcc_assert (a && (AT_class (a) == dw_val_class_file
5158 || AT_class (a) == dw_val_class_file_implicit));
5159 return a->dw_attr_val.v.val_file;
5160 }
5161
5162 /* Add a vms delta attribute value to a DIE. */
5163
5164 static inline void
5165 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5166 const char *lbl1, const char *lbl2)
5167 {
5168 dw_attr_node attr;
5169
5170 attr.dw_attr = attr_kind;
5171 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5172 attr.dw_attr_val.val_entry = NULL;
5173 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5174 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5175 add_dwarf_attr (die, &attr);
5176 }
5177
5178 /* Add a symbolic view identifier attribute value to a DIE. */
5179
5180 static inline void
5181 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5182 const char *view_label)
5183 {
5184 dw_attr_node attr;
5185
5186 attr.dw_attr = attr_kind;
5187 attr.dw_attr_val.val_class = dw_val_class_symview;
5188 attr.dw_attr_val.val_entry = NULL;
5189 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5190 add_dwarf_attr (die, &attr);
5191 }
5192
5193 /* Add a label identifier attribute value to a DIE. */
5194
5195 static inline void
5196 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5197 const char *lbl_id)
5198 {
5199 dw_attr_node attr;
5200
5201 attr.dw_attr = attr_kind;
5202 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5203 attr.dw_attr_val.val_entry = NULL;
5204 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5205 if (dwarf_split_debug_info)
5206 attr.dw_attr_val.val_entry
5207 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5208 ate_kind_label);
5209 add_dwarf_attr (die, &attr);
5210 }
5211
5212 /* Add a section offset attribute value to a DIE, an offset into the
5213 debug_line section. */
5214
5215 static inline void
5216 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5217 const char *label)
5218 {
5219 dw_attr_node attr;
5220
5221 attr.dw_attr = attr_kind;
5222 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5223 attr.dw_attr_val.val_entry = NULL;
5224 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5225 add_dwarf_attr (die, &attr);
5226 }
5227
5228 /* Add a section offset attribute value to a DIE, an offset into the
5229 debug_loclists section. */
5230
5231 static inline void
5232 add_AT_loclistsptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5233 const char *label)
5234 {
5235 dw_attr_node attr;
5236
5237 attr.dw_attr = attr_kind;
5238 attr.dw_attr_val.val_class = dw_val_class_loclistsptr;
5239 attr.dw_attr_val.val_entry = NULL;
5240 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5241 add_dwarf_attr (die, &attr);
5242 }
5243
5244 /* Add a section offset attribute value to a DIE, an offset into the
5245 debug_macinfo section. */
5246
5247 static inline void
5248 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5249 const char *label)
5250 {
5251 dw_attr_node attr;
5252
5253 attr.dw_attr = attr_kind;
5254 attr.dw_attr_val.val_class = dw_val_class_macptr;
5255 attr.dw_attr_val.val_entry = NULL;
5256 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5257 add_dwarf_attr (die, &attr);
5258 }
5259
5260 /* Add an offset attribute value to a DIE. */
5261
5262 static inline void
5263 add_AT_offset (dw_die_ref die, enum dwarf_attribute attr_kind,
5264 unsigned HOST_WIDE_INT offset)
5265 {
5266 dw_attr_node attr;
5267
5268 attr.dw_attr = attr_kind;
5269 attr.dw_attr_val.val_class = dw_val_class_offset;
5270 attr.dw_attr_val.val_entry = NULL;
5271 attr.dw_attr_val.v.val_offset = offset;
5272 add_dwarf_attr (die, &attr);
5273 }
5274
5275 /* Add a range_list attribute value to a DIE. When using
5276 dwarf_split_debug_info, address attributes in dies destined for the
5277 final executable should be direct references--setting the parameter
5278 force_direct ensures this behavior. */
5279
5280 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5281 #define RELOCATED_OFFSET (NULL)
5282
5283 static void
5284 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5285 long unsigned int offset, bool force_direct)
5286 {
5287 dw_attr_node attr;
5288
5289 attr.dw_attr = attr_kind;
5290 attr.dw_attr_val.val_class = dw_val_class_range_list;
5291 /* For the range_list attribute, use val_entry to store whether the
5292 offset should follow split-debug-info or normal semantics. This
5293 value is read in output_range_list_offset. */
5294 if (dwarf_split_debug_info && !force_direct)
5295 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5296 else
5297 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5298 attr.dw_attr_val.v.val_offset = offset;
5299 add_dwarf_attr (die, &attr);
5300 }
5301
5302 /* Return the start label of a delta attribute. */
5303
5304 static inline const char *
5305 AT_vms_delta1 (dw_attr_node *a)
5306 {
5307 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5308 return a->dw_attr_val.v.val_vms_delta.lbl1;
5309 }
5310
5311 /* Return the end label of a delta attribute. */
5312
5313 static inline const char *
5314 AT_vms_delta2 (dw_attr_node *a)
5315 {
5316 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5317 return a->dw_attr_val.v.val_vms_delta.lbl2;
5318 }
5319
5320 static inline const char *
5321 AT_lbl (dw_attr_node *a)
5322 {
5323 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5324 || AT_class (a) == dw_val_class_lineptr
5325 || AT_class (a) == dw_val_class_macptr
5326 || AT_class (a) == dw_val_class_loclistsptr
5327 || AT_class (a) == dw_val_class_high_pc));
5328 return a->dw_attr_val.v.val_lbl_id;
5329 }
5330
5331 /* Get the attribute of type attr_kind. */
5332
5333 static dw_attr_node *
5334 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5335 {
5336 dw_attr_node *a;
5337 unsigned ix;
5338 dw_die_ref spec = NULL;
5339
5340 if (! die)
5341 return NULL;
5342
5343 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5344 if (a->dw_attr == attr_kind)
5345 return a;
5346 else if (a->dw_attr == DW_AT_specification
5347 || a->dw_attr == DW_AT_abstract_origin)
5348 spec = AT_ref (a);
5349
5350 if (spec)
5351 return get_AT (spec, attr_kind);
5352
5353 return NULL;
5354 }
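/* For example, a member function definition DIE that carries only
   DW_AT_specification still yields its DW_AT_name through get_AT, because
   the lookup above falls back to the specification (or abstract origin)
   DIE when the attribute is not found directly.  */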
5355
5356 /* Returns the parent of the declaration of DIE. */
5357
5358 static dw_die_ref
5359 get_die_parent (dw_die_ref die)
5360 {
5361 dw_die_ref t;
5362
5363 if (!die)
5364 return NULL;
5365
5366 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5367 || (t = get_AT_ref (die, DW_AT_specification)))
5368 die = t;
5369
5370 return die->die_parent;
5371 }
5372
5373 /* Return the "low pc" attribute value, typically associated with a subprogram
5374 DIE. Return null if the "low pc" attribute is either not present, or if it
5375 cannot be represented as an assembler label identifier. */
5376
5377 static inline const char *
5378 get_AT_low_pc (dw_die_ref die)
5379 {
5380 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5381
5382 return a ? AT_lbl (a) : NULL;
5383 }
5384
5385 /* Return the "high pc" attribute value, typically associated with a subprogram
5386 DIE. Return null if the "high pc" attribute is either not present, or if it
5387 cannot be represented as an assembler label identifier. */
5388
5389 static inline const char *
5390 get_AT_hi_pc (dw_die_ref die)
5391 {
5392 dw_attr_node *a = get_AT (die, DW_AT_high_pc);
5393
5394 return a ? AT_lbl (a) : NULL;
5395 }
5396
5397 /* Return the value of the string attribute designated by ATTR_KIND, or
5398 NULL if it is not present. */
5399
5400 static inline const char *
5401 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5402 {
5403 dw_attr_node *a = get_AT (die, attr_kind);
5404
5405 return a ? AT_string (a) : NULL;
5406 }
5407
5408 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5409 if it is not present. */
5410
5411 static inline int
5412 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5413 {
5414 dw_attr_node *a = get_AT (die, attr_kind);
5415
5416 return a ? AT_flag (a) : 0;
5417 }
5418
5419 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5420 if it is not present. */
5421
5422 static inline unsigned
5423 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5424 {
5425 dw_attr_node *a = get_AT (die, attr_kind);
5426
5427 return a ? AT_unsigned (a) : 0;
5428 }
5429
5430 static inline dw_die_ref
5431 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5432 {
5433 dw_attr_node *a = get_AT (die, attr_kind);
5434
5435 return a ? AT_ref (a) : NULL;
5436 }
5437
5438 static inline struct dwarf_file_data *
5439 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5440 {
5441 dw_attr_node *a = get_AT (die, attr_kind);
5442
5443 return a ? AT_file (a) : NULL;
5444 }
5445
5446 /* Return TRUE if the language is C++. */
5447
5448 static inline bool
5449 is_cxx (void)
5450 {
5451 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5452
5453 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5454 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5455 }
5456
5457 /* Return TRUE if DECL was created by the C++ frontend. */
5458
5459 static bool
5460 is_cxx (const_tree decl)
5461 {
5462 if (in_lto_p)
5463 {
5464 const_tree context = get_ultimate_context (decl);
5465 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5466 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5467 }
5468 return is_cxx ();
5469 }
5470
5471 /* Return TRUE if the language is Fortran. */
5472
5473 static inline bool
5474 is_fortran (void)
5475 {
5476 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5477
5478 return (lang == DW_LANG_Fortran77
5479 || lang == DW_LANG_Fortran90
5480 || lang == DW_LANG_Fortran95
5481 || lang == DW_LANG_Fortran03
5482 || lang == DW_LANG_Fortran08);
5483 }
5484
5485 static inline bool
5486 is_fortran (const_tree decl)
5487 {
5488 if (in_lto_p)
5489 {
5490 const_tree context = get_ultimate_context (decl);
5491 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5492 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5493 "GNU Fortran", 11) == 0
5494 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5495 "GNU F77") == 0);
5496 }
5497 return is_fortran ();
5498 }
5499
5500 /* Return TRUE if the language is Ada. */
5501
5502 static inline bool
5503 is_ada (void)
5504 {
5505 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5506
5507 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5508 }
5509
5510 /* Remove the specified attribute if present. Return TRUE if removal
5511 was successful. */
5512
5513 static bool
5514 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5515 {
5516 dw_attr_node *a;
5517 unsigned ix;
5518
5519 if (! die)
5520 return false;
5521
5522 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5523 if (a->dw_attr == attr_kind)
5524 {
5525 if (AT_class (a) == dw_val_class_str)
5526 if (a->dw_attr_val.v.val_str->refcount)
5527 a->dw_attr_val.v.val_str->refcount--;
5528
5529 /* vec::ordered_remove should help reduce the number of abbrevs
5530 that are needed. */
5531 die->die_attr->ordered_remove (ix);
5532 return true;
5533 }
5534 return false;
5535 }
5536
5537 /* Remove CHILD from its parent. PREV must have the property that
5538 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5539
5540 static void
5541 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5542 {
5543 gcc_assert (child->die_parent == prev->die_parent);
5544 gcc_assert (prev->die_sib == child);
5545 if (prev == child)
5546 {
5547 gcc_assert (child->die_parent->die_child == child);
5548 prev = NULL;
5549 }
5550 else
5551 prev->die_sib = child->die_sib;
5552 if (child->die_parent->die_child == child)
5553 child->die_parent->die_child = prev;
5554 child->die_sib = NULL;
5555 }
5556
5557 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5558 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5559
5560 static void
5561 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5562 {
5563 dw_die_ref parent = old_child->die_parent;
5564
5565 gcc_assert (parent == prev->die_parent);
5566 gcc_assert (prev->die_sib == old_child);
5567
5568 new_child->die_parent = parent;
5569 if (prev == old_child)
5570 {
5571 gcc_assert (parent->die_child == old_child);
5572 new_child->die_sib = new_child;
5573 }
5574 else
5575 {
5576 prev->die_sib = new_child;
5577 new_child->die_sib = old_child->die_sib;
5578 }
5579 if (old_child->die_parent->die_child == old_child)
5580 old_child->die_parent->die_child = new_child;
5581 old_child->die_sib = NULL;
5582 }
5583
5584 /* Move all children from OLD_PARENT to NEW_PARENT. */
5585
5586 static void
5587 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5588 {
5589 dw_die_ref c;
5590 new_parent->die_child = old_parent->die_child;
5591 old_parent->die_child = NULL;
5592 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5593 }
5594
5595 /* Remove child DIE whose die_tag is TAG. Do nothing if no child
5596 matches TAG. */
5597
5598 static void
5599 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5600 {
5601 dw_die_ref c;
5602
5603 c = die->die_child;
5604 if (c) do {
5605 dw_die_ref prev = c;
5606 c = c->die_sib;
5607 while (c->die_tag == tag)
5608 {
5609 remove_child_with_prev (c, prev);
5610 c->die_parent = NULL;
5611 /* Might have removed every child. */
5612 if (die->die_child == NULL)
5613 return;
5614 c = prev->die_sib;
5615 }
5616 } while (c != die->die_child);
5617 }
5618
5619 /* Add a CHILD_DIE as the last child of DIE. */
5620
5621 static void
5622 add_child_die (dw_die_ref die, dw_die_ref child_die)
5623 {
5624 /* FIXME this should probably be an assert. */
5625 if (! die || ! child_die)
5626 return;
5627 gcc_assert (die != child_die);
5628
5629 child_die->die_parent = die;
5630 if (die->die_child)
5631 {
5632 child_die->die_sib = die->die_child->die_sib;
5633 die->die_child->die_sib = child_die;
5634 }
5635 else
5636 child_die->die_sib = child_die;
5637 die->die_child = child_die;
5638 }
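/* For illustration: children of a DIE form a circular singly-linked list
   in which DIE->die_child points at the most recently added (i.e. last)
   child, and each child's die_sib points at the next sibling, wrapping
   around to the first one.  Adding A, B and C in that order with the
   function above therefore yields

     die->die_child == C,  A->die_sib == B,
     B->die_sib == C,      C->die_sib == A

   which is the invariant the sibling-walking code (FOR_EACH_CHILD,
   remove_child_with_prev, verify_die) depends on.  */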
5639
5640 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5641
5642 static void
5643 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5644 dw_die_ref after_die)
5645 {
5646 gcc_assert (die
5647 && child_die
5648 && after_die
5649 && die->die_child
5650 && die != child_die);
5651
5652 child_die->die_parent = die;
5653 child_die->die_sib = after_die->die_sib;
5654 after_die->die_sib = child_die;
5655 if (die->die_child == after_die)
5656 die->die_child = child_die;
5657 }
5658
5659 /* Unassociate CHILD from its parent, and make its parent be
5660 NEW_PARENT. */
5661
5662 static void
5663 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5664 {
5665 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5666 if (p->die_sib == child)
5667 {
5668 remove_child_with_prev (child, p);
5669 break;
5670 }
5671 add_child_die (new_parent, child);
5672 }
5673
5674 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5675 is the specification, to the end of PARENT's list of children.
5676 This is done by removing and re-adding it. */
5677
5678 static void
5679 splice_child_die (dw_die_ref parent, dw_die_ref child)
5680 {
5681 /* We want the declaration DIE from inside the class, not the
5682 specification DIE at toplevel. */
5683 if (child->die_parent != parent)
5684 {
5685 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5686
5687 if (tmp)
5688 child = tmp;
5689 }
5690
5691 gcc_assert (child->die_parent == parent
5692 || (child->die_parent
5693 == get_AT_ref (parent, DW_AT_specification)));
5694
5695 reparent_child (child, parent);
5696 }
5697
5698 /* Create and return a new die with TAG_VALUE as tag. */
5699
5700 static inline dw_die_ref
5701 new_die_raw (enum dwarf_tag tag_value)
5702 {
5703 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5704 die->die_tag = tag_value;
5705 return die;
5706 }
5707
5708 /* Create and return a new die with a parent of PARENT_DIE. If
5709 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5710 associated tree T must be supplied to determine parenthood
5711 later. */
5712
5713 static inline dw_die_ref
5714 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5715 {
5716 dw_die_ref die = new_die_raw (tag_value);
5717
5718 if (parent_die != NULL)
5719 add_child_die (parent_die, die);
5720 else
5721 {
5722 limbo_die_node *limbo_node;
5723
5724 /* No DIEs created after early dwarf should end up in limbo,
5725 because the limbo list should not persist past LTO
5726 streaming. */
5727 if (tag_value != DW_TAG_compile_unit
5728 /* These are allowed because they're generated while
5729 breaking out COMDAT units late. */
5730 && tag_value != DW_TAG_type_unit
5731 && tag_value != DW_TAG_skeleton_unit
5732 && !early_dwarf
5733 /* Allow nested functions to live in limbo because they will
5734 only temporarily live there, as decls_for_scope will fix
5735 them up. */
5736 && (TREE_CODE (t) != FUNCTION_DECL
5737 || !decl_function_context (t))
5738 /* Same as nested functions above but for types. Types that
5739 are local to a function will be fixed in
5740 decls_for_scope. */
5741 && (!RECORD_OR_UNION_TYPE_P (t)
5742 || !TYPE_CONTEXT (t)
5743 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5744 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5745 especially in the ltrans stage, but once we implement LTO
5746 dwarf streaming, we should remove this exception. */
5747 && !in_lto_p)
5748 {
5749 fprintf (stderr, "symbol ended up in limbo too late:");
5750 debug_generic_stmt (t);
5751 gcc_unreachable ();
5752 }
5753
5754 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5755 limbo_node->die = die;
5756 limbo_node->created_for = t;
5757 limbo_node->next = limbo_die_list;
5758 limbo_die_list = limbo_node;
5759 }
5760
5761 return die;
5762 }
5763
5764 /* Return the DIE associated with the given type specifier. */
5765
5766 static inline dw_die_ref
5767 lookup_type_die (tree type)
5768 {
5769 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5770 if (die && die->removed)
5771 {
5772 TYPE_SYMTAB_DIE (type) = NULL;
5773 return NULL;
5774 }
5775 return die;
5776 }
5777
5778 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5779 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5780 anonymous type instead of the one of the naming typedef. */
5781
5782 static inline dw_die_ref
5783 strip_naming_typedef (tree type, dw_die_ref type_die)
5784 {
5785 if (type
5786 && TREE_CODE (type) == RECORD_TYPE
5787 && type_die
5788 && type_die->die_tag == DW_TAG_typedef
5789 && is_naming_typedef_decl (TYPE_NAME (type)))
5790 type_die = get_AT_ref (type_die, DW_AT_type);
5791 return type_die;
5792 }
5793
5794 /* Like lookup_type_die, but if type is an anonymous type named by a
5795 typedef[1], return the DIE of the anonymous type instead of the one
5796 of the naming typedef. This is because in gen_typedef_die, we
5797 equated the anonymous struct named by the typedef with the DIE of
5798 the naming typedef. So by default, lookup_type_die on an anonymous
5799 struct yields the DIE of the naming typedef.
5800
5801 [1]: Read the comment of is_naming_typedef_decl to learn about what
5802 a naming typedef is. */
5803
5804 static inline dw_die_ref
5805 lookup_type_die_strip_naming_typedef (tree type)
5806 {
5807 dw_die_ref die = lookup_type_die (type);
5808 return strip_naming_typedef (type, die);
5809 }
5810
5811 /* Equate a DIE to a given type specifier. */
5812
5813 static inline void
5814 equate_type_number_to_die (tree type, dw_die_ref type_die)
5815 {
5816 TYPE_SYMTAB_DIE (type) = type_die;
5817 }
5818
5819 /* Returns a hash value for X (which really is a die_struct). */
5820
5821 inline hashval_t
5822 decl_die_hasher::hash (die_node *x)
5823 {
5824 return (hashval_t) x->decl_id;
5825 }
5826
5827 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5828
5829 inline bool
5830 decl_die_hasher::equal (die_node *x, tree y)
5831 {
5832 return (x->decl_id == DECL_UID (y));
5833 }
5834
5835 /* Return the DIE associated with a given declaration. */
5836
5837 static inline dw_die_ref
5838 lookup_decl_die (tree decl)
5839 {
5840 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5841 NO_INSERT);
5842 if (!die)
5843 return NULL;
5844 if ((*die)->removed)
5845 {
5846 decl_die_table->clear_slot (die);
5847 return NULL;
5848 }
5849 return *die;
5850 }
5851
5852
5853 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5854 style reference. Return true if we found one referring to a DIE for
5855 DECL, otherwise return false. */
5856
5857 static bool
5858 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5859 unsigned HOST_WIDE_INT *off)
5860 {
5861 dw_die_ref die;
5862
5863 if (flag_wpa && !decl_die_table)
5864 return false;
5865
5866 if (TREE_CODE (decl) == BLOCK)
5867 die = BLOCK_DIE (decl);
5868 else
5869 die = lookup_decl_die (decl);
5870 if (!die)
5871 return false;
5872
5873 /* During WPA stage we currently use DIEs to store the
5874 decl <-> label + offset map. That's quite inefficient but it
5875 works for now. */
5876 if (flag_wpa)
5877 {
5878 dw_die_ref ref = get_AT_ref (die, DW_AT_abstract_origin);
5879 if (!ref)
5880 {
5881 gcc_assert (die == comp_unit_die ());
5882 return false;
5883 }
5884 *off = ref->die_offset;
5885 *sym = ref->die_id.die_symbol;
5886 return true;
5887 }
5888
5889 /* Similar to get_ref_die_offset_label, but using the "correct"
5890 label. */
5891 *off = die->die_offset;
5892 while (die->die_parent)
5893 die = die->die_parent;
5894 /* For the containing CU DIE we compute a die_symbol in
5895 compute_comp_unit_symbol. */
5896 gcc_assert (die->die_tag == DW_TAG_compile_unit
5897 && die->die_id.die_symbol != NULL);
5898 *sym = die->die_id.die_symbol;
5899 return true;
5900 }
5901
5902 /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */
5903
5904 static void
5905 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5906 const char *symbol, HOST_WIDE_INT offset)
5907 {
5908 /* Create a fake DIE that contains the reference. Don't use
5909 new_die because we don't want to end up in the limbo list. */
5910 dw_die_ref ref = new_die_raw (die->die_tag);
5911 ref->die_id.die_symbol = IDENTIFIER_POINTER (get_identifier (symbol));
5912 ref->die_offset = offset;
5913 ref->with_offset = 1;
5914 add_AT_die_ref (die, attr_kind, ref);
5915 }
5916
5917 /* Create a DIE for DECL if required and add a reference to a DIE
5918 at SYMBOL + OFFSET which contains attributes dumped early. */
5919
5920 static void
5921 dwarf2out_register_external_die (tree decl, const char *sym,
5922 unsigned HOST_WIDE_INT off)
5923 {
5924 if (debug_info_level == DINFO_LEVEL_NONE)
5925 return;
5926
5927 if (flag_wpa && !decl_die_table)
5928 decl_die_table = hash_table<decl_die_hasher>::create_ggc (1000);
5929
5930 dw_die_ref die
5931 = TREE_CODE (decl) == BLOCK ? BLOCK_DIE (decl) : lookup_decl_die (decl);
5932 gcc_assert (!die);
5933
5934 tree ctx;
5935 dw_die_ref parent = NULL;
5936 /* Need to lookup a DIE for the decls context - the containing
5937 function or translation unit. */
5938 if (TREE_CODE (decl) == BLOCK)
5939 {
5940 ctx = BLOCK_SUPERCONTEXT (decl);
5941 /* ??? We do not output DIEs for all scopes thus skip as
5942 many DIEs as needed. */
5943 while (TREE_CODE (ctx) == BLOCK
5944 && !BLOCK_DIE (ctx))
5945 ctx = BLOCK_SUPERCONTEXT (ctx);
5946 }
5947 else
5948 ctx = DECL_CONTEXT (decl);
5949 /* Peel types in the context stack. */
5950 while (ctx && TYPE_P (ctx))
5951 ctx = TYPE_CONTEXT (ctx);
5952 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5953 if (debug_info_level <= DINFO_LEVEL_TERSE)
5954 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5955 ctx = DECL_CONTEXT (ctx);
5956 if (ctx)
5957 {
5958 if (TREE_CODE (ctx) == BLOCK)
5959 parent = BLOCK_DIE (ctx);
5960 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5961 /* Keep the 1:1 association during WPA. */
5962 && !flag_wpa)
5963 /* Otherwise all late annotations go to the main CU which
5964 imports the original CUs. */
5965 parent = comp_unit_die ();
5966 else if (TREE_CODE (ctx) == FUNCTION_DECL
5967 && TREE_CODE (decl) != PARM_DECL
5968 && TREE_CODE (decl) != BLOCK)
5969 /* Leave function local entities parent determination to when
5970 we process scope vars. */
5971 ;
5972 else
5973 parent = lookup_decl_die (ctx);
5974 }
5975 else
5976 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5977 Handle this case gracefully by globalizing stuff. */
5978 parent = comp_unit_die ();
5979 /* Create a DIE "stub". */
5980 switch (TREE_CODE (decl))
5981 {
5982 case TRANSLATION_UNIT_DECL:
5983 if (! flag_wpa)
5984 {
5985 die = comp_unit_die ();
5986 dw_die_ref import = new_die (DW_TAG_imported_unit, die, NULL_TREE);
5987 add_AT_external_die_ref (import, DW_AT_import, sym, off);
5988 /* We re-target all CU decls to the LTRANS CU DIE, so no need
5989 to create a DIE for the original CUs. */
5990 return;
5991 }
5992 /* Keep the 1:1 association during WPA. */
5993 die = new_die (DW_TAG_compile_unit, NULL, decl);
5994 break;
5995 case NAMESPACE_DECL:
5996 if (is_fortran (decl))
5997 die = new_die (DW_TAG_module, parent, decl);
5998 else
5999 die = new_die (DW_TAG_namespace, parent, decl);
6000 break;
6001 case FUNCTION_DECL:
6002 die = new_die (DW_TAG_subprogram, parent, decl);
6003 break;
6004 case VAR_DECL:
6005 die = new_die (DW_TAG_variable, parent, decl);
6006 break;
6007 case RESULT_DECL:
6008 die = new_die (DW_TAG_variable, parent, decl);
6009 break;
6010 case PARM_DECL:
6011 die = new_die (DW_TAG_formal_parameter, parent, decl);
6012 break;
6013 case CONST_DECL:
6014 die = new_die (DW_TAG_constant, parent, decl);
6015 break;
6016 case LABEL_DECL:
6017 die = new_die (DW_TAG_label, parent, decl);
6018 break;
6019 case BLOCK:
6020 die = new_die (DW_TAG_lexical_block, parent, decl);
6021 break;
6022 default:
6023 gcc_unreachable ();
6024 }
6025 if (TREE_CODE (decl) == BLOCK)
6026 BLOCK_DIE (decl) = die;
6027 else
6028 equate_decl_number_to_die (decl, die);
6029
6030 /* Add a reference to the DIE providing early debug at $sym + off. */
6031 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6032 }
6033
6034 /* Returns a hash value for X (which really is a var_loc_list). */
6035
6036 inline hashval_t
6037 decl_loc_hasher::hash (var_loc_list *x)
6038 {
6039 return (hashval_t) x->decl_id;
6040 }
6041
6042 /* Return nonzero if decl_id of var_loc_list X is the same as
6043 UID of decl *Y. */
6044
6045 inline bool
6046 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6047 {
6048 return (x->decl_id == DECL_UID (y));
6049 }
6050
6051 /* Return the var_loc list associated with a given declaration. */
6052
6053 static inline var_loc_list *
6054 lookup_decl_loc (const_tree decl)
6055 {
6056 if (!decl_loc_table)
6057 return NULL;
6058 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6059 }
6060
6061 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
6062
6063 inline hashval_t
6064 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6065 {
6066 return (hashval_t) x->decl_id;
6067 }
6068
6069 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6070 UID of decl *Y. */
6071
6072 inline bool
6073 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6074 {
6075 return (x->decl_id == DECL_UID (y));
6076 }
6077
6078 /* Equate a DIE to a particular declaration. */
6079
6080 static void
6081 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6082 {
6083 unsigned int decl_id = DECL_UID (decl);
6084
6085 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6086 decl_die->decl_id = decl_id;
6087 }
6088
6089 /* Return how many bits the PIECE EXPR_LIST covers. */
6090
6091 static HOST_WIDE_INT
6092 decl_piece_bitsize (rtx piece)
6093 {
6094 int ret = (int) GET_MODE (piece);
6095 if (ret)
6096 return ret;
6097 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6098 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6099 return INTVAL (XEXP (XEXP (piece, 0), 0));
6100 }
6101
6102 /* Return pointer to the location of location note in PIECE EXPR_LIST. */
6103
6104 static rtx *
6105 decl_piece_varloc_ptr (rtx piece)
6106 {
6107 if ((int) GET_MODE (piece))
6108 return &XEXP (piece, 0);
6109 else
6110 return &XEXP (XEXP (piece, 0), 1);
6111 }
6112
6113 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6114 Next is the chain of following piece nodes. */
6115
6116 static rtx_expr_list *
6117 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6118 {
6119 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6120 return alloc_EXPR_LIST (bitsize, loc_note, next);
6121 else
6122 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6123 GEN_INT (bitsize),
6124 loc_note), next);
6125 }
6126
6127 /* Return rtx that should be stored into loc field for
6128 LOC_NOTE and BITPOS/BITSIZE. */
6129
6130 static rtx
6131 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6132 HOST_WIDE_INT bitsize)
6133 {
6134 if (bitsize != -1)
6135 {
6136 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6137 if (bitpos != 0)
6138 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6139 }
6140 return loc_note;
6141 }
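/* For example, a piece of a variable covering bits [32, 48) is encoded by
   construct_piece_list as

     EXPR_LIST (mode 32, NULL_RTX)        <-- 32 bits of padding
       -> EXPR_LIST (mode 16, LOC_NOTE)   <-- the 16-bit piece itself
            -> NULL_RTX

   i.e. the bit size of each piece is smuggled into the EXPR_LIST mode
   field when it fits, and otherwise wrapped in a CONCAT with a CONST_INT
   (see decl_piece_node and decl_piece_bitsize above).  */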
6142
6143 /* This function either modifies location piece list *DEST in
6144 place (if SRC and INNER are NULL), or copies location piece list
6145 *SRC to *DEST while modifying it. The location at BITPOS is changed
6146 to contain LOC_NOTE; any pieces overlapping it are removed (or not
6147 copied), and if needed some padding around it is added.
6148 When modifying in place, DEST should point to EXPR_LIST where
6149 earlier pieces cover PIECE_BITPOS bits, when copying SRC points
6150 to the start of the whole list and INNER points to the EXPR_LIST
6151 where earlier pieces cover PIECE_BITPOS bits. */
6152
6153 static void
6154 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6155 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6156 HOST_WIDE_INT bitsize, rtx loc_note)
6157 {
6158 HOST_WIDE_INT diff;
6159 bool copy = inner != NULL;
6160
6161 if (copy)
6162 {
6163 /* First copy all nodes preceding the current bitpos. */
6164 while (src != inner)
6165 {
6166 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6167 decl_piece_bitsize (*src), NULL_RTX);
6168 dest = &XEXP (*dest, 1);
6169 src = &XEXP (*src, 1);
6170 }
6171 }
6172 /* Add padding if needed. */
6173 if (bitpos != piece_bitpos)
6174 {
6175 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6176 copy ? NULL_RTX : *dest);
6177 dest = &XEXP (*dest, 1);
6178 }
6179 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6180 {
6181 gcc_assert (!copy);
6182 /* A piece with the correct bitpos and bitsize already exists;
6183 just update the location for it and return. */
6184 *decl_piece_varloc_ptr (*dest) = loc_note;
6185 return;
6186 }
6187 /* Add the piece that changed. */
6188 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6189 dest = &XEXP (*dest, 1);
6190 /* Skip over pieces that overlap it. */
6191 diff = bitpos - piece_bitpos + bitsize;
6192 if (!copy)
6193 src = dest;
6194 while (diff > 0 && *src)
6195 {
6196 rtx piece = *src;
6197 diff -= decl_piece_bitsize (piece);
6198 if (copy)
6199 src = &XEXP (piece, 1);
6200 else
6201 {
6202 *src = XEXP (piece, 1);
6203 free_EXPR_LIST_node (piece);
6204 }
6205 }
6206 /* Add padding if needed. */
6207 if (diff < 0 && *src)
6208 {
6209 if (!copy)
6210 dest = src;
6211 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6212 dest = &XEXP (*dest, 1);
6213 }
6214 if (!copy)
6215 return;
6216 /* Finally copy all nodes following it. */
6217 while (*src)
6218 {
6219 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6220 decl_piece_bitsize (*src), NULL_RTX);
6221 dest = &XEXP (*dest, 1);
6222 src = &XEXP (*src, 1);
6223 }
6224 }
6225
6226 /* Add a variable location node to the linked list for DECL. */
6227
6228 static struct var_loc_node *
6229 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6230 {
6231 unsigned int decl_id;
6232 var_loc_list *temp;
6233 struct var_loc_node *loc = NULL;
6234 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6235
6236 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6237 {
6238 tree realdecl = DECL_DEBUG_EXPR (decl);
6239 if (handled_component_p (realdecl)
6240 || (TREE_CODE (realdecl) == MEM_REF
6241 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6242 {
6243 bool reverse;
6244 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6245 &bitsize, &reverse);
6246 if (!innerdecl
6247 || !DECL_P (innerdecl)
6248 || DECL_IGNORED_P (innerdecl)
6249 || TREE_STATIC (innerdecl)
6250 || bitsize == 0
6251 || bitpos + bitsize > 256)
6252 return NULL;
6253 decl = innerdecl;
6254 }
6255 }
6256
6257 decl_id = DECL_UID (decl);
6258 var_loc_list **slot
6259 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6260 if (*slot == NULL)
6261 {
6262 temp = ggc_cleared_alloc<var_loc_list> ();
6263 temp->decl_id = decl_id;
6264 *slot = temp;
6265 }
6266 else
6267 temp = *slot;
6268
6269 /* For PARM_DECLs try to keep around the original incoming value,
6270 even if that means we'll emit a zero-range .debug_loc entry. */
6271 if (temp->last
6272 && temp->first == temp->last
6273 && TREE_CODE (decl) == PARM_DECL
6274 && NOTE_P (temp->first->loc)
6275 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6276 && DECL_INCOMING_RTL (decl)
6277 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6278 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6279 == GET_CODE (DECL_INCOMING_RTL (decl))
6280 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6281 && (bitsize != -1
6282 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6283 NOTE_VAR_LOCATION_LOC (loc_note))
6284 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6285 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6286 {
6287 loc = ggc_cleared_alloc<var_loc_node> ();
6288 temp->first->next = loc;
6289 temp->last = loc;
6290 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6291 }
6292 else if (temp->last)
6293 {
6294 struct var_loc_node *last = temp->last, *unused = NULL;
6295 rtx *piece_loc = NULL, last_loc_note;
6296 HOST_WIDE_INT piece_bitpos = 0;
6297 if (last->next)
6298 {
6299 last = last->next;
6300 gcc_assert (last->next == NULL);
6301 }
6302 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6303 {
6304 piece_loc = &last->loc;
6305 do
6306 {
6307 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6308 if (piece_bitpos + cur_bitsize > bitpos)
6309 break;
6310 piece_bitpos += cur_bitsize;
6311 piece_loc = &XEXP (*piece_loc, 1);
6312 }
6313 while (*piece_loc);
6314 }
6315 /* TEMP->LAST here points either to the last-but-one or to the
6316 last element in the chained list; LAST points to the last
6317 element. */
6318 if (label && strcmp (last->label, label) == 0 && last->view == view)
6319 {
6320 /* For SRA optimized variables if there weren't any real
6321 insns since last note, just modify the last node. */
6322 if (piece_loc != NULL)
6323 {
6324 adjust_piece_list (piece_loc, NULL, NULL,
6325 bitpos, piece_bitpos, bitsize, loc_note);
6326 return NULL;
6327 }
6328 /* If the last note doesn't cover any instructions, remove it. */
6329 if (temp->last != last)
6330 {
6331 temp->last->next = NULL;
6332 unused = last;
6333 last = temp->last;
6334 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6335 }
6336 else
6337 {
6338 gcc_assert (temp->first == temp->last
6339 || (temp->first->next == temp->last
6340 && TREE_CODE (decl) == PARM_DECL));
6341 memset (temp->last, '\0', sizeof (*temp->last));
6342 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6343 return temp->last;
6344 }
6345 }
6346 if (bitsize == -1 && NOTE_P (last->loc))
6347 last_loc_note = last->loc;
6348 else if (piece_loc != NULL
6349 && *piece_loc != NULL_RTX
6350 && piece_bitpos == bitpos
6351 && decl_piece_bitsize (*piece_loc) == bitsize)
6352 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6353 else
6354 last_loc_note = NULL_RTX;
6355 /* If the current location is the same as the end of the list,
6356 and either both or neither of the locations is uninitialized,
6357 we have nothing to do. */
6358 if (last_loc_note == NULL_RTX
6359 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6360 NOTE_VAR_LOCATION_LOC (loc_note)))
6361 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6362 != NOTE_VAR_LOCATION_STATUS (loc_note))
6363 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6364 == VAR_INIT_STATUS_UNINITIALIZED)
6365 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6366 == VAR_INIT_STATUS_UNINITIALIZED))))
6367 {
6368 /* Add LOC to the end of list and update LAST. If the last
6369 element of the list has been removed above, reuse its
6370 memory for the new node, otherwise allocate a new one. */
6371 if (unused)
6372 {
6373 loc = unused;
6374 memset (loc, '\0', sizeof (*loc));
6375 }
6376 else
6377 loc = ggc_cleared_alloc<var_loc_node> ();
6378 if (bitsize == -1 || piece_loc == NULL)
6379 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6380 else
6381 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6382 bitpos, piece_bitpos, bitsize, loc_note);
6383 last->next = loc;
6384 /* Ensure TEMP->LAST will point either to the new last but one
6385 element of the chain, or to the last element in it. */
6386 if (last != temp->last)
6387 temp->last = last;
6388 }
6389 else if (unused)
6390 ggc_free (unused);
6391 }
6392 else
6393 {
6394 loc = ggc_cleared_alloc<var_loc_node> ();
6395 temp->first = loc;
6396 temp->last = loc;
6397 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6398 }
6399 return loc;
6400 }
6401 \f
6402 /* Keep track of the number of spaces used to indent the
6403 output of the debugging routines that print the structure of
6404 the DIE internal representation. */
6405 static int print_indent;
6406
6407 /* Indent the line the number of spaces given by print_indent. */
6408
6409 static inline void
6410 print_spaces (FILE *outfile)
6411 {
6412 fprintf (outfile, "%*s", print_indent, "");
6413 }
6414
6415 /* Print a type signature in hex. */
6416
6417 static inline void
6418 print_signature (FILE *outfile, char *sig)
6419 {
6420 int i;
6421
6422 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6423 fprintf (outfile, "%02x", sig[i] & 0xff);
6424 }
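/* With the 8-byte DWARF-4 type signatures this prints 16 hex digits,
   e.g. "7aa4e2b1c93f08d6" (illustrative value only).  */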
6425
6426 static inline void
6427 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6428 {
6429 if (discr_value->pos)
6430 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6431 else
6432 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6433 }
6434
6435 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6436
6437 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6438 RECURSE, output location descriptor operations. */
6439
6440 static void
6441 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6442 {
6443 switch (val->val_class)
6444 {
6445 case dw_val_class_addr:
6446 fprintf (outfile, "address");
6447 break;
6448 case dw_val_class_offset:
6449 fprintf (outfile, "offset");
6450 break;
6451 case dw_val_class_loc:
6452 fprintf (outfile, "location descriptor");
6453 if (val->v.val_loc == NULL)
6454 fprintf (outfile, " -> <null>\n");
6455 else if (recurse)
6456 {
6457 fprintf (outfile, ":\n");
6458 print_indent += 4;
6459 print_loc_descr (val->v.val_loc, outfile);
6460 print_indent -= 4;
6461 }
6462 else
6463 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6464 break;
6465 case dw_val_class_loc_list:
6466 fprintf (outfile, "location list -> label:%s",
6467 val->v.val_loc_list->ll_symbol);
6468 break;
6469 case dw_val_class_view_list:
6470 val = view_list_to_loc_list_val_node (val);
6471 fprintf (outfile, "location list with views -> labels:%s and %s",
6472 val->v.val_loc_list->ll_symbol,
6473 val->v.val_loc_list->vl_symbol);
6474 break;
6475 case dw_val_class_range_list:
6476 fprintf (outfile, "range list");
6477 break;
6478 case dw_val_class_const:
6479 case dw_val_class_const_implicit:
6480 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6481 break;
6482 case dw_val_class_unsigned_const:
6483 case dw_val_class_unsigned_const_implicit:
6484 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6485 break;
6486 case dw_val_class_const_double:
6487 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6488 HOST_WIDE_INT_PRINT_UNSIGNED")",
6489 val->v.val_double.high,
6490 val->v.val_double.low);
6491 break;
6492 case dw_val_class_wide_int:
6493 {
6494 int i = val->v.val_wide->get_len ();
6495 fprintf (outfile, "constant (");
6496 gcc_assert (i > 0);
6497 if (val->v.val_wide->elt (i - 1) == 0)
6498 fprintf (outfile, "0x");
6499 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6500 val->v.val_wide->elt (--i));
6501 while (--i >= 0)
6502 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6503 val->v.val_wide->elt (i));
6504 fprintf (outfile, ")");
6505 break;
6506 }
6507 case dw_val_class_vec:
6508 fprintf (outfile, "floating-point or vector constant");
6509 break;
6510 case dw_val_class_flag:
6511 fprintf (outfile, "%u", val->v.val_flag);
6512 break;
6513 case dw_val_class_die_ref:
6514 if (val->v.val_die_ref.die != NULL)
6515 {
6516 dw_die_ref die = val->v.val_die_ref.die;
6517
6518 if (die->comdat_type_p)
6519 {
6520 fprintf (outfile, "die -> signature: ");
6521 print_signature (outfile,
6522 die->die_id.die_type_node->signature);
6523 }
6524 else if (die->die_id.die_symbol)
6525 {
6526 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6527 if (die->with_offset)
6528 fprintf (outfile, " + %ld", die->die_offset);
6529 }
6530 else
6531 fprintf (outfile, "die -> %ld", die->die_offset);
6532 fprintf (outfile, " (%p)", (void *) die);
6533 }
6534 else
6535 fprintf (outfile, "die -> <null>");
6536 break;
6537 case dw_val_class_vms_delta:
6538 fprintf (outfile, "delta: @slotcount(%s-%s)",
6539 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6540 break;
6541 case dw_val_class_symview:
6542 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6543 break;
6544 case dw_val_class_lbl_id:
6545 case dw_val_class_lineptr:
6546 case dw_val_class_macptr:
6547 case dw_val_class_loclistsptr:
6548 case dw_val_class_high_pc:
6549 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6550 break;
6551 case dw_val_class_str:
6552 if (val->v.val_str->str != NULL)
6553 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6554 else
6555 fprintf (outfile, "<null>");
6556 break;
6557 case dw_val_class_file:
6558 case dw_val_class_file_implicit:
6559 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6560 val->v.val_file->emitted_number);
6561 break;
6562 case dw_val_class_data8:
6563 {
6564 int i;
6565
6566 for (i = 0; i < 8; i++)
6567 fprintf (outfile, "%02x", val->v.val_data8[i]);
6568 break;
6569 }
6570 case dw_val_class_discr_value:
6571 print_discr_value (outfile, &val->v.val_discr_value);
6572 break;
6573 case dw_val_class_discr_list:
6574 for (dw_discr_list_ref node = val->v.val_discr_list;
6575 node != NULL;
6576 node = node->dw_discr_next)
6577 {
6578 if (node->dw_discr_range)
6579 {
6580 print_discr_value (outfile, &node->dw_discr_lower_bound);
6581 fprintf (outfile, " .. ");
6582 print_discr_value (outfile, &node->dw_discr_upper_bound);
6583 }
6584 else
6585 print_discr_value (outfile, &node->dw_discr_lower_bound);
6586
6587 if (node->dw_discr_next != NULL)
6588 fprintf (outfile, " | ");
6589 }
6590 default:
6591 break;
6592 }
6593 }
6594
6595 /* Likewise, for a DIE attribute. */
6596
6597 static void
6598 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6599 {
6600 print_dw_val (&a->dw_attr_val, recurse, outfile);
6601 }
6602
6603
6604 /* Print the list of operands in the LOC location description to OUTFILE. This
6605 routine is a debugging aid only. */
6606
6607 static void
6608 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6609 {
6610 dw_loc_descr_ref l = loc;
6611
6612 if (loc == NULL)
6613 {
6614 print_spaces (outfile);
6615 fprintf (outfile, "<null>\n");
6616 return;
6617 }
6618
6619 for (l = loc; l != NULL; l = l->dw_loc_next)
6620 {
6621 print_spaces (outfile);
6622 fprintf (outfile, "(%p) %s",
6623 (void *) l,
6624 dwarf_stack_op_name (l->dw_loc_opc));
6625 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6626 {
6627 fprintf (outfile, " ");
6628 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6629 }
6630 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6631 {
6632 fprintf (outfile, ", ");
6633 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6634 }
6635 fprintf (outfile, "\n");
6636 }
6637 }
6638
6639 /* Print the information associated with a given DIE, and its children.
6640 This routine is a debugging aid only. */
6641
6642 static void
6643 print_die (dw_die_ref die, FILE *outfile)
6644 {
6645 dw_attr_node *a;
6646 dw_die_ref c;
6647 unsigned ix;
6648
6649 print_spaces (outfile);
6650 fprintf (outfile, "DIE %4ld: %s (%p)\n",
6651 die->die_offset, dwarf_tag_name (die->die_tag),
6652 (void*) die);
6653 print_spaces (outfile);
6654 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6655 fprintf (outfile, " offset: %ld", die->die_offset);
6656 fprintf (outfile, " mark: %d\n", die->die_mark);
6657
6658 if (die->comdat_type_p)
6659 {
6660 print_spaces (outfile);
6661 fprintf (outfile, " signature: ");
6662 print_signature (outfile, die->die_id.die_type_node->signature);
6663 fprintf (outfile, "\n");
6664 }
6665
6666 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6667 {
6668 print_spaces (outfile);
6669 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6670
6671 print_attribute (a, true, outfile);
6672 fprintf (outfile, "\n");
6673 }
6674
6675 if (die->die_child != NULL)
6676 {
6677 print_indent += 4;
6678 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6679 print_indent -= 4;
6680 }
6681 if (print_indent == 0)
6682 fprintf (outfile, "\n");
6683 }
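/* As an example, the output of print_die for a simple variable DIE looks
   roughly like

     DIE  123: DW_TAG_variable (0x7f....)
      abbrev id: 1 offset: 123 mark: 0
      DW_AT_name: "counter"
      DW_AT_type: die -> 456 (0x7f....)

   with offsets and pointers obviously varying from run to run.  */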
6684
6685 /* Print the list of operations in the LOC location description. */
6686
6687 DEBUG_FUNCTION void
6688 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6689 {
6690 print_loc_descr (loc, stderr);
6691 }
6692
6693 /* Print the information collected for a given DIE. */
6694
6695 DEBUG_FUNCTION void
6696 debug_dwarf_die (dw_die_ref die)
6697 {
6698 print_die (die, stderr);
6699 }
6700
6701 DEBUG_FUNCTION void
6702 debug (die_struct &ref)
6703 {
6704 print_die (&ref, stderr);
6705 }
6706
6707 DEBUG_FUNCTION void
6708 debug (die_struct *ptr)
6709 {
6710 if (ptr)
6711 debug (*ptr);
6712 else
6713 fprintf (stderr, "<nil>\n");
6714 }
6715
6716
6717 /* Print all DWARF information collected for the compilation unit.
6718 This routine is a debugging aid only. */
6719
6720 DEBUG_FUNCTION void
6721 debug_dwarf (void)
6722 {
6723 print_indent = 0;
6724 print_die (comp_unit_die (), stderr);
6725 }
6726
6727 /* Verify the DIE tree structure. */
6728
6729 DEBUG_FUNCTION void
6730 verify_die (dw_die_ref die)
6731 {
6732 gcc_assert (!die->die_mark);
6733 if (die->die_parent == NULL
6734 && die->die_sib == NULL)
6735 return;
6736 /* Verify the die_sib list is cyclic. */
6737 dw_die_ref x = die;
6738 do
6739 {
6740 x->die_mark = 1;
6741 x = x->die_sib;
6742 }
6743 while (x && !x->die_mark);
6744 gcc_assert (x == die);
6745 x = die;
6746 do
6747 {
6748 /* Verify all dies have the same parent. */
6749 gcc_assert (x->die_parent == die->die_parent);
6750 if (x->die_child)
6751 {
6752 /* Verify the child has the proper parent and recurse. */
6753 gcc_assert (x->die_child->die_parent == x);
6754 verify_die (x->die_child);
6755 }
6756 x->die_mark = 0;
6757 x = x->die_sib;
6758 }
6759 while (x && x->die_mark);
6760 }
6761
6762 /* Sanity checks on DIEs. */
6763
6764 static void
6765 check_die (dw_die_ref die)
6766 {
6767 unsigned ix;
6768 dw_attr_node *a;
6769 bool inline_found = false;
6770 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6771 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6772 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6773 {
6774 switch (a->dw_attr)
6775 {
6776 case DW_AT_inline:
6777 if (a->dw_attr_val.v.val_unsigned)
6778 inline_found = true;
6779 break;
6780 case DW_AT_location:
6781 ++n_location;
6782 break;
6783 case DW_AT_low_pc:
6784 ++n_low_pc;
6785 break;
6786 case DW_AT_high_pc:
6787 ++n_high_pc;
6788 break;
6789 case DW_AT_artificial:
6790 ++n_artificial;
6791 break;
6792 case DW_AT_decl_column:
6793 ++n_decl_column;
6794 break;
6795 case DW_AT_decl_line:
6796 ++n_decl_line;
6797 break;
6798 case DW_AT_decl_file:
6799 ++n_decl_file;
6800 break;
6801 default:
6802 break;
6803 }
6804 }
6805 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6806 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6807 {
6808 fprintf (stderr, "Duplicate attributes in DIE:\n");
6809 debug_dwarf_die (die);
6810 gcc_unreachable ();
6811 }
6812 if (inline_found)
6813 {
6814 /* A debugging information entry that is a member of an abstract
6815 instance tree [that has DW_AT_inline] should not contain any
6816 attributes which describe aspects of the subroutine which vary
6817 between distinct inlined expansions or distinct out-of-line
6818 expansions. */
6819 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6820 gcc_assert (a->dw_attr != DW_AT_low_pc
6821 && a->dw_attr != DW_AT_high_pc
6822 && a->dw_attr != DW_AT_location
6823 && a->dw_attr != DW_AT_frame_base
6824 && a->dw_attr != DW_AT_call_all_calls
6825 && a->dw_attr != DW_AT_GNU_all_call_sites);
6826 }
6827 }
6828 \f
6829 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6830 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6831 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6832
6833 /* Calculate the checksum of a location expression. */
6834
6835 static inline void
6836 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6837 {
6838 int tem;
6839 inchash::hash hstate;
6840 hashval_t hash;
6841
6842 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6843 CHECKSUM (tem);
6844 hash_loc_operands (loc, hstate);
6845 hash = hstate.end();
6846 CHECKSUM (hash);
6847 }
6848
6849 /* Calculate the checksum of an attribute. */
6850
6851 static void
6852 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6853 {
6854 dw_loc_descr_ref loc;
6855 rtx r;
6856
6857 CHECKSUM (at->dw_attr);
6858
6859 /* We don't care that this was compiled with a different compiler
6860 snapshot; if the output is the same, that's what matters. */
6861 if (at->dw_attr == DW_AT_producer)
6862 return;
6863
6864 switch (AT_class (at))
6865 {
6866 case dw_val_class_const:
6867 case dw_val_class_const_implicit:
6868 CHECKSUM (at->dw_attr_val.v.val_int);
6869 break;
6870 case dw_val_class_unsigned_const:
6871 case dw_val_class_unsigned_const_implicit:
6872 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6873 break;
6874 case dw_val_class_const_double:
6875 CHECKSUM (at->dw_attr_val.v.val_double);
6876 break;
6877 case dw_val_class_wide_int:
6878 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6879 get_full_len (*at->dw_attr_val.v.val_wide)
6880 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6881 break;
6882 case dw_val_class_vec:
6883 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6884 (at->dw_attr_val.v.val_vec.length
6885 * at->dw_attr_val.v.val_vec.elt_size));
6886 break;
6887 case dw_val_class_flag:
6888 CHECKSUM (at->dw_attr_val.v.val_flag);
6889 break;
6890 case dw_val_class_str:
6891 CHECKSUM_STRING (AT_string (at));
6892 break;
6893
6894 case dw_val_class_addr:
6895 r = AT_addr (at);
6896 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6897 CHECKSUM_STRING (XSTR (r, 0));
6898 break;
6899
6900 case dw_val_class_offset:
6901 CHECKSUM (at->dw_attr_val.v.val_offset);
6902 break;
6903
6904 case dw_val_class_loc:
6905 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6906 loc_checksum (loc, ctx);
6907 break;
6908
6909 case dw_val_class_die_ref:
6910 die_checksum (AT_ref (at), ctx, mark);
6911 break;
6912
6913 case dw_val_class_fde_ref:
6914 case dw_val_class_vms_delta:
6915 case dw_val_class_symview:
6916 case dw_val_class_lbl_id:
6917 case dw_val_class_lineptr:
6918 case dw_val_class_macptr:
6919 case dw_val_class_loclistsptr:
6920 case dw_val_class_high_pc:
6921 break;
6922
6923 case dw_val_class_file:
6924 case dw_val_class_file_implicit:
6925 CHECKSUM_STRING (AT_file (at)->filename);
6926 break;
6927
6928 case dw_val_class_data8:
6929 CHECKSUM (at->dw_attr_val.v.val_data8);
6930 break;
6931
6932 default:
6933 break;
6934 }
6935 }
6936
6937 /* Calculate the checksum of a DIE. */
6938
6939 static void
6940 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6941 {
6942 dw_die_ref c;
6943 dw_attr_node *a;
6944 unsigned ix;
6945
6946 /* To avoid infinite recursion. */
6947 if (die->die_mark)
6948 {
6949 CHECKSUM (die->die_mark);
6950 return;
6951 }
6952 die->die_mark = ++(*mark);
6953
6954 CHECKSUM (die->die_tag);
6955
6956 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6957 attr_checksum (a, ctx, mark);
6958
6959 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6960 }
6961
6962 #undef CHECKSUM
6963 #undef CHECKSUM_BLOCK
6964 #undef CHECKSUM_STRING
6965
6966 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6967 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6968 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6969 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
6970 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
6971 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
6972 #define CHECKSUM_ATTR(FOO) \
6973 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
6974
6975 /* Calculate the checksum of a number in signed LEB128 format. */
6976
6977 static void
6978 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
6979 {
6980 unsigned char byte;
6981 bool more;
6982
6983 while (1)
6984 {
6985 byte = (value & 0x7f);
6986 value >>= 7;
6987 more = !((value == 0 && (byte & 0x40) == 0)
6988 || (value == -1 && (byte & 0x40) != 0));
6989 if (more)
6990 byte |= 0x80;
6991 CHECKSUM (byte);
6992 if (!more)
6993 break;
6994 }
6995 }
6996
6997 /* Calculate the checksum of a number in unsigned LEB128 format. */
6998
6999 static void
7000 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
7001 {
7002 while (1)
7003 {
7004 unsigned char byte = (value & 0x7f);
7005 value >>= 7;
7006 if (value != 0)
7007 /* More bytes to follow. */
7008 byte |= 0x80;
7009 CHECKSUM (byte);
7010 if (value == 0)
7011 break;
7012 }
7013 }
7014
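/* Worked example (editorial addition, not in the original source): the
   DWARF specification's own LEB128 samples, run through the two helpers
   above, feed the following bytes into CTX:

     checksum_uleb128 (624485)  ->  0xE5 0x8E 0x26
       (624485 = 0x65 + 0x0E * 128 + 0x26 * 128 * 128)
     checksum_sleb128 (-2)      ->  0x7E
       (value becomes -1 and the sign bit 0x40 is already set, so one byte)
     checksum_sleb128 (-129)    ->  0xFF 0x7E

   Only the encoded bytes enter the hash; nothing is stored.  */

#if 0
/* A minimal standalone sketch (hypothetical, for illustration only; it is
   not part of GCC and, being inside #if 0, is never compiled).  It mirrors
   checksum_uleb128 but collects the bytes in a buffer so the example above
   can be reproduced with plain C.  */
#include <stdio.h>

static int
uleb128_encode (unsigned long value, unsigned char *buf)
{
  int n = 0;
  while (1)
    {
      unsigned char byte = value & 0x7f;
      value >>= 7;
      if (value != 0)
        byte |= 0x80;  /* More bytes to follow.  */
      buf[n++] = byte;
      if (value == 0)
        break;
    }
  return n;
}

int
main (void)
{
  unsigned char buf[16];
  int i, n = uleb128_encode (624485, buf);
  for (i = 0; i < n; i++)
    printf ("%02x ", buf[i]);  /* prints: e5 8e 26 */
  printf ("\n");
  return 0;
}
#endif
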
7015 /* Checksum the context of the DIE. This adds the names of any
7016 surrounding namespaces or structures to the checksum. */
7017
7018 static void
7019 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
7020 {
7021 const char *name;
7022 dw_die_ref spec;
7023 int tag = die->die_tag;
7024
7025 if (tag != DW_TAG_namespace
7026 && tag != DW_TAG_structure_type
7027 && tag != DW_TAG_class_type)
7028 return;
7029
7030 name = get_AT_string (die, DW_AT_name);
7031
7032 spec = get_AT_ref (die, DW_AT_specification);
7033 if (spec != NULL)
7034 die = spec;
7035
7036 if (die->die_parent != NULL)
7037 checksum_die_context (die->die_parent, ctx);
7038
7039 CHECKSUM_ULEB128 ('C');
7040 CHECKSUM_ULEB128 (tag);
7041 if (name != NULL)
7042 CHECKSUM_STRING (name);
7043 }
7044
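/* Example (editorial addition, not in the original source): for

     namespace N { struct S { struct T { ... } t; }; }

   checksumming T's context feeds 'C' DW_TAG_namespace "N" and then
   'C' DW_TAG_structure_type "S", outermost scope first, because this
   function recurses to the parent before hashing its own tag and name.
   A parent with any other tag (a function, for instance) stops the
   context walk entirely.  */
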
7045 /* Calculate the checksum of a location expression. */
7046
7047 static inline void
7048 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7049 {
7050 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7051 were emitted as a DW_FORM_sdata instead of a location expression. */
7052 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7053 {
7054 CHECKSUM_ULEB128 (DW_FORM_sdata);
7055 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7056 return;
7057 }
7058
7059 /* Otherwise, just checksum the raw location expression. */
7060 while (loc != NULL)
7061 {
7062 inchash::hash hstate;
7063 hashval_t hash;
7064
7065 CHECKSUM_ULEB128 (loc->dtprel);
7066 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7067 hash_loc_operands (loc, hstate);
7068 hash = hstate.end ();
7069 CHECKSUM (hash);
7070 loc = loc->dw_loc_next;
7071 }
7072 }
7073
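/* Example (editorial addition): a member at byte offset 8 usually has a
   DW_AT_data_member_location whose location expression is the single op
   DW_OP_plus_uconst 8.  The special case above hashes that as
   DW_FORM_sdata followed by sleb128 (8), so the checksum matches that of
   a DIE whose attribute was emitted directly as the constant 8 (compare
   the dw_val_class_*const cases in attr_checksum_ordered below).  */
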
7074 /* Calculate the checksum of an attribute. */
7075
7076 static void
7077 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7078 struct md5_ctx *ctx, int *mark)
7079 {
7080 dw_loc_descr_ref loc;
7081 rtx r;
7082
7083 if (AT_class (at) == dw_val_class_die_ref)
7084 {
7085 dw_die_ref target_die = AT_ref (at);
7086
7087 /* For pointer and reference types, we checksum only the (qualified)
7088 name of the target type (if there is a name). For friend entries,
7089 we checksum only the (qualified) name of the target type or function.
7090 This allows the checksum to remain the same whether the target type
7091 is complete or not. */
7092 if ((at->dw_attr == DW_AT_type
7093 && (tag == DW_TAG_pointer_type
7094 || tag == DW_TAG_reference_type
7095 || tag == DW_TAG_rvalue_reference_type
7096 || tag == DW_TAG_ptr_to_member_type))
7097 || (at->dw_attr == DW_AT_friend
7098 && tag == DW_TAG_friend))
7099 {
7100 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7101
7102 if (name_attr != NULL)
7103 {
7104 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7105
7106 if (decl == NULL)
7107 decl = target_die;
7108 CHECKSUM_ULEB128 ('N');
7109 CHECKSUM_ULEB128 (at->dw_attr);
7110 if (decl->die_parent != NULL)
7111 checksum_die_context (decl->die_parent, ctx);
7112 CHECKSUM_ULEB128 ('E');
7113 CHECKSUM_STRING (AT_string (name_attr));
7114 return;
7115 }
7116 }
7117
7118 /* For all other references to another DIE, we check to see if the
7119 target DIE has already been visited. If it has, we emit a
7120 backward reference; if not, we descend recursively. */
7121 if (target_die->die_mark > 0)
7122 {
7123 CHECKSUM_ULEB128 ('R');
7124 CHECKSUM_ULEB128 (at->dw_attr);
7125 CHECKSUM_ULEB128 (target_die->die_mark);
7126 }
7127 else
7128 {
7129 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7130
7131 if (decl == NULL)
7132 decl = target_die;
7133 target_die->die_mark = ++(*mark);
7134 CHECKSUM_ULEB128 ('T');
7135 CHECKSUM_ULEB128 (at->dw_attr);
7136 if (decl->die_parent != NULL)
7137 checksum_die_context (decl->die_parent, ctx);
7138 die_checksum_ordered (target_die, ctx, mark);
7139 }
7140 return;
7141 }
7142
7143 CHECKSUM_ULEB128 ('A');
7144 CHECKSUM_ULEB128 (at->dw_attr);
7145
7146 switch (AT_class (at))
7147 {
7148 case dw_val_class_const:
7149 case dw_val_class_const_implicit:
7150 CHECKSUM_ULEB128 (DW_FORM_sdata);
7151 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7152 break;
7153
7154 case dw_val_class_unsigned_const:
7155 case dw_val_class_unsigned_const_implicit:
7156 CHECKSUM_ULEB128 (DW_FORM_sdata);
7157 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7158 break;
7159
7160 case dw_val_class_const_double:
7161 CHECKSUM_ULEB128 (DW_FORM_block);
7162 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7163 CHECKSUM (at->dw_attr_val.v.val_double);
7164 break;
7165
7166 case dw_val_class_wide_int:
7167 CHECKSUM_ULEB128 (DW_FORM_block);
7168 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7169 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7170 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7171 get_full_len (*at->dw_attr_val.v.val_wide)
7172 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7173 break;
7174
7175 case dw_val_class_vec:
7176 CHECKSUM_ULEB128 (DW_FORM_block);
7177 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7178 * at->dw_attr_val.v.val_vec.elt_size);
7179 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7180 (at->dw_attr_val.v.val_vec.length
7181 * at->dw_attr_val.v.val_vec.elt_size));
7182 break;
7183
7184 case dw_val_class_flag:
7185 CHECKSUM_ULEB128 (DW_FORM_flag);
7186 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7187 break;
7188
7189 case dw_val_class_str:
7190 CHECKSUM_ULEB128 (DW_FORM_string);
7191 CHECKSUM_STRING (AT_string (at));
7192 break;
7193
7194 case dw_val_class_addr:
7195 r = AT_addr (at);
7196 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7197 CHECKSUM_ULEB128 (DW_FORM_string);
7198 CHECKSUM_STRING (XSTR (r, 0));
7199 break;
7200
7201 case dw_val_class_offset:
7202 CHECKSUM_ULEB128 (DW_FORM_sdata);
7203 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7204 break;
7205
7206 case dw_val_class_loc:
7207 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7208 loc_checksum_ordered (loc, ctx);
7209 break;
7210
7211 case dw_val_class_fde_ref:
7212 case dw_val_class_symview:
7213 case dw_val_class_lbl_id:
7214 case dw_val_class_lineptr:
7215 case dw_val_class_macptr:
7216 case dw_val_class_loclistsptr:
7217 case dw_val_class_high_pc:
7218 break;
7219
7220 case dw_val_class_file:
7221 case dw_val_class_file_implicit:
7222 CHECKSUM_ULEB128 (DW_FORM_string);
7223 CHECKSUM_STRING (AT_file (at)->filename);
7224 break;
7225
7226 case dw_val_class_data8:
7227 CHECKSUM (at->dw_attr_val.v.val_data8);
7228 break;
7229
7230 default:
7231 break;
7232 }
7233 }
7234
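/* Example (editorial addition): for an attribute such as DW_AT_byte_size
   with the unsigned value 4, the routine above feeds uleb128 ('A'),
   uleb128 (DW_AT_byte_size), uleb128 (DW_FORM_sdata) and sleb128 (4) into
   CTX.  Signed and unsigned constants are both canonicalized to
   DW_FORM_sdata here, so the checksum does not depend on which form the
   attribute is eventually emitted with.  */
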
7235 struct checksum_attributes
7236 {
7237 dw_attr_node *at_name;
7238 dw_attr_node *at_type;
7239 dw_attr_node *at_friend;
7240 dw_attr_node *at_accessibility;
7241 dw_attr_node *at_address_class;
7242 dw_attr_node *at_alignment;
7243 dw_attr_node *at_allocated;
7244 dw_attr_node *at_artificial;
7245 dw_attr_node *at_associated;
7246 dw_attr_node *at_binary_scale;
7247 dw_attr_node *at_bit_offset;
7248 dw_attr_node *at_bit_size;
7249 dw_attr_node *at_bit_stride;
7250 dw_attr_node *at_byte_size;
7251 dw_attr_node *at_byte_stride;
7252 dw_attr_node *at_const_value;
7253 dw_attr_node *at_containing_type;
7254 dw_attr_node *at_count;
7255 dw_attr_node *at_data_location;
7256 dw_attr_node *at_data_member_location;
7257 dw_attr_node *at_decimal_scale;
7258 dw_attr_node *at_decimal_sign;
7259 dw_attr_node *at_default_value;
7260 dw_attr_node *at_digit_count;
7261 dw_attr_node *at_discr;
7262 dw_attr_node *at_discr_list;
7263 dw_attr_node *at_discr_value;
7264 dw_attr_node *at_encoding;
7265 dw_attr_node *at_endianity;
7266 dw_attr_node *at_explicit;
7267 dw_attr_node *at_is_optional;
7268 dw_attr_node *at_location;
7269 dw_attr_node *at_lower_bound;
7270 dw_attr_node *at_mutable;
7271 dw_attr_node *at_ordering;
7272 dw_attr_node *at_picture_string;
7273 dw_attr_node *at_prototyped;
7274 dw_attr_node *at_small;
7275 dw_attr_node *at_segment;
7276 dw_attr_node *at_string_length;
7277 dw_attr_node *at_string_length_bit_size;
7278 dw_attr_node *at_string_length_byte_size;
7279 dw_attr_node *at_threads_scaled;
7280 dw_attr_node *at_upper_bound;
7281 dw_attr_node *at_use_location;
7282 dw_attr_node *at_use_UTF8;
7283 dw_attr_node *at_variable_parameter;
7284 dw_attr_node *at_virtuality;
7285 dw_attr_node *at_visibility;
7286 dw_attr_node *at_vtable_elem_location;
7287 };
7288
7289 /* Collect the attributes that we will want to use for the checksum. */
7290
7291 static void
7292 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7293 {
7294 dw_attr_node *a;
7295 unsigned ix;
7296
7297 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7298 {
7299 switch (a->dw_attr)
7300 {
7301 case DW_AT_name:
7302 attrs->at_name = a;
7303 break;
7304 case DW_AT_type:
7305 attrs->at_type = a;
7306 break;
7307 case DW_AT_friend:
7308 attrs->at_friend = a;
7309 break;
7310 case DW_AT_accessibility:
7311 attrs->at_accessibility = a;
7312 break;
7313 case DW_AT_address_class:
7314 attrs->at_address_class = a;
7315 break;
7316 case DW_AT_alignment:
7317 attrs->at_alignment = a;
7318 break;
7319 case DW_AT_allocated:
7320 attrs->at_allocated = a;
7321 break;
7322 case DW_AT_artificial:
7323 attrs->at_artificial = a;
7324 break;
7325 case DW_AT_associated:
7326 attrs->at_associated = a;
7327 break;
7328 case DW_AT_binary_scale:
7329 attrs->at_binary_scale = a;
7330 break;
7331 case DW_AT_bit_offset:
7332 attrs->at_bit_offset = a;
7333 break;
7334 case DW_AT_bit_size:
7335 attrs->at_bit_size = a;
7336 break;
7337 case DW_AT_bit_stride:
7338 attrs->at_bit_stride = a;
7339 break;
7340 case DW_AT_byte_size:
7341 attrs->at_byte_size = a;
7342 break;
7343 case DW_AT_byte_stride:
7344 attrs->at_byte_stride = a;
7345 break;
7346 case DW_AT_const_value:
7347 attrs->at_const_value = a;
7348 break;
7349 case DW_AT_containing_type:
7350 attrs->at_containing_type = a;
7351 break;
7352 case DW_AT_count:
7353 attrs->at_count = a;
7354 break;
7355 case DW_AT_data_location:
7356 attrs->at_data_location = a;
7357 break;
7358 case DW_AT_data_member_location:
7359 attrs->at_data_member_location = a;
7360 break;
7361 case DW_AT_decimal_scale:
7362 attrs->at_decimal_scale = a;
7363 break;
7364 case DW_AT_decimal_sign:
7365 attrs->at_decimal_sign = a;
7366 break;
7367 case DW_AT_default_value:
7368 attrs->at_default_value = a;
7369 break;
7370 case DW_AT_digit_count:
7371 attrs->at_digit_count = a;
7372 break;
7373 case DW_AT_discr:
7374 attrs->at_discr = a;
7375 break;
7376 case DW_AT_discr_list:
7377 attrs->at_discr_list = a;
7378 break;
7379 case DW_AT_discr_value:
7380 attrs->at_discr_value = a;
7381 break;
7382 case DW_AT_encoding:
7383 attrs->at_encoding = a;
7384 break;
7385 case DW_AT_endianity:
7386 attrs->at_endianity = a;
7387 break;
7388 case DW_AT_explicit:
7389 attrs->at_explicit = a;
7390 break;
7391 case DW_AT_is_optional:
7392 attrs->at_is_optional = a;
7393 break;
7394 case DW_AT_location:
7395 attrs->at_location = a;
7396 break;
7397 case DW_AT_lower_bound:
7398 attrs->at_lower_bound = a;
7399 break;
7400 case DW_AT_mutable:
7401 attrs->at_mutable = a;
7402 break;
7403 case DW_AT_ordering:
7404 attrs->at_ordering = a;
7405 break;
7406 case DW_AT_picture_string:
7407 attrs->at_picture_string = a;
7408 break;
7409 case DW_AT_prototyped:
7410 attrs->at_prototyped = a;
7411 break;
7412 case DW_AT_small:
7413 attrs->at_small = a;
7414 break;
7415 case DW_AT_segment:
7416 attrs->at_segment = a;
7417 break;
7418 case DW_AT_string_length:
7419 attrs->at_string_length = a;
7420 break;
7421 case DW_AT_string_length_bit_size:
7422 attrs->at_string_length_bit_size = a;
7423 break;
7424 case DW_AT_string_length_byte_size:
7425 attrs->at_string_length_byte_size = a;
7426 break;
7427 case DW_AT_threads_scaled:
7428 attrs->at_threads_scaled = a;
7429 break;
7430 case DW_AT_upper_bound:
7431 attrs->at_upper_bound = a;
7432 break;
7433 case DW_AT_use_location:
7434 attrs->at_use_location = a;
7435 break;
7436 case DW_AT_use_UTF8:
7437 attrs->at_use_UTF8 = a;
7438 break;
7439 case DW_AT_variable_parameter:
7440 attrs->at_variable_parameter = a;
7441 break;
7442 case DW_AT_virtuality:
7443 attrs->at_virtuality = a;
7444 break;
7445 case DW_AT_visibility:
7446 attrs->at_visibility = a;
7447 break;
7448 case DW_AT_vtable_elem_location:
7449 attrs->at_vtable_elem_location = a;
7450 break;
7451 default:
7452 break;
7453 }
7454 }
7455 }
7456
7457 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7458
7459 static void
7460 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7461 {
7462 dw_die_ref c;
7463 dw_die_ref decl;
7464 struct checksum_attributes attrs;
7465
7466 CHECKSUM_ULEB128 ('D');
7467 CHECKSUM_ULEB128 (die->die_tag);
7468
7469 memset (&attrs, 0, sizeof (attrs));
7470
7471 decl = get_AT_ref (die, DW_AT_specification);
7472 if (decl != NULL)
7473 collect_checksum_attributes (&attrs, decl);
7474 collect_checksum_attributes (&attrs, die);
7475
7476 CHECKSUM_ATTR (attrs.at_name);
7477 CHECKSUM_ATTR (attrs.at_accessibility);
7478 CHECKSUM_ATTR (attrs.at_address_class);
7479 CHECKSUM_ATTR (attrs.at_allocated);
7480 CHECKSUM_ATTR (attrs.at_artificial);
7481 CHECKSUM_ATTR (attrs.at_associated);
7482 CHECKSUM_ATTR (attrs.at_binary_scale);
7483 CHECKSUM_ATTR (attrs.at_bit_offset);
7484 CHECKSUM_ATTR (attrs.at_bit_size);
7485 CHECKSUM_ATTR (attrs.at_bit_stride);
7486 CHECKSUM_ATTR (attrs.at_byte_size);
7487 CHECKSUM_ATTR (attrs.at_byte_stride);
7488 CHECKSUM_ATTR (attrs.at_const_value);
7489 CHECKSUM_ATTR (attrs.at_containing_type);
7490 CHECKSUM_ATTR (attrs.at_count);
7491 CHECKSUM_ATTR (attrs.at_data_location);
7492 CHECKSUM_ATTR (attrs.at_data_member_location);
7493 CHECKSUM_ATTR (attrs.at_decimal_scale);
7494 CHECKSUM_ATTR (attrs.at_decimal_sign);
7495 CHECKSUM_ATTR (attrs.at_default_value);
7496 CHECKSUM_ATTR (attrs.at_digit_count);
7497 CHECKSUM_ATTR (attrs.at_discr);
7498 CHECKSUM_ATTR (attrs.at_discr_list);
7499 CHECKSUM_ATTR (attrs.at_discr_value);
7500 CHECKSUM_ATTR (attrs.at_encoding);
7501 CHECKSUM_ATTR (attrs.at_endianity);
7502 CHECKSUM_ATTR (attrs.at_explicit);
7503 CHECKSUM_ATTR (attrs.at_is_optional);
7504 CHECKSUM_ATTR (attrs.at_location);
7505 CHECKSUM_ATTR (attrs.at_lower_bound);
7506 CHECKSUM_ATTR (attrs.at_mutable);
7507 CHECKSUM_ATTR (attrs.at_ordering);
7508 CHECKSUM_ATTR (attrs.at_picture_string);
7509 CHECKSUM_ATTR (attrs.at_prototyped);
7510 CHECKSUM_ATTR (attrs.at_small);
7511 CHECKSUM_ATTR (attrs.at_segment);
7512 CHECKSUM_ATTR (attrs.at_string_length);
7513 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7514 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7515 CHECKSUM_ATTR (attrs.at_threads_scaled);
7516 CHECKSUM_ATTR (attrs.at_upper_bound);
7517 CHECKSUM_ATTR (attrs.at_use_location);
7518 CHECKSUM_ATTR (attrs.at_use_UTF8);
7519 CHECKSUM_ATTR (attrs.at_variable_parameter);
7520 CHECKSUM_ATTR (attrs.at_virtuality);
7521 CHECKSUM_ATTR (attrs.at_visibility);
7522 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7523 CHECKSUM_ATTR (attrs.at_type);
7524 CHECKSUM_ATTR (attrs.at_friend);
7525 CHECKSUM_ATTR (attrs.at_alignment);
7526
7527 /* Checksum the child DIEs. */
7528 c = die->die_child;
7529 if (c) do {
7530 dw_attr_node *name_attr;
7531
7532 c = c->die_sib;
7533 name_attr = get_AT (c, DW_AT_name);
7534 if (is_template_instantiation (c))
7535 {
7536 /* Ignore instantiations of member type and function templates. */
7537 }
7538 else if (name_attr != NULL
7539 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7540 {
7541 /* Use a shallow checksum for named nested types and member
7542 functions. */
7543 CHECKSUM_ULEB128 ('S');
7544 CHECKSUM_ULEB128 (c->die_tag);
7545 CHECKSUM_STRING (AT_string (name_attr));
7546 }
7547 else
7548 {
7549 /* Use a deep checksum for other children. */
7550 /* Mark this DIE so it gets processed when unmarking. */
7551 if (c->die_mark == 0)
7552 c->die_mark = -1;
7553 die_checksum_ordered (c, ctx, mark);
7554 }
7555 } while (c != die->die_child);
7556
7557 CHECKSUM_ULEB128 (0);
7558 }
7559
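/* Summary of the byte stream produced by the ordered checksum (editorial
   addition, derived from the routines above):

     'D' tag                  start of a DIE (die_checksum_ordered)
     'C' tag name             one enclosing namespace/class/structure
     'A' attr form value      an ordinary attribute
     'N' attr <ctx> 'E' name  a type or friend reference hashed by name only
     'R' attr mark            back reference to an already-visited DIE
     'T' attr <ctx> <DIE>     deep (recursive) reference to a new DIE
     'S' tag name             shallow entry for a named nested type or
                              member function
     0                        terminates a DIE's list of children

   The letters, tags and attribute codes are all hashed in ULEB128 form.  */
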
7560 /* Add a type name and tag to a hash. */
7561 static void
7562 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7563 {
7564 CHECKSUM_ULEB128 (tag);
7565 CHECKSUM_STRING (name);
7566 }
7567
7568 #undef CHECKSUM
7569 #undef CHECKSUM_STRING
7570 #undef CHECKSUM_ATTR
7571 #undef CHECKSUM_SLEB128
7572 #undef CHECKSUM_ULEB128
7573
7574 /* Generate the type signature for DIE. This is computed by generating an
7575 MD5 checksum over the DIE's tag, its relevant attributes, and its
7576 children. Attributes that are references to other DIEs are processed
7577 by recursion, using the MARK field to prevent infinite recursion.
7578 If the DIE is nested inside a namespace or another type, we also
7579 need to include that context in the signature. The lower 64 bits
7580 of the resulting MD5 checksum comprise the signature. */
7581
7582 static void
7583 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7584 {
7585 int mark;
7586 const char *name;
7587 unsigned char checksum[16];
7588 struct md5_ctx ctx;
7589 dw_die_ref decl;
7590 dw_die_ref parent;
7591
7592 name = get_AT_string (die, DW_AT_name);
7593 decl = get_AT_ref (die, DW_AT_specification);
7594 parent = get_die_parent (die);
7595
7596 /* First, compute a signature for just the type name (and its surrounding
7597 context, if any). This is stored in the type unit DIE for link-time
7598 ODR (one-definition rule) checking. */
7599
7600 if (is_cxx () && name != NULL)
7601 {
7602 md5_init_ctx (&ctx);
7603
7604 /* Checksum the names of surrounding namespaces and structures. */
7605 if (parent != NULL)
7606 checksum_die_context (parent, &ctx);
7607
7608 /* Checksum the current DIE. */
7609 die_odr_checksum (die->die_tag, name, &ctx);
7610 md5_finish_ctx (&ctx, checksum);
7611
7612 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7613 }
7614
7615 /* Next, compute the complete type signature. */
7616
7617 md5_init_ctx (&ctx);
7618 mark = 1;
7619 die->die_mark = mark;
7620
7621 /* Checksum the names of surrounding namespaces and structures. */
7622 if (parent != NULL)
7623 checksum_die_context (parent, &ctx);
7624
7625 /* Checksum the DIE and its children. */
7626 die_checksum_ordered (die, &ctx, &mark);
7627 unmark_all_dies (die);
7628 md5_finish_ctx (&ctx, checksum);
7629
7630 /* Store the signature in the type node and link the type DIE and the
7631 type node together. */
7632 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7633 DWARF_TYPE_SIGNATURE_SIZE);
7634 die->comdat_type_p = true;
7635 die->die_id.die_type_node = type_node;
7636 type_node->type_die = die;
7637
7638 /* If the DIE is a specification, link its declaration to the type node
7639 as well. */
7640 if (decl != NULL)
7641 {
7642 decl->comdat_type_p = true;
7643 decl->die_id.die_type_node = type_node;
7644 }
7645 }
7646
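/* Example (editorial addition): for "struct S" nested in namespace N, the
   first hash above covers only the qualified name (the 'C' context entry
   for N plus S's tag and name), and bytes 8..15 of that digest become the
   DW_AT_GNU_odr_signature.  The second hash covers the full ordered
   checksum of S and its children; its last DWARF_TYPE_SIGNATURE_SIZE
   (i.e. 8) bytes become the comdat type signature stored in TYPE_NODE,
   which is what DW_FORM_ref_sig8 references are resolved against.  */
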
7647 /* Do the location expressions look the same? */
7648 static inline int
7649 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7650 {
7651 return loc1->dw_loc_opc == loc2->dw_loc_opc
7652 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7653 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7654 }
7655
7656 /* Do the values look the same? */
7657 static int
7658 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7659 {
7660 dw_loc_descr_ref loc1, loc2;
7661 rtx r1, r2;
7662
7663 if (v1->val_class != v2->val_class)
7664 return 0;
7665
7666 switch (v1->val_class)
7667 {
7668 case dw_val_class_const:
7669 case dw_val_class_const_implicit:
7670 return v1->v.val_int == v2->v.val_int;
7671 case dw_val_class_unsigned_const:
7672 case dw_val_class_unsigned_const_implicit:
7673 return v1->v.val_unsigned == v2->v.val_unsigned;
7674 case dw_val_class_const_double:
7675 return v1->v.val_double.high == v2->v.val_double.high
7676 && v1->v.val_double.low == v2->v.val_double.low;
7677 case dw_val_class_wide_int:
7678 return *v1->v.val_wide == *v2->v.val_wide;
7679 case dw_val_class_vec:
7680 if (v1->v.val_vec.length != v2->v.val_vec.length
7681 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7682 return 0;
7683 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7684 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7685 return 0;
7686 return 1;
7687 case dw_val_class_flag:
7688 return v1->v.val_flag == v2->v.val_flag;
7689 case dw_val_class_str:
7690 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7691
7692 case dw_val_class_addr:
7693 r1 = v1->v.val_addr;
7694 r2 = v2->v.val_addr;
7695 if (GET_CODE (r1) != GET_CODE (r2))
7696 return 0;
7697 return !rtx_equal_p (r1, r2);
7698
7699 case dw_val_class_offset:
7700 return v1->v.val_offset == v2->v.val_offset;
7701
7702 case dw_val_class_loc:
7703 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7704 loc1 && loc2;
7705 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7706 if (!same_loc_p (loc1, loc2, mark))
7707 return 0;
7708 return !loc1 && !loc2;
7709
7710 case dw_val_class_die_ref:
7711 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7712
7713 case dw_val_class_symview:
7714 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7715
7716 case dw_val_class_fde_ref:
7717 case dw_val_class_vms_delta:
7718 case dw_val_class_lbl_id:
7719 case dw_val_class_lineptr:
7720 case dw_val_class_macptr:
7721 case dw_val_class_loclistsptr:
7722 case dw_val_class_high_pc:
7723 return 1;
7724
7725 case dw_val_class_file:
7726 case dw_val_class_file_implicit:
7727 return v1->v.val_file == v2->v.val_file;
7728
7729 case dw_val_class_data8:
7730 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7731
7732 default:
7733 return 1;
7734 }
7735 }
7736
7737 /* Do the attributes look the same? */
7738
7739 static int
7740 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7741 {
7742 if (at1->dw_attr != at2->dw_attr)
7743 return 0;
7744
7745 /* We don't care that this was compiled with a different compiler
7746 snapshot; if the output is the same, that's what matters. */
7747 if (at1->dw_attr == DW_AT_producer)
7748 return 1;
7749
7750 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7751 }
7752
7753 /* Do the dies look the same? */
7754
7755 static int
7756 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7757 {
7758 dw_die_ref c1, c2;
7759 dw_attr_node *a1;
7760 unsigned ix;
7761
7762 /* To avoid infinite recursion. */
7763 if (die1->die_mark)
7764 return die1->die_mark == die2->die_mark;
7765 die1->die_mark = die2->die_mark = ++(*mark);
7766
7767 if (die1->die_tag != die2->die_tag)
7768 return 0;
7769
7770 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7771 return 0;
7772
7773 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7774 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7775 return 0;
7776
7777 c1 = die1->die_child;
7778 c2 = die2->die_child;
7779 if (! c1)
7780 {
7781 if (c2)
7782 return 0;
7783 }
7784 else
7785 for (;;)
7786 {
7787 if (!same_die_p (c1, c2, mark))
7788 return 0;
7789 c1 = c1->die_sib;
7790 c2 = c2->die_sib;
7791 if (c1 == die1->die_child)
7792 {
7793 if (c2 == die2->die_child)
7794 break;
7795 else
7796 return 0;
7797 }
7798 }
7799
7800 return 1;
7801 }
7802
7803 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7804 children, and set die_symbol. */
7805
7806 static void
7807 compute_comp_unit_symbol (dw_die_ref unit_die)
7808 {
7809 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7810 const char *base = die_name ? lbasename (die_name) : "anonymous";
7811 char *name = XALLOCAVEC (char, strlen (base) + 64);
7812 char *p;
7813 int i, mark;
7814 unsigned char checksum[16];
7815 struct md5_ctx ctx;
7816
7817 /* Compute the checksum of the DIE, then append part of it as hex digits to
7818 the name (filename) of the unit. */
7819
7820 md5_init_ctx (&ctx);
7821 mark = 0;
7822 die_checksum (unit_die, &ctx, &mark);
7823 unmark_all_dies (unit_die);
7824 md5_finish_ctx (&ctx, checksum);
7825
7826 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7827 not start with a letter but with anything valid for filenames and
7828 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7829 character is not a letter. */
7830 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7831 clean_symbol_name (name);
7832
7833 p = name + strlen (name);
7834 for (i = 0; i < 4; i++)
7835 {
7836 sprintf (p, "%.2x", checksum[i]);
7837 p += 2;
7838 }
7839
7840 unit_die->die_id.die_symbol = xstrdup (name);
7841 }
7842
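/* Example (editorial addition, with an assumption): for a unit whose
   DW_AT_name is "/tmp/foo.c", BASE is "foo.c"; if the first four checksum
   bytes were 1a 2b 3c 4d, the code builds "foo.c.", runs
   clean_symbol_name over it, and appends "1a2b3c4d".  Assuming
   clean_symbol_name (defined elsewhere in this file) rewrites characters
   that are not valid in assembler symbols, such as '.', to '_', the
   resulting die_symbol would be "foo_c_1a2b3c4d".  */
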
7843 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7844
7845 static int
7846 is_type_die (dw_die_ref die)
7847 {
7848 switch (die->die_tag)
7849 {
7850 case DW_TAG_array_type:
7851 case DW_TAG_class_type:
7852 case DW_TAG_interface_type:
7853 case DW_TAG_enumeration_type:
7854 case DW_TAG_pointer_type:
7855 case DW_TAG_reference_type:
7856 case DW_TAG_rvalue_reference_type:
7857 case DW_TAG_string_type:
7858 case DW_TAG_structure_type:
7859 case DW_TAG_subroutine_type:
7860 case DW_TAG_union_type:
7861 case DW_TAG_ptr_to_member_type:
7862 case DW_TAG_set_type:
7863 case DW_TAG_subrange_type:
7864 case DW_TAG_base_type:
7865 case DW_TAG_const_type:
7866 case DW_TAG_file_type:
7867 case DW_TAG_packed_type:
7868 case DW_TAG_volatile_type:
7869 case DW_TAG_typedef:
7870 return 1;
7871 default:
7872 return 0;
7873 }
7874 }
7875
7876 /* Returns 1 iff C is the sort of DIE that should go into a COMDAT CU.
7877 Basically, we want to choose the bits that are likely to be shared between
7878 compilations (types) and leave out the bits that are specific to individual
7879 compilations (functions). */
7880
7881 static int
7882 is_comdat_die (dw_die_ref c)
7883 {
7884 /* I think we want to leave base types and __vtbl_ptr_type in the main CU, as
7885 we do for stabs. The advantage is a greater likelihood of sharing between
7886 objects that don't include headers in the same order (and therefore would
7887 put the base types in a different comdat). jason 8/28/00 */
7888
7889 if (c->die_tag == DW_TAG_base_type)
7890 return 0;
7891
7892 if (c->die_tag == DW_TAG_pointer_type
7893 || c->die_tag == DW_TAG_reference_type
7894 || c->die_tag == DW_TAG_rvalue_reference_type
7895 || c->die_tag == DW_TAG_const_type
7896 || c->die_tag == DW_TAG_volatile_type)
7897 {
7898 dw_die_ref t = get_AT_ref (c, DW_AT_type);
7899
7900 return t ? is_comdat_die (t) : 0;
7901 }
7902
7903 return is_type_die (c);
7904 }
7905
7906 /* Returns true iff C is a compile-unit DIE. */
7907
7908 static inline bool
7909 is_cu_die (dw_die_ref c)
7910 {
7911 return c && (c->die_tag == DW_TAG_compile_unit
7912 || c->die_tag == DW_TAG_skeleton_unit);
7913 }
7914
7915 /* Returns true iff C is a unit DIE of some sort. */
7916
7917 static inline bool
7918 is_unit_die (dw_die_ref c)
7919 {
7920 return c && (c->die_tag == DW_TAG_compile_unit
7921 || c->die_tag == DW_TAG_partial_unit
7922 || c->die_tag == DW_TAG_type_unit
7923 || c->die_tag == DW_TAG_skeleton_unit);
7924 }
7925
7926 /* Returns true iff C is a namespace DIE. */
7927
7928 static inline bool
7929 is_namespace_die (dw_die_ref c)
7930 {
7931 return c && c->die_tag == DW_TAG_namespace;
7932 }
7933
7934 /* Returns true iff C is a class or structure DIE. */
7935
7936 static inline bool
7937 is_class_die (dw_die_ref c)
7938 {
7939 return c && (c->die_tag == DW_TAG_class_type
7940 || c->die_tag == DW_TAG_structure_type);
7941 }
7942
7943 /* Return non-zero if this DIE is a template parameter. */
7944
7945 static inline bool
7946 is_template_parameter (dw_die_ref die)
7947 {
7948 switch (die->die_tag)
7949 {
7950 case DW_TAG_template_type_param:
7951 case DW_TAG_template_value_param:
7952 case DW_TAG_GNU_template_template_param:
7953 case DW_TAG_GNU_template_parameter_pack:
7954 return true;
7955 default:
7956 return false;
7957 }
7958 }
7959
7960 /* Return non-zero if this DIE represents a template instantiation. */
7961
7962 static inline bool
7963 is_template_instantiation (dw_die_ref die)
7964 {
7965 dw_die_ref c;
7966
7967 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7968 return false;
7969 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7970 return false;
7971 }
7972
7973 static char *
7974 gen_internal_sym (const char *prefix)
7975 {
7976 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7977
7978 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7979 return xstrdup (buf);
7980 }
7981
7982 /* Return non-zero if this DIE is a declaration. */
7983
7984 static int
7985 is_declaration_die (dw_die_ref die)
7986 {
7987 dw_attr_node *a;
7988 unsigned ix;
7989
7990 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7991 if (a->dw_attr == DW_AT_declaration)
7992 return 1;
7993
7994 return 0;
7995 }
7996
7997 /* Return non-zero if this DIE is nested inside a subprogram. */
7998
7999 static int
8000 is_nested_in_subprogram (dw_die_ref die)
8001 {
8002 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
8003
8004 if (decl == NULL)
8005 decl = die;
8006 return local_scope_p (decl);
8007 }
8008
8009 /* Return non-zero if this DIE contains a defining declaration of a
8010 subprogram. */
8011
8012 static int
8013 contains_subprogram_definition (dw_die_ref die)
8014 {
8015 dw_die_ref c;
8016
8017 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
8018 return 1;
8019 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
8020 return 0;
8021 }
8022
8023 /* Return non-zero if this is a type DIE that should be moved to a
8024 COMDAT .debug_types section or .debug_info section with DW_UT_*type
8025 unit type. */
8026
8027 static int
8028 should_move_die_to_comdat (dw_die_ref die)
8029 {
8030 switch (die->die_tag)
8031 {
8032 case DW_TAG_class_type:
8033 case DW_TAG_structure_type:
8034 case DW_TAG_enumeration_type:
8035 case DW_TAG_union_type:
8036 /* Don't move declarations, inlined instances, types nested in a
8037 subprogram, or types that contain subprogram definitions. */
8038 if (is_declaration_die (die)
8039 || get_AT (die, DW_AT_abstract_origin)
8040 || is_nested_in_subprogram (die)
8041 || contains_subprogram_definition (die))
8042 return 0;
8043 return 1;
8044 case DW_TAG_array_type:
8045 case DW_TAG_interface_type:
8046 case DW_TAG_pointer_type:
8047 case DW_TAG_reference_type:
8048 case DW_TAG_rvalue_reference_type:
8049 case DW_TAG_string_type:
8050 case DW_TAG_subroutine_type:
8051 case DW_TAG_ptr_to_member_type:
8052 case DW_TAG_set_type:
8053 case DW_TAG_subrange_type:
8054 case DW_TAG_base_type:
8055 case DW_TAG_const_type:
8056 case DW_TAG_file_type:
8057 case DW_TAG_packed_type:
8058 case DW_TAG_volatile_type:
8059 case DW_TAG_typedef:
8060 default:
8061 return 0;
8062 }
8063 }
8064
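/* Example (editorial addition): a namespace-scope "struct S { int i; };"
   passes the checks above and is broken out into its own type unit.  The
   same struct stays in the main CU if it is only a forward declaration,
   if it is an inlined instance (DW_AT_abstract_origin), if it is defined
   inside a function body, or if its subtree contains the definition (not
   just a declaration) of a subprogram.  */
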
8065 /* Make a clone of DIE. */
8066
8067 static dw_die_ref
8068 clone_die (dw_die_ref die)
8069 {
8070 dw_die_ref clone = new_die_raw (die->die_tag);
8071 dw_attr_node *a;
8072 unsigned ix;
8073
8074 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8075 add_dwarf_attr (clone, a);
8076
8077 return clone;
8078 }
8079
8080 /* Make a clone of the tree rooted at DIE. */
8081
8082 static dw_die_ref
8083 clone_tree (dw_die_ref die)
8084 {
8085 dw_die_ref c;
8086 dw_die_ref clone = clone_die (die);
8087
8088 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8089
8090 return clone;
8091 }
8092
8093 /* Make a clone of DIE as a declaration. */
8094
8095 static dw_die_ref
8096 clone_as_declaration (dw_die_ref die)
8097 {
8098 dw_die_ref clone;
8099 dw_die_ref decl;
8100 dw_attr_node *a;
8101 unsigned ix;
8102
8103 /* If the DIE is already a declaration, just clone it. */
8104 if (is_declaration_die (die))
8105 return clone_die (die);
8106
8107 /* If the DIE is a specification, just clone its declaration DIE. */
8108 decl = get_AT_ref (die, DW_AT_specification);
8109 if (decl != NULL)
8110 {
8111 clone = clone_die (decl);
8112 if (die->comdat_type_p)
8113 add_AT_die_ref (clone, DW_AT_signature, die);
8114 return clone;
8115 }
8116
8117 clone = new_die_raw (die->die_tag);
8118
8119 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8120 {
8121 /* We don't want to copy over all attributes.
8122 For example we don't want DW_AT_byte_size because otherwise we will no
8123 longer have a declaration and GDB will treat it as a definition. */
8124
8125 switch (a->dw_attr)
8126 {
8127 case DW_AT_abstract_origin:
8128 case DW_AT_artificial:
8129 case DW_AT_containing_type:
8130 case DW_AT_external:
8131 case DW_AT_name:
8132 case DW_AT_type:
8133 case DW_AT_virtuality:
8134 case DW_AT_linkage_name:
8135 case DW_AT_MIPS_linkage_name:
8136 add_dwarf_attr (clone, a);
8137 break;
8138 case DW_AT_byte_size:
8139 case DW_AT_alignment:
8140 default:
8141 break;
8142 }
8143 }
8144
8145 if (die->comdat_type_p)
8146 add_AT_die_ref (clone, DW_AT_signature, die);
8147
8148 add_AT_flag (clone, DW_AT_declaration, 1);
8149 return clone;
8150 }
8151
8152
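/* Example (editorial addition): cloning a fully-defined DW_TAG_subprogram
   with clone_as_declaration keeps only identifying attributes
   (DW_AT_name, DW_AT_type, DW_AT_external, DW_AT_artificial,
   DW_AT_virtuality, the linkage-name attributes, DW_AT_abstract_origin
   and DW_AT_containing_type), drops everything else (DW_AT_byte_size,
   DW_AT_alignment, PC ranges, ...), and adds DW_AT_declaration 1 so that
   debuggers treat the clone as a declaration.  If the original is a
   comdat type, a DW_AT_signature reference back to it is added as well.  */
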
8153 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8154
8155 struct decl_table_entry
8156 {
8157 dw_die_ref orig;
8158 dw_die_ref copy;
8159 };
8160
8161 /* Helpers to manipulate hash table of copied declarations. */
8162
8163 /* Hashtable helpers. */
8164
8165 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8166 {
8167 typedef die_struct *compare_type;
8168 static inline hashval_t hash (const decl_table_entry *);
8169 static inline bool equal (const decl_table_entry *, const die_struct *);
8170 };
8171
8172 inline hashval_t
8173 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8174 {
8175 return htab_hash_pointer (entry->orig);
8176 }
8177
8178 inline bool
8179 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8180 const die_struct *entry2)
8181 {
8182 return entry1->orig == entry2;
8183 }
8184
8185 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8186
8187 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8188 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8189 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8190 to check if the ancestor has already been copied into UNIT. */
8191
8192 static dw_die_ref
8193 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8194 decl_hash_type *decl_table)
8195 {
8196 dw_die_ref parent = die->die_parent;
8197 dw_die_ref new_parent = unit;
8198 dw_die_ref copy;
8199 decl_table_entry **slot = NULL;
8200 struct decl_table_entry *entry = NULL;
8201
8202 if (decl_table)
8203 {
8204 /* Check if the entry has already been copied to UNIT. */
8205 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8206 INSERT);
8207 if (*slot != HTAB_EMPTY_ENTRY)
8208 {
8209 entry = *slot;
8210 return entry->copy;
8211 }
8212
8213 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8214 entry = XCNEW (struct decl_table_entry);
8215 entry->orig = die;
8216 entry->copy = NULL;
8217 *slot = entry;
8218 }
8219
8220 if (parent != NULL)
8221 {
8222 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8223 if (spec != NULL)
8224 parent = spec;
8225 if (!is_unit_die (parent))
8226 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8227 }
8228
8229 copy = clone_as_declaration (die);
8230 add_child_die (new_parent, copy);
8231
8232 if (decl_table)
8233 {
8234 /* Record the pointer to the copy. */
8235 entry->copy = copy;
8236 }
8237
8238 return copy;
8239 }
8240 /* Copy the declaration context to the new type unit DIE. This includes
8241 any surrounding namespace or type declarations. If the DIE has an
8242 AT_specification attribute, it also includes attributes and children
8243 attached to the specification, and returns a pointer to the original
8244 parent of the declaration DIE. Returns NULL otherwise. */
8245
8246 static dw_die_ref
8247 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8248 {
8249 dw_die_ref decl;
8250 dw_die_ref new_decl;
8251 dw_die_ref orig_parent = NULL;
8252
8253 decl = get_AT_ref (die, DW_AT_specification);
8254 if (decl == NULL)
8255 decl = die;
8256 else
8257 {
8258 unsigned ix;
8259 dw_die_ref c;
8260 dw_attr_node *a;
8261
8262 /* The original DIE will be changed to a declaration, and must
8263 be moved to be a child of the original declaration DIE. */
8264 orig_parent = decl->die_parent;
8265
8266 /* Copy the type node pointer from the new DIE to the original
8267 declaration DIE so we can forward references later. */
8268 decl->comdat_type_p = true;
8269 decl->die_id.die_type_node = die->die_id.die_type_node;
8270
8271 remove_AT (die, DW_AT_specification);
8272
8273 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8274 {
8275 if (a->dw_attr != DW_AT_name
8276 && a->dw_attr != DW_AT_declaration
8277 && a->dw_attr != DW_AT_external)
8278 add_dwarf_attr (die, a);
8279 }
8280
8281 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8282 }
8283
8284 if (decl->die_parent != NULL
8285 && !is_unit_die (decl->die_parent))
8286 {
8287 new_decl = copy_ancestor_tree (unit, decl, NULL);
8288 if (new_decl != NULL)
8289 {
8290 remove_AT (new_decl, DW_AT_signature);
8291 add_AT_specification (die, new_decl);
8292 }
8293 }
8294
8295 return orig_parent;
8296 }
8297
8298 /* Generate the skeleton ancestor tree for the given NODE, then clone
8299 the DIE and add the clone into the tree. */
8300
8301 static void
8302 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8303 {
8304 if (node->new_die != NULL)
8305 return;
8306
8307 node->new_die = clone_as_declaration (node->old_die);
8308
8309 if (node->parent != NULL)
8310 {
8311 generate_skeleton_ancestor_tree (node->parent);
8312 add_child_die (node->parent->new_die, node->new_die);
8313 }
8314 }
8315
8316 /* Generate a skeleton tree of DIEs containing any declarations that are
8317 found in the original tree. We traverse the tree looking for declaration
8318 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8319
8320 static void
8321 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8322 {
8323 skeleton_chain_node node;
8324 dw_die_ref c;
8325 dw_die_ref first;
8326 dw_die_ref prev = NULL;
8327 dw_die_ref next = NULL;
8328
8329 node.parent = parent;
8330
8331 first = c = parent->old_die->die_child;
8332 if (c)
8333 next = c->die_sib;
8334 if (c) do {
8335 if (prev == NULL || prev->die_sib == c)
8336 prev = c;
8337 c = next;
8338 next = (c == first ? NULL : c->die_sib);
8339 node.old_die = c;
8340 node.new_die = NULL;
8341 if (is_declaration_die (c))
8342 {
8343 if (is_template_instantiation (c))
8344 {
8345 /* Instantiated templates do not need to be cloned into the
8346 type unit. Just move the DIE and its children back to
8347 the skeleton tree (in the main CU). */
8348 remove_child_with_prev (c, prev);
8349 add_child_die (parent->new_die, c);
8350 c = prev;
8351 }
8352 else if (c->comdat_type_p)
8353 {
8354 /* This is the skeleton of a type broken out by an earlier call to
8355 break_out_comdat_types. Clone the existing DIE, but keep the children
8356 under the original (which is in the main CU). */
8357 dw_die_ref clone = clone_die (c);
8358
8359 replace_child (c, clone, prev);
8360 generate_skeleton_ancestor_tree (parent);
8361 add_child_die (parent->new_die, c);
8362 c = clone;
8363 continue;
8364 }
8365 else
8366 {
8367 /* Clone the existing DIE, move the original to the skeleton
8368 tree (which is in the main CU), and put the clone, with
8369 all the original's children, where the original came from
8370 (which is about to be moved to the type unit). */
8371 dw_die_ref clone = clone_die (c);
8372 move_all_children (c, clone);
8373
8374 /* If the original has a DW_AT_object_pointer attribute,
8375 it would now point to a child DIE just moved to the
8376 cloned tree, so we need to remove that attribute from
8377 the original. */
8378 remove_AT (c, DW_AT_object_pointer);
8379
8380 replace_child (c, clone, prev);
8381 generate_skeleton_ancestor_tree (parent);
8382 add_child_die (parent->new_die, c);
8383 node.old_die = clone;
8384 node.new_die = c;
8385 c = clone;
8386 }
8387 }
8388 generate_skeleton_bottom_up (&node);
8389 } while (next != NULL);
8390 }
8391
8392 /* Wrapper function for generate_skeleton_bottom_up. */
8393
8394 static dw_die_ref
8395 generate_skeleton (dw_die_ref die)
8396 {
8397 skeleton_chain_node node;
8398
8399 node.old_die = die;
8400 node.new_die = NULL;
8401 node.parent = NULL;
8402
8403 /* If this type definition is nested inside another type,
8404 and is not an instantiation of a template, always leave
8405 at least a declaration in its place. */
8406 if (die->die_parent != NULL
8407 && is_type_die (die->die_parent)
8408 && !is_template_instantiation (die))
8409 node.new_die = clone_as_declaration (die);
8410
8411 generate_skeleton_bottom_up (&node);
8412 return node.new_die;
8413 }
8414
8415 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8416 declaration. The original DIE is moved to a new compile unit so that
8417 existing references to it follow it to the new location. If any of the
8418 original DIE's descendants is a declaration, we need to replace the
8419 original DIE with a skeleton tree and move the declarations back into the
8420 skeleton tree. */
8421
8422 static dw_die_ref
8423 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8424 dw_die_ref prev)
8425 {
8426 dw_die_ref skeleton, orig_parent;
8427
8428 /* Copy the declaration context to the type unit DIE. If the returned
8429 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8430 that DIE. */
8431 orig_parent = copy_declaration_context (unit, child);
8432
8433 skeleton = generate_skeleton (child);
8434 if (skeleton == NULL)
8435 remove_child_with_prev (child, prev);
8436 else
8437 {
8438 skeleton->comdat_type_p = true;
8439 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8440
8441 /* If the original DIE was a specification, we need to put
8442 the skeleton under the parent DIE of the declaration.
8443 This leaves the original declaration in the tree, but
8444 it will be pruned later since there are no longer any
8445 references to it. */
8446 if (orig_parent != NULL)
8447 {
8448 remove_child_with_prev (child, prev);
8449 add_child_die (orig_parent, skeleton);
8450 }
8451 else
8452 replace_child (child, skeleton, prev);
8453 }
8454
8455 return skeleton;
8456 }
8457
8458 static void
8459 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8460 comdat_type_node *type_node,
8461 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8462
8463 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8464 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8465 DWARF procedure references in the DW_AT_location attribute. */
8466
8467 static dw_die_ref
8468 copy_dwarf_procedure (dw_die_ref die,
8469 comdat_type_node *type_node,
8470 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8471 {
8472 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8473
8474 /* DWARF procedures are not supposed to have children... */
8475 gcc_assert (die->die_child == NULL);
8476
8477 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8478 gcc_assert (vec_safe_length (die->die_attr) == 1
8479 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8480
8481 /* Do not copy DWARF procedures more than once. */
8482 bool existed;
8483 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8484 if (existed)
8485 return die_copy;
8486
8487 die_copy = clone_die (die);
8488 add_child_die (type_node->root_die, die_copy);
8489 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8490 return die_copy;
8491 }
8492
8493 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8494 procedures in DIE's attributes. */
8495
8496 static void
8497 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8498 comdat_type_node *type_node,
8499 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8500 {
8501 dw_attr_node *a;
8502 unsigned i;
8503
8504 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8505 {
8506 dw_loc_descr_ref loc;
8507
8508 if (a->dw_attr_val.val_class != dw_val_class_loc)
8509 continue;
8510
8511 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8512 {
8513 switch (loc->dw_loc_opc)
8514 {
8515 case DW_OP_call2:
8516 case DW_OP_call4:
8517 case DW_OP_call_ref:
8518 gcc_assert (loc->dw_loc_oprnd1.val_class
8519 == dw_val_class_die_ref);
8520 loc->dw_loc_oprnd1.v.val_die_ref.die
8521 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8522 type_node,
8523 copied_dwarf_procs);
8524
8525 default:
8526 break;
8527 }
8528 }
8529 }
8530 }
8531
8532 /* Copy DWARF procedures that are referenced by the DIE tree to TREE_NODE and
8533 rewrite references to point to the copies.
8534
8535 References are looked for in DIE's attributes and recursively in all its
8536 children attributes that are location descriptions. COPIED_DWARF_PROCS is a
8537 mapping from old DWARF procedures to their copies. It is used to avoid
8538 copying the same DWARF procedure twice under TYPE_NODE. */
8539
8540 static void
8541 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8542 comdat_type_node *type_node,
8543 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8544 {
8545 dw_die_ref c;
8546
8547 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8548 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8549 type_node,
8550 copied_dwarf_procs));
8551 }
8552
8553 /* Traverse the DIE and set up additional .debug_types or .debug_info
8554 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8555 section. */
8556
8557 static void
8558 break_out_comdat_types (dw_die_ref die)
8559 {
8560 dw_die_ref c;
8561 dw_die_ref first;
8562 dw_die_ref prev = NULL;
8563 dw_die_ref next = NULL;
8564 dw_die_ref unit = NULL;
8565
8566 first = c = die->die_child;
8567 if (c)
8568 next = c->die_sib;
8569 if (c) do {
8570 if (prev == NULL || prev->die_sib == c)
8571 prev = c;
8572 c = next;
8573 next = (c == first ? NULL : c->die_sib);
8574 if (should_move_die_to_comdat (c))
8575 {
8576 dw_die_ref replacement;
8577 comdat_type_node *type_node;
8578
8579 /* Break out nested types into their own type units. */
8580 break_out_comdat_types (c);
8581
8582 /* Create a new type unit DIE as the root for the new tree, and
8583 add it to the list of comdat types. */
8584 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8585 add_AT_unsigned (unit, DW_AT_language,
8586 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8587 type_node = ggc_cleared_alloc<comdat_type_node> ();
8588 type_node->root_die = unit;
8589 type_node->next = comdat_type_list;
8590 comdat_type_list = type_node;
8591
8592 /* Generate the type signature. */
8593 generate_type_signature (c, type_node);
8594
8595 /* Copy the declaration context, attributes, and children of the
8596 declaration into the new type unit DIE, then remove this DIE
8597 from the main CU (or replace it with a skeleton if necessary). */
8598 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8599 type_node->skeleton_die = replacement;
8600
8601 /* Add the DIE to the new compunit. */
8602 add_child_die (unit, c);
8603
8604 /* Types can reference DWARF procedures for type size or data location
8605 expressions. Calls in DWARF expressions cannot target procedures
8606 that are not in the same section. So we must copy DWARF procedures
8607 along with this type and then rewrite references to them. */
8608 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8609 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8610
8611 if (replacement != NULL)
8612 c = replacement;
8613 }
8614 else if (c->die_tag == DW_TAG_namespace
8615 || c->die_tag == DW_TAG_class_type
8616 || c->die_tag == DW_TAG_structure_type
8617 || c->die_tag == DW_TAG_union_type)
8618 {
8619 /* Look for nested types that can be broken out. */
8620 break_out_comdat_types (c);
8621 }
8622 } while (next != NULL);
8623 }
8624
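/* End-to-end example (editorial addition): given a CU containing
   "struct Point { int x, y; };" and a function that uses it, the walk
   above creates a new DW_TAG_type_unit, computes Point's 8-byte
   signature, moves the DW_TAG_structure_type DIE (plus any DWARF
   procedures its location expressions call) into that unit, and leaves
   behind in the main CU either nothing or a skeleton declaration linked
   to the type unit through DW_AT_signature.  Consumers then find the
   full definition via the signature (DW_FORM_ref_sig8) rather than a
   local DIE offset.  */
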
8625 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8626 Enter all the cloned children into the hash table decl_table. */
8627
8628 static dw_die_ref
8629 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8630 {
8631 dw_die_ref c;
8632 dw_die_ref clone;
8633 struct decl_table_entry *entry;
8634 decl_table_entry **slot;
8635
8636 if (die->die_tag == DW_TAG_subprogram)
8637 clone = clone_as_declaration (die);
8638 else
8639 clone = clone_die (die);
8640
8641 slot = decl_table->find_slot_with_hash (die,
8642 htab_hash_pointer (die), INSERT);
8643
8644 /* Assert that DIE isn't in the hash table yet. If it were already
8645 there, its ancestors would necessarily be there as well, and
8646 clone_tree_partial wouldn't have been called. */
8647 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8648
8649 entry = XCNEW (struct decl_table_entry);
8650 entry->orig = die;
8651 entry->copy = clone;
8652 *slot = entry;
8653
8654 if (die->die_tag != DW_TAG_subprogram)
8655 FOR_EACH_CHILD (die, c,
8656 add_child_die (clone, clone_tree_partial (c, decl_table)));
8657
8658 return clone;
8659 }
8660
8661 /* Walk the DIE and its children, looking for references to incomplete
8662 or trivial types that are unmarked (i.e., that are not in the current
8663 type_unit). */
8664
8665 static void
8666 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8667 {
8668 dw_die_ref c;
8669 dw_attr_node *a;
8670 unsigned ix;
8671
8672 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8673 {
8674 if (AT_class (a) == dw_val_class_die_ref)
8675 {
8676 dw_die_ref targ = AT_ref (a);
8677 decl_table_entry **slot;
8678 struct decl_table_entry *entry;
8679
8680 if (targ->die_mark != 0 || targ->comdat_type_p)
8681 continue;
8682
8683 slot = decl_table->find_slot_with_hash (targ,
8684 htab_hash_pointer (targ),
8685 INSERT);
8686
8687 if (*slot != HTAB_EMPTY_ENTRY)
8688 {
8689 /* TARG has already been copied, so we just need to
8690 modify the reference to point to the copy. */
8691 entry = *slot;
8692 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8693 }
8694 else
8695 {
8696 dw_die_ref parent = unit;
8697 dw_die_ref copy = clone_die (targ);
8698
8699 /* Record in DECL_TABLE that TARG has been copied.
8700 Need to do this now, before the recursive call,
8701 because DECL_TABLE may be expanded and SLOT
8702 would no longer be a valid pointer. */
8703 entry = XCNEW (struct decl_table_entry);
8704 entry->orig = targ;
8705 entry->copy = copy;
8706 *slot = entry;
8707
8708 /* If TARG is not a declaration DIE, we need to copy its
8709 children. */
8710 if (!is_declaration_die (targ))
8711 {
8712 FOR_EACH_CHILD (
8713 targ, c,
8714 add_child_die (copy,
8715 clone_tree_partial (c, decl_table)));
8716 }
8717
8718 /* Make sure the cloned tree is marked as part of the
8719 type unit. */
8720 mark_dies (copy);
8721
8722 /* If TARG has surrounding context, copy its ancestor tree
8723 into the new type unit. */
8724 if (targ->die_parent != NULL
8725 && !is_unit_die (targ->die_parent))
8726 parent = copy_ancestor_tree (unit, targ->die_parent,
8727 decl_table);
8728
8729 add_child_die (parent, copy);
8730 a->dw_attr_val.v.val_die_ref.die = copy;
8731
8732 /* Make sure the newly-copied DIE is walked. If it was
8733 installed in a previously-added context, it won't
8734 get visited otherwise. */
8735 if (parent != unit)
8736 {
8737 /* Find the highest point of the newly-added tree,
8738 mark each node along the way, and walk from there. */
8739 parent->die_mark = 1;
8740 while (parent->die_parent
8741 && parent->die_parent->die_mark == 0)
8742 {
8743 parent = parent->die_parent;
8744 parent->die_mark = 1;
8745 }
8746 copy_decls_walk (unit, parent, decl_table);
8747 }
8748 }
8749 }
8750 }
8751
8752 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8753 }
8754
8755 /* Copy declarations for "unworthy" types into the new comdat section.
8756 Incomplete types, modified types, and certain other types aren't broken
8757 out into comdat sections of their own, so they don't have a signature,
8758 and we need to copy the declaration into the same section so that we
8759 don't have an external reference. */
8760
8761 static void
8762 copy_decls_for_unworthy_types (dw_die_ref unit)
8763 {
8764 mark_dies (unit);
8765 decl_hash_type decl_table (10);
8766 copy_decls_walk (unit, unit, &decl_table);
8767 unmark_dies (unit);
8768 }
8769
8770 /* Traverse the DIE and add a sibling attribute if it may have the
8771 effect of speeding up access to siblings. To save some space,
8772 avoid generating sibling attributes for DIEs without children. */
8773
8774 static void
8775 add_sibling_attributes (dw_die_ref die)
8776 {
8777 dw_die_ref c;
8778
8779 if (! die->die_child)
8780 return;
8781
8782 if (die->die_parent && die != die->die_parent->die_child)
8783 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8784
8785 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8786 }
8787
8788 /* Output all location lists for the DIE and its children. */
8789
8790 static void
8791 output_location_lists (dw_die_ref die)
8792 {
8793 dw_die_ref c;
8794 dw_attr_node *a;
8795 unsigned ix;
8796
8797 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8798 if (AT_class (a) == dw_val_class_loc_list)
8799 output_loc_list (AT_loc_list (a));
8800
8801 FOR_EACH_CHILD (die, c, output_location_lists (c));
8802 }
8803
8804 /* During assign_location_list_indexes and output_loclists_offsets this
8805 holds the current index; afterwards it holds the number of assigned
8806 indexes (i.e. how large the .debug_loclists* offset table should be). */
8807 static unsigned int loc_list_idx;
8808
8809 /* Output all location list offsets for the DIE and its children. */
8810
8811 static void
8812 output_loclists_offsets (dw_die_ref die)
8813 {
8814 dw_die_ref c;
8815 dw_attr_node *a;
8816 unsigned ix;
8817
8818 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8819 if (AT_class (a) == dw_val_class_loc_list)
8820 {
8821 dw_loc_list_ref l = AT_loc_list (a);
8822 if (l->offset_emitted)
8823 continue;
8824 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8825 loc_section_label, NULL);
8826 gcc_assert (l->hash == loc_list_idx);
8827 loc_list_idx++;
8828 l->offset_emitted = true;
8829 }
8830
8831 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8832 }
8833
8834 /* Recursively set indexes of location lists. */
8835
8836 static void
8837 assign_location_list_indexes (dw_die_ref die)
8838 {
8839 dw_die_ref c;
8840 dw_attr_node *a;
8841 unsigned ix;
8842
8843 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8844 if (AT_class (a) == dw_val_class_loc_list)
8845 {
8846 dw_loc_list_ref list = AT_loc_list (a);
8847 if (!list->num_assigned)
8848 {
8849 list->num_assigned = true;
8850 list->hash = loc_list_idx++;
8851 }
8852 }
8853
8854 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8855 }
8856
8857 /* We want to limit the number of external references, because they are
8858 larger than local references: a relocation takes multiple words, and
8859 even a sig8 reference is always eight bytes, whereas a local reference
8860 can be as small as one byte (though DW_FORM_ref is usually 4 in GCC).
8861 So if we encounter multiple external references to the same type DIE, we
8862 make a local typedef stub for it and redirect all references there.
8863
8864 This is the element of the hash table for keeping track of these
8865 references. */
8866
8867 struct external_ref
8868 {
8869 dw_die_ref type;
8870 dw_die_ref stub;
8871 unsigned n_refs;
8872 };
8873
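/* Example (editorial addition): if this CU refers three times to a comdat
   type T that lives in another unit, the walk below records n_refs == 3
   for T.  dwarf2_build_local_stub then creates one local stub (unless
   dwarf_strict): a nameless DIE of T's tag carrying DW_AT_signature when
   T is a comdat type, or a nameless DW_TAG_typedef with DW_AT_type
   otherwise.  build_abbrev_table later redirects the three references to
   that stub, so only a single external reference remains.  */
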
8874 /* Hashtable helpers. */
8875
8876 struct external_ref_hasher : free_ptr_hash <external_ref>
8877 {
8878 static inline hashval_t hash (const external_ref *);
8879 static inline bool equal (const external_ref *, const external_ref *);
8880 };
8881
8882 inline hashval_t
8883 external_ref_hasher::hash (const external_ref *r)
8884 {
8885 dw_die_ref die = r->type;
8886 hashval_t h = 0;
8887
8888 /* We can't use the address of the DIE for hashing, because
8889 that will make the order of the stub DIEs non-deterministic. */
8890 if (! die->comdat_type_p)
8891 /* We have a symbol; use it to compute a hash. */
8892 h = htab_hash_string (die->die_id.die_symbol);
8893 else
8894 {
8895 /* We have a type signature; use a subset of the bits as the hash.
8896 The 8-byte signature is at least as large as hashval_t. */
8897 comdat_type_node *type_node = die->die_id.die_type_node;
8898 memcpy (&h, type_node->signature, sizeof (h));
8899 }
8900 return h;
8901 }
8902
8903 inline bool
8904 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8905 {
8906 return r1->type == r2->type;
8907 }
8908
8909 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8910
8911 /* Return a pointer to the external_ref for references to DIE. */
8912
8913 static struct external_ref *
8914 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8915 {
8916 struct external_ref ref, *ref_p;
8917 external_ref **slot;
8918
8919 ref.type = die;
8920 slot = map->find_slot (&ref, INSERT);
8921 if (*slot != HTAB_EMPTY_ENTRY)
8922 return *slot;
8923
8924 ref_p = XCNEW (struct external_ref);
8925 ref_p->type = die;
8926 *slot = ref_p;
8927 return ref_p;
8928 }
8929
8930 /* Subroutine of optimize_external_refs, below.
8931
8932 If we see a type skeleton, record it as our stub. If we see external
8933 references, remember how many we've seen. */
8934
8935 static void
8936 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8937 {
8938 dw_die_ref c;
8939 dw_attr_node *a;
8940 unsigned ix;
8941 struct external_ref *ref_p;
8942
8943 if (is_type_die (die)
8944 && (c = get_AT_ref (die, DW_AT_signature)))
8945 {
8946 /* This is a local skeleton; use it for local references. */
8947 ref_p = lookup_external_ref (map, c);
8948 ref_p->stub = die;
8949 }
8950
8951 /* Scan the DIE references, and remember any that refer to DIEs from
8952 other CUs (i.e. those which are not marked). */
8953 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8954 if (AT_class (a) == dw_val_class_die_ref
8955 && (c = AT_ref (a))->die_mark == 0
8956 && is_type_die (c))
8957 {
8958 ref_p = lookup_external_ref (map, c);
8959 ref_p->n_refs++;
8960 }
8961
8962 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8963 }
8964
8965 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8966 points to an external_ref, DATA is the CU we're processing. If we don't
8967 already have a local stub, and we have multiple refs, build a stub. */
8968
8969 int
8970 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8971 {
8972 struct external_ref *ref_p = *slot;
8973
8974 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
8975 {
8976 /* We have multiple references to this type, so build a small stub.
8977 Both of these forms are a bit dodgy from the perspective of the
8978 DWARF standard, since technically they should have names. */
8979 dw_die_ref cu = data;
8980 dw_die_ref type = ref_p->type;
8981 dw_die_ref stub = NULL;
8982
8983 if (type->comdat_type_p)
8984 {
8985 /* If we refer to this type via sig8, use AT_signature. */
8986 stub = new_die (type->die_tag, cu, NULL_TREE);
8987 add_AT_die_ref (stub, DW_AT_signature, type);
8988 }
8989 else
8990 {
8991 /* Otherwise, use a typedef with no name. */
8992 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
8993 add_AT_die_ref (stub, DW_AT_type, type);
8994 }
8995
8996 stub->die_mark++;
8997 ref_p->stub = stub;
8998 }
8999 return 1;
9000 }
9001
9002 /* DIE is a unit; look through all the DIE references to see if there are
9003 any external references to types, and if so, create local stubs for
9004 them which will be applied in build_abbrev_table. This is useful because
9005 references to local DIEs are smaller. */
9006
9007 static external_ref_hash_type *
9008 optimize_external_refs (dw_die_ref die)
9009 {
9010 external_ref_hash_type *map = new external_ref_hash_type (10);
9011 optimize_external_refs_1 (die, map);
9012 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
9013 return map;
9014 }
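
/* A sketch of the effect, with illustrative DIE names: if a CU contains

     DW_TAG_variable a:  DW_AT_type -> struct foo (defined in another unit)
     DW_TAG_variable b:  DW_AT_type -> struct foo (defined in another unit)

   and we are not in strict-DWARF mode, the CU gains one unnamed stub

     DW_TAG_typedef:     DW_AT_type -> struct foo

   and build_abbrev_table later redirects both variables' DW_AT_type to
   the stub, so each of them costs only a small local reference instead
   of a relocated DW_FORM_ref_addr (for comdat types the stub instead
   repeats the type's tag with a DW_AT_signature, as built above).  */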
9015
9016 /* The following 3 variables are temporaries that are computed only during the
9017 build_abbrev_table call and used and released during the following
9018 optimize_abbrev_table call. */
9019
9020 /* First abbrev_id that can be optimized based on usage. */
9021 static unsigned int abbrev_opt_start;
9022
9023 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
9024 abbrev_id smaller than this, because they must be already sized
9025 during build_abbrev_table). */
9026 static unsigned int abbrev_opt_base_type_end;
9027
9028 /* Vector of usage counts during build_abbrev_table. Indexed by
9029 abbrev_id - abbrev_opt_start. */
9030 static vec<unsigned int> abbrev_usage_count;
9031
9032 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
9033 static vec<dw_die_ref> sorted_abbrev_dies;
9034
9035 /* The format of each DIE (and its attribute value pairs) is encoded in an
9036 abbreviation table. This routine builds the abbreviation table and assigns
9037 a unique abbreviation id for each abbreviation entry. The children of each
9038 die are visited recursively. */
9039
9040 static void
9041 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
9042 {
9043 unsigned int abbrev_id = 0;
9044 dw_die_ref c;
9045 dw_attr_node *a;
9046 unsigned ix;
9047 dw_die_ref abbrev;
9048
9049 /* Scan the DIE references, and replace any that refer to
9050 DIEs from other CUs (i.e. those which are not marked) with
9051 the local stubs we built in optimize_external_refs. */
9052 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9053 if (AT_class (a) == dw_val_class_die_ref
9054 && (c = AT_ref (a))->die_mark == 0)
9055 {
9056 struct external_ref *ref_p;
9057 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9058
9059 ref_p = lookup_external_ref (extern_map, c);
9060 if (ref_p->stub && ref_p->stub != die)
9061 change_AT_die_ref (a, ref_p->stub);
9062 else
9063 /* We aren't changing this reference, so mark it external. */
9064 set_AT_ref_external (a, 1);
9065 }
9066
9067 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9068 {
9069 dw_attr_node *die_a, *abbrev_a;
9070 unsigned ix;
9071 bool ok = true;
9072
9073 if (abbrev_id == 0)
9074 continue;
9075 if (abbrev->die_tag != die->die_tag)
9076 continue;
9077 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9078 continue;
9079
9080 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9081 continue;
9082
9083 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9084 {
9085 abbrev_a = &(*abbrev->die_attr)[ix];
9086 if ((abbrev_a->dw_attr != die_a->dw_attr)
9087 || (value_format (abbrev_a) != value_format (die_a)))
9088 {
9089 ok = false;
9090 break;
9091 }
9092 }
9093 if (ok)
9094 break;
9095 }
9096
9097 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9098 {
9099 vec_safe_push (abbrev_die_table, die);
9100 if (abbrev_opt_start)
9101 abbrev_usage_count.safe_push (0);
9102 }
9103 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9104 {
9105 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9106 sorted_abbrev_dies.safe_push (die);
9107 }
9108
9109 die->die_abbrev = abbrev_id;
9110 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9111 }
9112
9113 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9114 by die_abbrev's usage count, from the most commonly used
9115 abbreviation to the least. */
9116
9117 static int
9118 die_abbrev_cmp (const void *p1, const void *p2)
9119 {
9120 dw_die_ref die1 = *(const dw_die_ref *) p1;
9121 dw_die_ref die2 = *(const dw_die_ref *) p2;
9122
9123 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9124 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9125
9126 if (die1->die_abbrev >= abbrev_opt_base_type_end
9127 && die2->die_abbrev >= abbrev_opt_base_type_end)
9128 {
9129 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9130 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9131 return -1;
9132 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9133 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9134 return 1;
9135 }
9136
9137 /* Stabilize the sort. */
9138 if (die1->die_abbrev < die2->die_abbrev)
9139 return -1;
9140 if (die1->die_abbrev > die2->die_abbrev)
9141 return 1;
9142
9143 return 0;
9144 }
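
/* The point of sorting by usage: the abbreviation code is emitted as a
   uleb128 at the start of every DIE, so codes 1..127 cost a single byte
   each.  Giving the most frequently used abbreviations the smallest
   codes (see optimize_abbrev_table below) keeps the bulk of the DIEs at
   that one-byte cost.  Ties, and all the base-type abbreviations whose
   DIE sizes are already fixed, fall back to the original abbrev id, so
   the ordering is deterministic.  */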
9145
9146 /* Convert dw_val_class_const, dw_val_class_unsigned_const and
9147 dw_val_class_file class attributes of the DIEs between
9148 sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1] into their
9149 dw_val_class_*_implicit counterparts. */
9150
9151 static void
9152 optimize_implicit_const (unsigned int first_id, unsigned int end,
9153 vec<bool> &implicit_consts)
9154 {
9155 /* It never makes sense if there is just one DIE using the abbreviation. */
9156 if (end < first_id + 2)
9157 return;
9158
9159 dw_attr_node *a;
9160 unsigned ix, i;
9161 dw_die_ref die = sorted_abbrev_dies[first_id];
9162 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9163 if (implicit_consts[ix])
9164 {
9165 enum dw_val_class new_class = dw_val_class_none;
9166 switch (AT_class (a))
9167 {
9168 case dw_val_class_unsigned_const:
9169 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9170 continue;
9171
9172 /* The .debug_abbrev section will grow by
9173 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9174 in all the DIEs using that abbreviation. */
9175 if (constant_size (AT_unsigned (a)) * (end - first_id)
9176 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9177 continue;
9178
9179 new_class = dw_val_class_unsigned_const_implicit;
9180 break;
9181
9182 case dw_val_class_const:
9183 new_class = dw_val_class_const_implicit;
9184 break;
9185
9186 case dw_val_class_file:
9187 new_class = dw_val_class_file_implicit;
9188 break;
9189
9190 default:
9191 continue;
9192 }
9193 for (i = first_id; i < end; i++)
9194 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9195 = new_class;
9196 }
9197 }
9198
9199 /* Attempt to optimize the abbreviation table for abbreviations from
9200 abbrev_opt_start onwards. */
9201
9202 static void
9203 optimize_abbrev_table (void)
9204 {
9205 if (abbrev_opt_start
9206 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9207 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9208 {
9209 auto_vec<bool, 32> implicit_consts;
9210 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9211
9212 unsigned int abbrev_id = abbrev_opt_start - 1;
9213 unsigned int first_id = ~0U;
9214 unsigned int last_abbrev_id = 0;
9215 unsigned int i;
9216 dw_die_ref die;
9217 if (abbrev_opt_base_type_end > abbrev_opt_start)
9218 abbrev_id = abbrev_opt_base_type_end - 1;
9219 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9220 most commonly used abbreviations come first. */
9221 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9222 {
9223 dw_attr_node *a;
9224 unsigned ix;
9225
9226 /* If calc_base_type_die_sizes has been called, the CU and
9227 base types after it can't be optimized, because we've already
9228 calculated their DIE offsets. We've sorted them first. */
9229 if (die->die_abbrev < abbrev_opt_base_type_end)
9230 continue;
9231 if (die->die_abbrev != last_abbrev_id)
9232 {
9233 last_abbrev_id = die->die_abbrev;
9234 if (dwarf_version >= 5 && first_id != ~0U)
9235 optimize_implicit_const (first_id, i, implicit_consts);
9236 abbrev_id++;
9237 (*abbrev_die_table)[abbrev_id] = die;
9238 if (dwarf_version >= 5)
9239 {
9240 first_id = i;
9241 implicit_consts.truncate (0);
9242
9243 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9244 switch (AT_class (a))
9245 {
9246 case dw_val_class_const:
9247 case dw_val_class_unsigned_const:
9248 case dw_val_class_file:
9249 implicit_consts.safe_push (true);
9250 break;
9251 default:
9252 implicit_consts.safe_push (false);
9253 break;
9254 }
9255 }
9256 }
9257 else if (dwarf_version >= 5)
9258 {
9259 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9260 if (!implicit_consts[ix])
9261 continue;
9262 else
9263 {
9264 dw_attr_node *other_a
9265 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9266 if (!dw_val_equal_p (&a->dw_attr_val,
9267 &other_a->dw_attr_val))
9268 implicit_consts[ix] = false;
9269 }
9270 }
9271 die->die_abbrev = abbrev_id;
9272 }
9273 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9274 if (dwarf_version >= 5 && first_id != ~0U)
9275 optimize_implicit_const (first_id, i, implicit_consts);
9276 }
9277
9278 abbrev_opt_start = 0;
9279 abbrev_opt_base_type_end = 0;
9280 abbrev_usage_count.release ();
9281 sorted_abbrev_dies.release ();
9282 }
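
/* A sketch of the DW_FORM_implicit_const part of the optimization above:
   if every DIE sharing an abbreviation has, say, the same DW_AT_decl_file 1
   and DW_AT_decl_line 7 (values illustrative), those constants migrate
   into the abbreviation itself,

	.debug_abbrev:	DW_AT_decl_file  DW_FORM_implicit_const  1
			DW_AT_decl_line  DW_FORM_implicit_const  7

   and the DIEs in .debug_info no longer spend any bytes on them.  For
   unsigned constants optimize_implicit_const additionally checks that
   the bytes saved across all the DIEs exceed the sleb128 by which the
   abbreviation grows.  */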
9283 \f
9284 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9285
9286 static int
9287 constant_size (unsigned HOST_WIDE_INT value)
9288 {
9289 int log;
9290
9291 if (value == 0)
9292 log = 0;
9293 else
9294 log = floor_log2 (value);
9295
9296 log = log / 8;
9297 log = 1 << (floor_log2 (log) + 1);
9298
9299 return log;
9300 }
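
/* In other words, constant_size rounds the byte length up to a power of
   two: values 0 .. 0xff yield 1, up to 0xffff yield 2, up to 0xffffffff
   yield 4, and anything larger yields 8.  The value 0 also comes out as
   one byte; that relies on floor_log2 (0) returning -1.  */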
9301
9302 /* Return the size of a DIE as it is represented in the
9303 .debug_info section. */
9304
9305 static unsigned long
9306 size_of_die (dw_die_ref die)
9307 {
9308 unsigned long size = 0;
9309 dw_attr_node *a;
9310 unsigned ix;
9311 enum dwarf_form form;
9312
9313 size += size_of_uleb128 (die->die_abbrev);
9314 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9315 {
9316 switch (AT_class (a))
9317 {
9318 case dw_val_class_addr:
9319 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9320 {
9321 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9322 size += size_of_uleb128 (AT_index (a));
9323 }
9324 else
9325 size += DWARF2_ADDR_SIZE;
9326 break;
9327 case dw_val_class_offset:
9328 size += DWARF_OFFSET_SIZE;
9329 break;
9330 case dw_val_class_loc:
9331 {
9332 unsigned long lsize = size_of_locs (AT_loc (a));
9333
9334 /* Block length. */
9335 if (dwarf_version >= 4)
9336 size += size_of_uleb128 (lsize);
9337 else
9338 size += constant_size (lsize);
9339 size += lsize;
9340 }
9341 break;
9342 case dw_val_class_loc_list:
9343 case dw_val_class_view_list:
9344 if (dwarf_split_debug_info && dwarf_version >= 5)
9345 {
9346 gcc_assert (AT_loc_list (a)->num_assigned);
9347 size += size_of_uleb128 (AT_loc_list (a)->hash);
9348 }
9349 else
9350 size += DWARF_OFFSET_SIZE;
9351 break;
9352 case dw_val_class_range_list:
9353 if (value_format (a) == DW_FORM_rnglistx)
9354 {
9355 gcc_assert (rnglist_idx);
9356 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9357 size += size_of_uleb128 (r->idx);
9358 }
9359 else
9360 size += DWARF_OFFSET_SIZE;
9361 break;
9362 case dw_val_class_const:
9363 size += size_of_sleb128 (AT_int (a));
9364 break;
9365 case dw_val_class_unsigned_const:
9366 {
9367 int csize = constant_size (AT_unsigned (a));
9368 if (dwarf_version == 3
9369 && a->dw_attr == DW_AT_data_member_location
9370 && csize >= 4)
9371 size += size_of_uleb128 (AT_unsigned (a));
9372 else
9373 size += csize;
9374 }
9375 break;
9376 case dw_val_class_symview:
9377 if (symview_upper_bound <= 0xff)
9378 size += 1;
9379 else if (symview_upper_bound <= 0xffff)
9380 size += 2;
9381 else if (symview_upper_bound <= 0xffffffff)
9382 size += 4;
9383 else
9384 size += 8;
9385 break;
9386 case dw_val_class_const_implicit:
9387 case dw_val_class_unsigned_const_implicit:
9388 case dw_val_class_file_implicit:
9389 /* These occupy no size in the DIE, just an extra sleb128 in
9390 .debug_abbrev. */
9391 break;
9392 case dw_val_class_const_double:
9393 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9394 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9395 size++; /* block */
9396 break;
9397 case dw_val_class_wide_int:
9398 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9399 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9400 if (get_full_len (*a->dw_attr_val.v.val_wide)
9401 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9402 size++; /* block */
9403 break;
9404 case dw_val_class_vec:
9405 size += constant_size (a->dw_attr_val.v.val_vec.length
9406 * a->dw_attr_val.v.val_vec.elt_size)
9407 + a->dw_attr_val.v.val_vec.length
9408 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9409 break;
9410 case dw_val_class_flag:
9411 if (dwarf_version >= 4)
9412 /* Currently all add_AT_flag calls pass in 1 as last argument,
9413 so DW_FORM_flag_present can be used. If that ever changes,
9414 we'll need to use DW_FORM_flag and have some optimization
9415 in build_abbrev_table that will change those to
9416 DW_FORM_flag_present if it is set to 1 in all DIEs using
9417 the same abbrev entry. */
9418 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9419 else
9420 size += 1;
9421 break;
9422 case dw_val_class_die_ref:
9423 if (AT_ref_external (a))
9424 {
9425 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9426 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9427 is sized by target address length, whereas in DWARF3
9428 it's always sized as an offset. */
9429 if (use_debug_types)
9430 size += DWARF_TYPE_SIGNATURE_SIZE;
9431 else if (dwarf_version == 2)
9432 size += DWARF2_ADDR_SIZE;
9433 else
9434 size += DWARF_OFFSET_SIZE;
9435 }
9436 else
9437 size += DWARF_OFFSET_SIZE;
9438 break;
9439 case dw_val_class_fde_ref:
9440 size += DWARF_OFFSET_SIZE;
9441 break;
9442 case dw_val_class_lbl_id:
9443 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9444 {
9445 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9446 size += size_of_uleb128 (AT_index (a));
9447 }
9448 else
9449 size += DWARF2_ADDR_SIZE;
9450 break;
9451 case dw_val_class_lineptr:
9452 case dw_val_class_macptr:
9453 case dw_val_class_loclistsptr:
9454 size += DWARF_OFFSET_SIZE;
9455 break;
9456 case dw_val_class_str:
9457 form = AT_string_form (a);
9458 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9459 size += DWARF_OFFSET_SIZE;
9460 else if (form == dwarf_FORM (DW_FORM_strx))
9461 size += size_of_uleb128 (AT_index (a));
9462 else
9463 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9464 break;
9465 case dw_val_class_file:
9466 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9467 break;
9468 case dw_val_class_data8:
9469 size += 8;
9470 break;
9471 case dw_val_class_vms_delta:
9472 size += DWARF_OFFSET_SIZE;
9473 break;
9474 case dw_val_class_high_pc:
9475 size += DWARF2_ADDR_SIZE;
9476 break;
9477 case dw_val_class_discr_value:
9478 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9479 break;
9480 case dw_val_class_discr_list:
9481 {
9482 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9483
9484 /* This is a block, so we have the block length and then its
9485 data. */
9486 size += constant_size (block_size) + block_size;
9487 }
9488 break;
9489 default:
9490 gcc_unreachable ();
9491 }
9492 }
9493
9494 return size;
9495 }
9496
9497 /* Size the debugging information associated with a given DIE. Visits the
9498 DIE's children recursively. Updates the global variable next_die_offset
9499 each time through. Uses the current value of next_die_offset to update the
9500 die_offset field in each DIE. */
9501
9502 static void
9503 calc_die_sizes (dw_die_ref die)
9504 {
9505 dw_die_ref c;
9506
9507 gcc_assert (die->die_offset == 0
9508 || (unsigned long int) die->die_offset == next_die_offset);
9509 die->die_offset = next_die_offset;
9510 next_die_offset += size_of_die (die);
9511
9512 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9513
9514 if (die->die_child != NULL)
9515 /* Count the null byte used to terminate sibling lists. */
9516 next_die_offset += 1;
9517 }
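
/* Illustration with made-up sizes and a DWARF 2-4 32-bit CU header of
   0xb bytes:

	DW_TAG_compile_unit	offset 0xb,  size 0x20
	  DW_TAG_base_type	offset 0x2b, size 0x7
	  DW_TAG_variable	offset 0x32, size 0xd
	  <0>			null terminator of the CU's children

   the terminating zero byte after the last child of any DIE that has
   children is what the `next_die_offset += 1' above accounts for.  */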
9518
9519 /* Size just the base type children at the start of the CU.
9520 This is needed because build_abbrev_table needs to size location
9521 descriptions, and sizing of type-based stack ops needs to know the
9522 die_offset values for the base types. */
9523
9524 static void
9525 calc_base_type_die_sizes (void)
9526 {
9527 unsigned long die_offset = (dwarf_split_debug_info
9528 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9529 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9530 unsigned int i;
9531 dw_die_ref base_type;
9532 #if ENABLE_ASSERT_CHECKING
9533 dw_die_ref prev = comp_unit_die ()->die_child;
9534 #endif
9535
9536 die_offset += size_of_die (comp_unit_die ());
9537 for (i = 0; base_types.iterate (i, &base_type); i++)
9538 {
9539 #if ENABLE_ASSERT_CHECKING
9540 gcc_assert (base_type->die_offset == 0
9541 && prev->die_sib == base_type
9542 && base_type->die_child == NULL
9543 && base_type->die_abbrev);
9544 prev = base_type;
9545 #endif
9546 if (abbrev_opt_start
9547 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9548 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9549 base_type->die_offset = die_offset;
9550 die_offset += size_of_die (base_type);
9551 }
9552 }
9553
9554 /* Set the marks for a die and its children. We do this so
9555 that we know whether or not a reference needs to use DW_FORM_ref_addr; only
9556 DIEs in the same CU will be marked. We used to clear out the offset
9557 and use that as the flag, but ran into ordering problems. */
9558
9559 static void
9560 mark_dies (dw_die_ref die)
9561 {
9562 dw_die_ref c;
9563
9564 gcc_assert (!die->die_mark);
9565
9566 die->die_mark = 1;
9567 FOR_EACH_CHILD (die, c, mark_dies (c));
9568 }
9569
9570 /* Clear the marks for a die and its children. */
9571
9572 static void
9573 unmark_dies (dw_die_ref die)
9574 {
9575 dw_die_ref c;
9576
9577 if (! use_debug_types)
9578 gcc_assert (die->die_mark);
9579
9580 die->die_mark = 0;
9581 FOR_EACH_CHILD (die, c, unmark_dies (c));
9582 }
9583
9584 /* Clear the marks for a die, its children and referred dies. */
9585
9586 static void
9587 unmark_all_dies (dw_die_ref die)
9588 {
9589 dw_die_ref c;
9590 dw_attr_node *a;
9591 unsigned ix;
9592
9593 if (!die->die_mark)
9594 return;
9595 die->die_mark = 0;
9596
9597 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9598
9599 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9600 if (AT_class (a) == dw_val_class_die_ref)
9601 unmark_all_dies (AT_ref (a));
9602 }
9603
9604 /* Calculate if the entry should appear in the final output file. It may be
9605 from a pruned type. */
9606
9607 static bool
9608 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9609 {
9610 /* By limiting gnu pubnames to definitions only, gold can generate a
9611 gdb index without entries for declarations, which don't include
9612 enough information to be useful. */
9613 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9614 return false;
9615
9616 if (table == pubname_table)
9617 {
9618 /* Enumerator names are part of the pubname table, but the
9619 parent DW_TAG_enumeration_type die may have been pruned.
9620 Don't output them if that is the case. */
9621 if (p->die->die_tag == DW_TAG_enumerator &&
9622 (p->die->die_parent == NULL
9623 || !p->die->die_parent->die_perennial_p))
9624 return false;
9625
9626 /* Everything else in the pubname table is included. */
9627 return true;
9628 }
9629
9630 /* The pubtypes table shouldn't include types that have been
9631 pruned. */
9632 return (p->die->die_offset != 0
9633 || !flag_eliminate_unused_debug_types);
9634 }
9635
9636 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9637 generated for the compilation unit. */
9638
9639 static unsigned long
9640 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9641 {
9642 unsigned long size;
9643 unsigned i;
9644 pubname_entry *p;
9645 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9646
9647 size = DWARF_PUBNAMES_HEADER_SIZE;
9648 FOR_EACH_VEC_ELT (*names, i, p)
9649 if (include_pubname_in_output (names, p))
9650 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9651
9652 size += DWARF_OFFSET_SIZE;
9653 return size;
9654 }
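
/* Each entry counted above is a DWARF_OFFSET_SIZE DIE offset followed by
   the NUL-terminated name, plus one extra flag byte when gnu-style
   pubnames/pubtypes are requested (debug_generate_pub_sections == 2);
   the table is closed by one more zero offset word, the final
   DWARF_OFFSET_SIZE added above.  */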
9655
9656 /* Return the size of the information in the .debug_aranges section. */
9657
9658 static unsigned long
9659 size_of_aranges (void)
9660 {
9661 unsigned long size;
9662
9663 size = DWARF_ARANGES_HEADER_SIZE;
9664
9665 /* Count the address/length pair for this compilation unit. */
9666 if (text_section_used)
9667 size += 2 * DWARF2_ADDR_SIZE;
9668 if (cold_text_section_used)
9669 size += 2 * DWARF2_ADDR_SIZE;
9670 if (have_multiple_function_sections)
9671 {
9672 unsigned fde_idx;
9673 dw_fde_ref fde;
9674
9675 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9676 {
9677 if (DECL_IGNORED_P (fde->decl))
9678 continue;
9679 if (!fde->in_std_section)
9680 size += 2 * DWARF2_ADDR_SIZE;
9681 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9682 size += 2 * DWARF2_ADDR_SIZE;
9683 }
9684 }
9685
9686 /* Count the two zero words used to terminate the address range table. */
9687 size += 2 * DWARF2_ADDR_SIZE;
9688 return size;
9689 }
9690 \f
9691 /* Select the encoding of an attribute value. */
9692
9693 static enum dwarf_form
9694 value_format (dw_attr_node *a)
9695 {
9696 switch (AT_class (a))
9697 {
9698 case dw_val_class_addr:
9699 /* Only very few attributes allow DW_FORM_addr. */
9700 switch (a->dw_attr)
9701 {
9702 case DW_AT_low_pc:
9703 case DW_AT_high_pc:
9704 case DW_AT_entry_pc:
9705 case DW_AT_trampoline:
9706 return (AT_index (a) == NOT_INDEXED
9707 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9708 default:
9709 break;
9710 }
9711 switch (DWARF2_ADDR_SIZE)
9712 {
9713 case 1:
9714 return DW_FORM_data1;
9715 case 2:
9716 return DW_FORM_data2;
9717 case 4:
9718 return DW_FORM_data4;
9719 case 8:
9720 return DW_FORM_data8;
9721 default:
9722 gcc_unreachable ();
9723 }
9724 case dw_val_class_loc_list:
9725 case dw_val_class_view_list:
9726 if (dwarf_split_debug_info
9727 && dwarf_version >= 5
9728 && AT_loc_list (a)->num_assigned)
9729 return DW_FORM_loclistx;
9730 /* FALLTHRU */
9731 case dw_val_class_range_list:
9732 /* For range lists in DWARF 5, use DW_FORM_rnglistx in .debug_info.dwo,
9733 but DW_FORM_sec_offset in .debug_info, which is shorter if we only
9734 care about the sizes of .debug* sections in shared libraries and
9735 executables and don't take into account relocations that affect just
9736 relocatable objects - for DW_FORM_rnglistx we'd have to emit an offset
9737 table in the .debug_rnglists section. */
9738 if (dwarf_split_debug_info
9739 && dwarf_version >= 5
9740 && AT_class (a) == dw_val_class_range_list
9741 && rnglist_idx
9742 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9743 return DW_FORM_rnglistx;
9744 if (dwarf_version >= 4)
9745 return DW_FORM_sec_offset;
9746 /* FALLTHRU */
9747 case dw_val_class_vms_delta:
9748 case dw_val_class_offset:
9749 switch (DWARF_OFFSET_SIZE)
9750 {
9751 case 4:
9752 return DW_FORM_data4;
9753 case 8:
9754 return DW_FORM_data8;
9755 default:
9756 gcc_unreachable ();
9757 }
9758 case dw_val_class_loc:
9759 if (dwarf_version >= 4)
9760 return DW_FORM_exprloc;
9761 switch (constant_size (size_of_locs (AT_loc (a))))
9762 {
9763 case 1:
9764 return DW_FORM_block1;
9765 case 2:
9766 return DW_FORM_block2;
9767 case 4:
9768 return DW_FORM_block4;
9769 default:
9770 gcc_unreachable ();
9771 }
9772 case dw_val_class_const:
9773 return DW_FORM_sdata;
9774 case dw_val_class_unsigned_const:
9775 switch (constant_size (AT_unsigned (a)))
9776 {
9777 case 1:
9778 return DW_FORM_data1;
9779 case 2:
9780 return DW_FORM_data2;
9781 case 4:
9782 /* In DWARF3 DW_AT_data_member_location with
9783 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9784 constant, so we need to use DW_FORM_udata if we need
9785 a large constant. */
9786 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9787 return DW_FORM_udata;
9788 return DW_FORM_data4;
9789 case 8:
9790 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9791 return DW_FORM_udata;
9792 return DW_FORM_data8;
9793 default:
9794 gcc_unreachable ();
9795 }
9796 case dw_val_class_const_implicit:
9797 case dw_val_class_unsigned_const_implicit:
9798 case dw_val_class_file_implicit:
9799 return DW_FORM_implicit_const;
9800 case dw_val_class_const_double:
9801 switch (HOST_BITS_PER_WIDE_INT)
9802 {
9803 case 8:
9804 return DW_FORM_data2;
9805 case 16:
9806 return DW_FORM_data4;
9807 case 32:
9808 return DW_FORM_data8;
9809 case 64:
9810 if (dwarf_version >= 5)
9811 return DW_FORM_data16;
9812 /* FALLTHRU */
9813 default:
9814 return DW_FORM_block1;
9815 }
9816 case dw_val_class_wide_int:
9817 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9818 {
9819 case 8:
9820 return DW_FORM_data1;
9821 case 16:
9822 return DW_FORM_data2;
9823 case 32:
9824 return DW_FORM_data4;
9825 case 64:
9826 return DW_FORM_data8;
9827 case 128:
9828 if (dwarf_version >= 5)
9829 return DW_FORM_data16;
9830 /* FALLTHRU */
9831 default:
9832 return DW_FORM_block1;
9833 }
9834 case dw_val_class_symview:
9835 /* ??? We might use uleb128, but then we'd have to compute
9836 .debug_info offsets in the assembler. */
9837 if (symview_upper_bound <= 0xff)
9838 return DW_FORM_data1;
9839 else if (symview_upper_bound <= 0xffff)
9840 return DW_FORM_data2;
9841 else if (symview_upper_bound <= 0xffffffff)
9842 return DW_FORM_data4;
9843 else
9844 return DW_FORM_data8;
9845 case dw_val_class_vec:
9846 switch (constant_size (a->dw_attr_val.v.val_vec.length
9847 * a->dw_attr_val.v.val_vec.elt_size))
9848 {
9849 case 1:
9850 return DW_FORM_block1;
9851 case 2:
9852 return DW_FORM_block2;
9853 case 4:
9854 return DW_FORM_block4;
9855 default:
9856 gcc_unreachable ();
9857 }
9858 case dw_val_class_flag:
9859 if (dwarf_version >= 4)
9860 {
9861 /* Currently all add_AT_flag calls pass in 1 as last argument,
9862 so DW_FORM_flag_present can be used. If that ever changes,
9863 we'll need to use DW_FORM_flag and have some optimization
9864 in build_abbrev_table that will change those to
9865 DW_FORM_flag_present if it is set to 1 in all DIEs using
9866 the same abbrev entry. */
9867 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9868 return DW_FORM_flag_present;
9869 }
9870 return DW_FORM_flag;
9871 case dw_val_class_die_ref:
9872 if (AT_ref_external (a))
9873 return use_debug_types ? DW_FORM_ref_sig8 : DW_FORM_ref_addr;
9874 else
9875 return DW_FORM_ref;
9876 case dw_val_class_fde_ref:
9877 return DW_FORM_data;
9878 case dw_val_class_lbl_id:
9879 return (AT_index (a) == NOT_INDEXED
9880 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9881 case dw_val_class_lineptr:
9882 case dw_val_class_macptr:
9883 case dw_val_class_loclistsptr:
9884 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9885 case dw_val_class_str:
9886 return AT_string_form (a);
9887 case dw_val_class_file:
9888 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9889 {
9890 case 1:
9891 return DW_FORM_data1;
9892 case 2:
9893 return DW_FORM_data2;
9894 case 4:
9895 return DW_FORM_data4;
9896 default:
9897 gcc_unreachable ();
9898 }
9899
9900 case dw_val_class_data8:
9901 return DW_FORM_data8;
9902
9903 case dw_val_class_high_pc:
9904 switch (DWARF2_ADDR_SIZE)
9905 {
9906 case 1:
9907 return DW_FORM_data1;
9908 case 2:
9909 return DW_FORM_data2;
9910 case 4:
9911 return DW_FORM_data4;
9912 case 8:
9913 return DW_FORM_data8;
9914 default:
9915 gcc_unreachable ();
9916 }
9917
9918 case dw_val_class_discr_value:
9919 return (a->dw_attr_val.v.val_discr_value.pos
9920 ? DW_FORM_udata
9921 : DW_FORM_sdata);
9922 case dw_val_class_discr_list:
9923 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9924 {
9925 case 1:
9926 return DW_FORM_block1;
9927 case 2:
9928 return DW_FORM_block2;
9929 case 4:
9930 return DW_FORM_block4;
9931 default:
9932 gcc_unreachable ();
9933 }
9934
9935 default:
9936 gcc_unreachable ();
9937 }
9938 }
9939
9940 /* Output the encoding of an attribute value. */
9941
9942 static void
9943 output_value_format (dw_attr_node *a)
9944 {
9945 enum dwarf_form form = value_format (a);
9946
9947 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9948 }
9949
9950 /* Given a die and id, produce the appropriate abbreviations. */
9951
9952 static void
9953 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9954 {
9955 unsigned ix;
9956 dw_attr_node *a_attr;
9957
9958 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9959 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9960 dwarf_tag_name (abbrev->die_tag));
9961
9962 if (abbrev->die_child != NULL)
9963 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
9964 else
9965 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
9966
9967 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
9968 {
9969 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
9970 dwarf_attr_name (a_attr->dw_attr));
9971 output_value_format (a_attr);
9972 if (value_format (a_attr) == DW_FORM_implicit_const)
9973 {
9974 if (AT_class (a_attr) == dw_val_class_file_implicit)
9975 {
9976 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
9977 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
9978 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
9979 }
9980 else
9981 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
9982 }
9983 }
9984
9985 dw2_asm_output_data (1, 0, NULL);
9986 dw2_asm_output_data (1, 0, NULL);
9987 }
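
/* With flag_debug_asm, an abbreviation entry reads roughly as follows
   (abbrev code, attribute and form values are only illustrative, and
   the directive spellings and comment character depend on the target):

	.uleb128 0x2	# (abbrev code)
	.uleb128 0x2e	# (TAG: DW_TAG_subprogram)
	.byte	0x1	# DW_children_yes
	.uleb128 0x3	# (DW_AT_name)
	.uleb128 0xe	# (DW_FORM_strp)
	.byte	0	# terminates the attribute name/form pairs
	.byte	0
  */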
9988
9989
9990 /* Output the .debug_abbrev section which defines the DIE abbreviation
9991 table. */
9992
9993 static void
9994 output_abbrev_section (void)
9995 {
9996 unsigned int abbrev_id;
9997 dw_die_ref abbrev;
9998
9999 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
10000 if (abbrev_id != 0)
10001 output_die_abbrevs (abbrev_id, abbrev);
10002
10003 /* Terminate the table. */
10004 dw2_asm_output_data (1, 0, NULL);
10005 }
10006
10007 /* Return a new location list, given the begin and end range, and the
10008 expression. */
10009
10010 static inline dw_loc_list_ref
10011 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
10012 const char *end, var_loc_view vend,
10013 const char *section)
10014 {
10015 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
10016
10017 retlist->begin = begin;
10018 retlist->begin_entry = NULL;
10019 retlist->end = end;
10020 retlist->expr = expr;
10021 retlist->section = section;
10022 retlist->vbegin = vbegin;
10023 retlist->vend = vend;
10024
10025 return retlist;
10026 }
10027
10028 /* Return true iff there's any nonzero view number in the loc list. */
10029
10030 static bool
10031 loc_list_has_views (dw_loc_list_ref list)
10032 {
10033 if (!debug_variable_location_views)
10034 return false;
10035
10036 for (dw_loc_list_ref loc = list;
10037 loc != NULL; loc = loc->dw_loc_next)
10038 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10039 return true;
10040
10041 return false;
10042 }
10043
10044 /* Generate a new internal symbol for this location list node, if it
10045 hasn't got one yet. */
10046
10047 static inline void
10048 gen_llsym (dw_loc_list_ref list)
10049 {
10050 gcc_assert (!list->ll_symbol);
10051 list->ll_symbol = gen_internal_sym ("LLST");
10052
10053 if (!loc_list_has_views (list))
10054 return;
10055
10056 if (dwarf2out_locviews_in_attribute ())
10057 {
10058 /* Use the same label_num for the view list. */
10059 label_num--;
10060 list->vl_symbol = gen_internal_sym ("LVUS");
10061 }
10062 else
10063 list->vl_symbol = list->ll_symbol;
10064 }
10065
10066 /* Generate a symbol for the list, but only if we really want to emit
10067 it as a list. */
10068
10069 static inline void
10070 maybe_gen_llsym (dw_loc_list_ref list)
10071 {
10072 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10073 return;
10074
10075 gen_llsym (list);
10076 }
10077
10078 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10079 NULL, don't consider size of the location expression. If we're not
10080 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10081 representation in *SIZEP. */
10082
10083 static bool
10084 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10085 {
10086 /* Don't output an entry that starts and ends at the same address. */
10087 if (strcmp (curr->begin, curr->end) == 0
10088 && curr->vbegin == curr->vend && !curr->force)
10089 return true;
10090
10091 if (!sizep)
10092 return false;
10093
10094 unsigned long size = size_of_locs (curr->expr);
10095
10096 /* If the expression is too large, drop it on the floor. We could
10097 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10098 in the expression, but >= 64KB expressions for a single value
10099 in a single range are unlikely to be very useful. */
10100 if (dwarf_version < 5 && size > 0xffff)
10101 return true;
10102
10103 *sizep = size;
10104
10105 return false;
10106 }
10107
10108 /* Output a view pair loclist entry for CURR, if it requires one. */
10109
10110 static void
10111 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10112 {
10113 if (!dwarf2out_locviews_in_loclist ())
10114 return;
10115
10116 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10117 return;
10118
10119 #ifdef DW_LLE_view_pair
10120 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10121
10122 if (dwarf2out_as_locview_support)
10123 {
10124 if (ZERO_VIEW_P (curr->vbegin))
10125 dw2_asm_output_data_uleb128 (0, "Location view begin");
10126 else
10127 {
10128 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10129 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10130 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10131 }
10132
10133 if (ZERO_VIEW_P (curr->vend))
10134 dw2_asm_output_data_uleb128 (0, "Location view end");
10135 else
10136 {
10137 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10138 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10139 dw2_asm_output_symname_uleb128 (label, "Location view end");
10140 }
10141 }
10142 else
10143 {
10144 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10145 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10146 }
10147 #endif /* DW_LLE_view_pair */
10148
10149 return;
10150 }
10151
10152 /* Output the location list given to us. */
10153
10154 static void
10155 output_loc_list (dw_loc_list_ref list_head)
10156 {
10157 int vcount = 0, lcount = 0;
10158
10159 if (list_head->emitted)
10160 return;
10161 list_head->emitted = true;
10162
10163 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10164 {
10165 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10166
10167 for (dw_loc_list_ref curr = list_head; curr != NULL;
10168 curr = curr->dw_loc_next)
10169 {
10170 unsigned long size;
10171
10172 if (skip_loc_list_entry (curr, &size))
10173 continue;
10174
10175 vcount++;
10176
10177 /* ?? dwarf_split_debug_info? */
10178 if (dwarf2out_as_locview_support)
10179 {
10180 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10181
10182 if (!ZERO_VIEW_P (curr->vbegin))
10183 {
10184 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10185 dw2_asm_output_symname_uleb128 (label,
10186 "View list begin (%s)",
10187 list_head->vl_symbol);
10188 }
10189 else
10190 dw2_asm_output_data_uleb128 (0,
10191 "View list begin (%s)",
10192 list_head->vl_symbol);
10193
10194 if (!ZERO_VIEW_P (curr->vend))
10195 {
10196 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10197 dw2_asm_output_symname_uleb128 (label,
10198 "View list end (%s)",
10199 list_head->vl_symbol);
10200 }
10201 else
10202 dw2_asm_output_data_uleb128 (0,
10203 "View list end (%s)",
10204 list_head->vl_symbol);
10205 }
10206 else
10207 {
10208 dw2_asm_output_data_uleb128 (curr->vbegin,
10209 "View list begin (%s)",
10210 list_head->vl_symbol);
10211 dw2_asm_output_data_uleb128 (curr->vend,
10212 "View list end (%s)",
10213 list_head->vl_symbol);
10214 }
10215 }
10216 }
10217
10218 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10219
10220 const char *last_section = NULL;
10221 const char *base_label = NULL;
10222
10223 /* Walk the location list, and output each range + expression. */
10224 for (dw_loc_list_ref curr = list_head; curr != NULL;
10225 curr = curr->dw_loc_next)
10226 {
10227 unsigned long size;
10228
10229 /* Skip this entry? If we skip it here, we must skip it in the
10230 view list above as well. */
10231 if (skip_loc_list_entry (curr, &size))
10232 continue;
10233
10234 lcount++;
10235
10236 if (dwarf_version >= 5)
10237 {
10238 if (dwarf_split_debug_info)
10239 {
10240 dwarf2out_maybe_output_loclist_view_pair (curr);
10241 /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has a
10242 uleb128 index into .debug_addr and a uleb128 length. */
10243 dw2_asm_output_data (1, DW_LLE_startx_length,
10244 "DW_LLE_startx_length (%s)",
10245 list_head->ll_symbol);
10246 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10247 "Location list range start index "
10248 "(%s)", curr->begin);
10249 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10250 For that case we probably need to emit DW_LLE_startx_endx,
10251 but we'd need 2 .debug_addr entries rather than just one. */
10252 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10253 "Location list length (%s)",
10254 list_head->ll_symbol);
10255 }
10256 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10257 {
10258 dwarf2out_maybe_output_loclist_view_pair (curr);
10259 /* If all code is in .text section, the base address is
10260 already provided by the CU attributes. Use
10261 DW_LLE_offset_pair where both addresses are uleb128 encoded
10262 offsets against that base. */
10263 dw2_asm_output_data (1, DW_LLE_offset_pair,
10264 "DW_LLE_offset_pair (%s)",
10265 list_head->ll_symbol);
10266 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10267 "Location list begin address (%s)",
10268 list_head->ll_symbol);
10269 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10270 "Location list end address (%s)",
10271 list_head->ll_symbol);
10272 }
10273 else if (HAVE_AS_LEB128)
10274 {
10275 /* Otherwise, find out how many consecutive entries could share
10276 the same base entry. If just one, emit DW_LLE_start_length,
10277 otherwise emit DW_LLE_base_address for the base address
10278 followed by a series of DW_LLE_offset_pair. */
10279 if (last_section == NULL || curr->section != last_section)
10280 {
10281 dw_loc_list_ref curr2;
10282 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10283 curr2 = curr2->dw_loc_next)
10284 {
10285 if (strcmp (curr2->begin, curr2->end) == 0
10286 && !curr2->force)
10287 continue;
10288 break;
10289 }
10290 if (curr2 == NULL || curr->section != curr2->section)
10291 last_section = NULL;
10292 else
10293 {
10294 last_section = curr->section;
10295 base_label = curr->begin;
10296 dw2_asm_output_data (1, DW_LLE_base_address,
10297 "DW_LLE_base_address (%s)",
10298 list_head->ll_symbol);
10299 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10300 "Base address (%s)",
10301 list_head->ll_symbol);
10302 }
10303 }
10304 /* Only one entry with the same base address. Use
10305 DW_LLE_start_length with absolute address and uleb128
10306 length. */
10307 if (last_section == NULL)
10308 {
10309 dwarf2out_maybe_output_loclist_view_pair (curr);
10310 dw2_asm_output_data (1, DW_LLE_start_length,
10311 "DW_LLE_start_length (%s)",
10312 list_head->ll_symbol);
10313 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10314 "Location list begin address (%s)",
10315 list_head->ll_symbol);
10316 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10317 "Location list length "
10318 "(%s)", list_head->ll_symbol);
10319 }
10320 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10321 DW_LLE_base_address. */
10322 else
10323 {
10324 dwarf2out_maybe_output_loclist_view_pair (curr);
10325 dw2_asm_output_data (1, DW_LLE_offset_pair,
10326 "DW_LLE_offset_pair (%s)",
10327 list_head->ll_symbol);
10328 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10329 "Location list begin address "
10330 "(%s)", list_head->ll_symbol);
10331 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10332 "Location list end address "
10333 "(%s)", list_head->ll_symbol);
10334 }
10335 }
10336 /* The assembler does not support the .uleb128 directive. Emit
10337 DW_LLE_start_end with a pair of absolute addresses. */
10338 else
10339 {
10340 dwarf2out_maybe_output_loclist_view_pair (curr);
10341 dw2_asm_output_data (1, DW_LLE_start_end,
10342 "DW_LLE_start_end (%s)",
10343 list_head->ll_symbol);
10344 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10345 "Location list begin address (%s)",
10346 list_head->ll_symbol);
10347 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10348 "Location list end address (%s)",
10349 list_head->ll_symbol);
10350 }
10351 }
10352 else if (dwarf_split_debug_info)
10353 {
10354 /* For -gsplit-dwarf -gdwarf-{2,3,4} emit index into .debug_addr
10355 and 4 byte length. */
10356 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10357 "Location list start/length entry (%s)",
10358 list_head->ll_symbol);
10359 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10360 "Location list range start index (%s)",
10361 curr->begin);
10362 /* The length field is 4 bytes. If we ever need to support
10363 an 8-byte length, we can add a new DW_LLE code or fall back
10364 to DW_LLE_GNU_start_end_entry. */
10365 dw2_asm_output_delta (4, curr->end, curr->begin,
10366 "Location list range length (%s)",
10367 list_head->ll_symbol);
10368 }
10369 else if (!have_multiple_function_sections)
10370 {
10371 /* Pair of relative addresses against start of text section. */
10372 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10373 "Location list begin address (%s)",
10374 list_head->ll_symbol);
10375 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10376 "Location list end address (%s)",
10377 list_head->ll_symbol);
10378 }
10379 else
10380 {
10381 /* Pair of absolute addresses. */
10382 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10383 "Location list begin address (%s)",
10384 list_head->ll_symbol);
10385 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10386 "Location list end address (%s)",
10387 list_head->ll_symbol);
10388 }
10389
10390 /* Output the block length for this list of location operations. */
10391 if (dwarf_version >= 5)
10392 dw2_asm_output_data_uleb128 (size, "Location expression size");
10393 else
10394 {
10395 gcc_assert (size <= 0xffff);
10396 dw2_asm_output_data (2, size, "Location expression size");
10397 }
10398
10399 output_loc_sequence (curr->expr, -1);
10400 }
10401
10402 /* And finally list termination. */
10403 if (dwarf_version >= 5)
10404 dw2_asm_output_data (1, DW_LLE_end_of_list,
10405 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10406 else if (dwarf_split_debug_info)
10407 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10408 "Location list terminator (%s)",
10409 list_head->ll_symbol);
10410 else
10411 {
10412 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10413 "Location list terminator begin (%s)",
10414 list_head->ll_symbol);
10415 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10416 "Location list terminator end (%s)",
10417 list_head->ll_symbol);
10418 }
10419
10420 gcc_assert (!list_head->vl_symbol
10421 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10422 }
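
/* For DWARF 5 without -gsplit-dwarf, with multiple function sections and
   a .uleb128-capable assembler, a list typically ends up looking like
   this (labels illustrative, DW_LLE_* constants shown by name although
   they are emitted as plain bytes, .quad assuming a 64-bit address):

	.LLST3:
	.byte	 DW_LLE_base_address
	.quad	 .LFB12			# base address
	.byte	 DW_LLE_offset_pair
	.uleb128 .LVL5-.LFB12		# begin, relative to the base
	.uleb128 .LVL6-.LFB12		# end
	.uleb128 0x2			# location expression size
	.byte	 0x91,0x6c		# DW_OP_fbreg -20
	.byte	 DW_LLE_end_of_list
  */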
10423
10424 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10425 section. Emit a relocated reference if val_entry is NULL, otherwise,
10426 emit an indirect reference. */
10427
10428 static void
10429 output_range_list_offset (dw_attr_node *a)
10430 {
10431 const char *name = dwarf_attr_name (a->dw_attr);
10432
10433 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10434 {
10435 if (dwarf_version >= 5)
10436 {
10437 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10438 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10439 debug_ranges_section, "%s", name);
10440 }
10441 else
10442 {
10443 char *p = strchr (ranges_section_label, '\0');
10444 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10445 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10446 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10447 debug_ranges_section, "%s", name);
10448 *p = '\0';
10449 }
10450 }
10451 else if (dwarf_version >= 5)
10452 {
10453 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10454 gcc_assert (rnglist_idx);
10455 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10456 }
10457 else
10458 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10459 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10460 "%s (offset from %s)", name, ranges_section_label);
10461 }
10462
10463 /* Output the offset into the debug_loc section. */
10464
10465 static void
10466 output_loc_list_offset (dw_attr_node *a)
10467 {
10468 char *sym = AT_loc_list (a)->ll_symbol;
10469
10470 gcc_assert (sym);
10471 if (!dwarf_split_debug_info)
10472 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10473 "%s", dwarf_attr_name (a->dw_attr));
10474 else if (dwarf_version >= 5)
10475 {
10476 gcc_assert (AT_loc_list (a)->num_assigned);
10477 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10478 dwarf_attr_name (a->dw_attr),
10479 sym);
10480 }
10481 else
10482 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10483 "%s", dwarf_attr_name (a->dw_attr));
10484 }
10485
10486 /* Output the offset of the view list into the debug_loc section. */
10487
10488 static void
10489 output_view_list_offset (dw_attr_node *a)
10490 {
10491 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10492
10493 gcc_assert (sym);
10494 if (dwarf_split_debug_info)
10495 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10496 "%s", dwarf_attr_name (a->dw_attr));
10497 else
10498 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10499 "%s", dwarf_attr_name (a->dw_attr));
10500 }
10501
10502 /* Output an attribute's index or value appropriately. */
10503
10504 static void
10505 output_attr_index_or_value (dw_attr_node *a)
10506 {
10507 const char *name = dwarf_attr_name (a->dw_attr);
10508
10509 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10510 {
10511 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10512 return;
10513 }
10514 switch (AT_class (a))
10515 {
10516 case dw_val_class_addr:
10517 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10518 break;
10519 case dw_val_class_high_pc:
10520 case dw_val_class_lbl_id:
10521 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10522 break;
10523 default:
10524 gcc_unreachable ();
10525 }
10526 }
10527
10528 /* Output a type signature. */
10529
10530 static inline void
10531 output_signature (const char *sig, const char *name)
10532 {
10533 int i;
10534
10535 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10536 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10537 }
10538
10539 /* Output a discriminant value. */
10540
10541 static inline void
10542 output_discr_value (dw_discr_value *discr_value, const char *name)
10543 {
10544 if (discr_value->pos)
10545 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10546 else
10547 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10548 }
10549
10550 /* Output the DIE and its attributes. Called recursively to generate
10551 the definitions of each child DIE. */
10552
10553 static void
10554 output_die (dw_die_ref die)
10555 {
10556 dw_attr_node *a;
10557 dw_die_ref c;
10558 unsigned long size;
10559 unsigned ix;
10560
10561 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10562 (unsigned long)die->die_offset,
10563 dwarf_tag_name (die->die_tag));
10564
10565 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10566 {
10567 const char *name = dwarf_attr_name (a->dw_attr);
10568
10569 switch (AT_class (a))
10570 {
10571 case dw_val_class_addr:
10572 output_attr_index_or_value (a);
10573 break;
10574
10575 case dw_val_class_offset:
10576 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10577 "%s", name);
10578 break;
10579
10580 case dw_val_class_range_list:
10581 output_range_list_offset (a);
10582 break;
10583
10584 case dw_val_class_loc:
10585 size = size_of_locs (AT_loc (a));
10586
10587 /* Output the block length for this list of location operations. */
10588 if (dwarf_version >= 4)
10589 dw2_asm_output_data_uleb128 (size, "%s", name);
10590 else
10591 dw2_asm_output_data (constant_size (size), size, "%s", name);
10592
10593 output_loc_sequence (AT_loc (a), -1);
10594 break;
10595
10596 case dw_val_class_const:
10597 /* ??? It would be slightly more efficient to use a scheme like the one
10598 used for unsigned constants below, but gdb 4.x does not sign
10599 extend. Gdb 5.x does sign extend. */
10600 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10601 break;
10602
10603 case dw_val_class_unsigned_const:
10604 {
10605 int csize = constant_size (AT_unsigned (a));
10606 if (dwarf_version == 3
10607 && a->dw_attr == DW_AT_data_member_location
10608 && csize >= 4)
10609 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10610 else
10611 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10612 }
10613 break;
10614
10615 case dw_val_class_symview:
10616 {
10617 int vsize;
10618 if (symview_upper_bound <= 0xff)
10619 vsize = 1;
10620 else if (symview_upper_bound <= 0xffff)
10621 vsize = 2;
10622 else if (symview_upper_bound <= 0xffffffff)
10623 vsize = 4;
10624 else
10625 vsize = 8;
10626 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10627 "%s", name);
10628 }
10629 break;
10630
10631 case dw_val_class_const_implicit:
10632 if (flag_debug_asm)
10633 fprintf (asm_out_file, "\t\t\t%s %s ("
10634 HOST_WIDE_INT_PRINT_DEC ")\n",
10635 ASM_COMMENT_START, name, AT_int (a));
10636 break;
10637
10638 case dw_val_class_unsigned_const_implicit:
10639 if (flag_debug_asm)
10640 fprintf (asm_out_file, "\t\t\t%s %s ("
10641 HOST_WIDE_INT_PRINT_HEX ")\n",
10642 ASM_COMMENT_START, name, AT_unsigned (a));
10643 break;
10644
10645 case dw_val_class_const_double:
10646 {
10647 unsigned HOST_WIDE_INT first, second;
10648
10649 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10650 dw2_asm_output_data (1,
10651 HOST_BITS_PER_DOUBLE_INT
10652 / HOST_BITS_PER_CHAR,
10653 NULL);
10654
10655 if (WORDS_BIG_ENDIAN)
10656 {
10657 first = a->dw_attr_val.v.val_double.high;
10658 second = a->dw_attr_val.v.val_double.low;
10659 }
10660 else
10661 {
10662 first = a->dw_attr_val.v.val_double.low;
10663 second = a->dw_attr_val.v.val_double.high;
10664 }
10665
10666 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10667 first, "%s", name);
10668 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10669 second, NULL);
10670 }
10671 break;
10672
10673 case dw_val_class_wide_int:
10674 {
10675 int i;
10676 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10677 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10678 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10679 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10680 * l, NULL);
10681
10682 if (WORDS_BIG_ENDIAN)
10683 for (i = len - 1; i >= 0; --i)
10684 {
10685 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10686 "%s", name);
10687 name = "";
10688 }
10689 else
10690 for (i = 0; i < len; ++i)
10691 {
10692 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10693 "%s", name);
10694 name = "";
10695 }
10696 }
10697 break;
10698
10699 case dw_val_class_vec:
10700 {
10701 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10702 unsigned int len = a->dw_attr_val.v.val_vec.length;
10703 unsigned int i;
10704 unsigned char *p;
10705
10706 dw2_asm_output_data (constant_size (len * elt_size),
10707 len * elt_size, "%s", name);
10708 if (elt_size > sizeof (HOST_WIDE_INT))
10709 {
10710 elt_size /= 2;
10711 len *= 2;
10712 }
10713 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10714 i < len;
10715 i++, p += elt_size)
10716 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10717 "fp or vector constant word %u", i);
10718 break;
10719 }
10720
10721 case dw_val_class_flag:
10722 if (dwarf_version >= 4)
10723 {
10724 /* Currently all add_AT_flag calls pass in 1 as last argument,
10725 so DW_FORM_flag_present can be used. If that ever changes,
10726 we'll need to use DW_FORM_flag and have some optimization
10727 in build_abbrev_table that will change those to
10728 DW_FORM_flag_present if it is set to 1 in all DIEs using
10729 the same abbrev entry. */
10730 gcc_assert (AT_flag (a) == 1);
10731 if (flag_debug_asm)
10732 fprintf (asm_out_file, "\t\t\t%s %s\n",
10733 ASM_COMMENT_START, name);
10734 break;
10735 }
10736 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10737 break;
10738
10739 case dw_val_class_loc_list:
10740 output_loc_list_offset (a);
10741 break;
10742
10743 case dw_val_class_view_list:
10744 output_view_list_offset (a);
10745 break;
10746
10747 case dw_val_class_die_ref:
10748 if (AT_ref_external (a))
10749 {
10750 if (AT_ref (a)->comdat_type_p)
10751 {
10752 comdat_type_node *type_node
10753 = AT_ref (a)->die_id.die_type_node;
10754
10755 gcc_assert (type_node);
10756 output_signature (type_node->signature, name);
10757 }
10758 else
10759 {
10760 const char *sym = AT_ref (a)->die_id.die_symbol;
10761 int size;
10762
10763 gcc_assert (sym);
10764 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10765 length, whereas in DWARF3 it's always sized as an
10766 offset. */
10767 if (dwarf_version == 2)
10768 size = DWARF2_ADDR_SIZE;
10769 else
10770 size = DWARF_OFFSET_SIZE;
10771 /* ??? We cannot unconditionally output die_offset if
10772 non-zero - others might create references to those
10773 DIEs via symbols.
10774 And we do not clear its DIE offset after outputting it
10775 (and the label refers to the actual DIEs, not the
10776 DWARF CU unit header, which is what using label + offset
10777 would be correct for).
10778 ??? This is the reason for the with_offset flag. */
10779 if (AT_ref (a)->with_offset)
10780 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10781 debug_info_section, "%s", name);
10782 else
10783 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10784 name);
10785 }
10786 }
10787 else
10788 {
10789 gcc_assert (AT_ref (a)->die_offset);
10790 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10791 "%s", name);
10792 }
10793 break;
10794
10795 case dw_val_class_fde_ref:
10796 {
10797 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10798
10799 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10800 a->dw_attr_val.v.val_fde_index * 2);
10801 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10802 "%s", name);
10803 }
10804 break;
10805
10806 case dw_val_class_vms_delta:
10807 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10808 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10809 AT_vms_delta2 (a), AT_vms_delta1 (a),
10810 "%s", name);
10811 #else
10812 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10813 AT_vms_delta2 (a), AT_vms_delta1 (a),
10814 "%s", name);
10815 #endif
10816 break;
10817
10818 case dw_val_class_lbl_id:
10819 output_attr_index_or_value (a);
10820 break;
10821
10822 case dw_val_class_lineptr:
10823 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10824 debug_line_section, "%s", name);
10825 break;
10826
10827 case dw_val_class_macptr:
10828 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10829 debug_macinfo_section, "%s", name);
10830 break;
10831
10832 case dw_val_class_loclistsptr:
10833 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10834 debug_loc_section, "%s", name);
10835 break;
10836
10837 case dw_val_class_str:
10838 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10839 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10840 a->dw_attr_val.v.val_str->label,
10841 debug_str_section,
10842 "%s: \"%s\"", name, AT_string (a));
10843 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10844 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10845 a->dw_attr_val.v.val_str->label,
10846 debug_line_str_section,
10847 "%s: \"%s\"", name, AT_string (a));
10848 else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (DW_FORM_strx))
10849 dw2_asm_output_data_uleb128 (AT_index (a),
10850 "%s: \"%s\"", name, AT_string (a));
10851 else
10852 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10853 break;
10854
10855 case dw_val_class_file:
10856 {
10857 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10858
10859 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10860 a->dw_attr_val.v.val_file->filename);
10861 break;
10862 }
10863
10864 case dw_val_class_file_implicit:
10865 if (flag_debug_asm)
10866 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10867 ASM_COMMENT_START, name,
10868 maybe_emit_file (a->dw_attr_val.v.val_file),
10869 a->dw_attr_val.v.val_file->filename);
10870 break;
10871
10872 case dw_val_class_data8:
10873 {
10874 int i;
10875
10876 for (i = 0; i < 8; i++)
10877 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10878 i == 0 ? "%s" : NULL, name);
10879 break;
10880 }
10881
10882 case dw_val_class_high_pc:
10883 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10884 get_AT_low_pc (die), "DW_AT_high_pc");
10885 break;
10886
10887 case dw_val_class_discr_value:
10888 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10889 break;
10890
10891 case dw_val_class_discr_list:
10892 {
10893 dw_discr_list_ref list = AT_discr_list (a);
10894 const int size = size_of_discr_list (list);
10895
10896 /* This is a block, so output its length first. */
10897 dw2_asm_output_data (constant_size (size), size,
10898 "%s: block size", name);
10899
10900 for (; list != NULL; list = list->dw_discr_next)
10901 {
10902 /* One byte for the discriminant value descriptor, and then as
10903 many LEB128 numbers as required. */
10904 if (list->dw_discr_range)
10905 dw2_asm_output_data (1, DW_DSC_range,
10906 "%s: DW_DSC_range", name);
10907 else
10908 dw2_asm_output_data (1, DW_DSC_label,
10909 "%s: DW_DSC_label", name);
10910
10911 output_discr_value (&list->dw_discr_lower_bound, name);
10912 if (list->dw_discr_range)
10913 output_discr_value (&list->dw_discr_upper_bound, name);
10914 }
10915 break;
10916 }
10917
10918 default:
10919 gcc_unreachable ();
10920 }
10921 }
10922
10923 FOR_EACH_CHILD (die, c, output_die (c));
10924
10925 /* Add null byte to terminate sibling list. */
10926 if (die->die_child != NULL)
10927 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10928 (unsigned long) die->die_offset);
10929 }
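/* A DIE and its children are thus emitted depth-first.  Roughly, a parent
   DIE with two children is laid out as:

     <parent abbrev code> <parent attribute values>
       <child1 abbrev code> <child1 attribute values> <0 if child1 has children>
       <child2 abbrev code> <child2 attribute values> <0 if child2 has children>
     <0>  terminates the parent's list of children

   The trailing zero byte is the one emitted just above when die_child is
   non-NULL.  */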
10930
10931 /* Output the dwarf version number. */
10932
10933 static void
10934 output_dwarf_version ()
10935 {
10936 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10937 views in loclist. That will change eventually. */
10938 if (dwarf_version == 6)
10939 {
10940 static bool once;
10941 if (!once)
10942 {
10943 warning (0,
10944 "-gdwarf-6 is output as version 5 with incompatibilities");
10945 once = true;
10946 }
10947 dw2_asm_output_data (2, 5, "DWARF version number");
10948 }
10949 else
10950 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10951 }
10952
10953 /* Output the compilation unit that appears at the beginning of the
10954 .debug_info section, and precedes the DIE descriptions. */
10955
10956 static void
10957 output_compilation_unit_header (enum dwarf_unit_type ut)
10958 {
10959 if (!XCOFF_DEBUGGING_INFO)
10960 {
10961 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
10962 dw2_asm_output_data (4, 0xffffffff,
10963 "Initial length escape value indicating 64-bit DWARF extension");
10964 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10965 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
10966 "Length of Compilation Unit Info");
10967 }
10968
10969 output_dwarf_version ();
10970 if (dwarf_version >= 5)
10971 {
10972 const char *name;
10973 switch (ut)
10974 {
10975 case DW_UT_compile: name = "DW_UT_compile"; break;
10976 case DW_UT_type: name = "DW_UT_type"; break;
10977 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
10978 case DW_UT_split_type: name = "DW_UT_split_type"; break;
10979 default: gcc_unreachable ();
10980 }
10981 dw2_asm_output_data (1, ut, "%s", name);
10982 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10983 }
10984 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
10985 debug_abbrev_section,
10986 "Offset Into Abbrev. Section");
10987 if (dwarf_version < 5)
10988 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
10989 }
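/* For reference, the header just emitted is, informally:

     DWARF 2-4:  unit_length, version (2 bytes), debug_abbrev_offset,
                 address_size (1 byte)
     DWARF 5:    unit_length, version (2 bytes), unit_type (1 byte),
                 address_size (1 byte), debug_abbrev_offset

   which is why the pointer-size byte is written before the abbrev offset
   for version 5 and after it for earlier versions.  */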
10990
10991 /* Output the compilation unit DIE and its children. */
10992
10993 static void
10994 output_comp_unit (dw_die_ref die, int output_if_empty,
10995 const unsigned char *dwo_id)
10996 {
10997 const char *secname, *oldsym;
10998 char *tmp;
10999
11000 /* Unless we are outputting the main CU, we may throw away empty ones. */
11001 if (!output_if_empty && die->die_child == NULL)
11002 return;
11003
11004 /* Even if there are no children of this DIE, we must output the information
11005 about the compilation unit. Otherwise, on an empty translation unit, we
11006 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
11007 will then complain when examining the file. First mark all the DIEs in
11008 this CU so we know which get local refs. */
11009 mark_dies (die);
11010
11011 external_ref_hash_type *extern_map = optimize_external_refs (die);
11012
11013 /* For now, optimize only the main CU; in order to optimize the rest
11014 we'd need to see all of them earlier. Leave the rest for post-linking
11015 tools like DWZ. */
11016 if (die == comp_unit_die ())
11017 abbrev_opt_start = vec_safe_length (abbrev_die_table);
11018
11019 build_abbrev_table (die, extern_map);
11020
11021 optimize_abbrev_table ();
11022
11023 delete extern_map;
11024
11025 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11026 next_die_offset = (dwo_id
11027 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11028 : DWARF_COMPILE_UNIT_HEADER_SIZE);
11029 calc_die_sizes (die);
11030
11031 oldsym = die->die_id.die_symbol;
11032 if (oldsym && die->comdat_type_p)
11033 {
11034 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11035
11036 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11037 secname = tmp;
11038 die->die_id.die_symbol = NULL;
11039 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11040 }
11041 else
11042 {
11043 switch_to_section (debug_info_section);
11044 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11045 info_section_emitted = true;
11046 }
11047
11048 /* For LTO cross unit DIE refs we want a symbol on the start of the
11049 debuginfo section, not on the CU DIE. */
11050 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11051 {
11052 /* ??? No way to get visibility assembled without a decl. */
11053 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11054 get_identifier (oldsym), char_type_node);
11055 TREE_PUBLIC (decl) = true;
11056 TREE_STATIC (decl) = true;
11057 DECL_ARTIFICIAL (decl) = true;
11058 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11059 DECL_VISIBILITY_SPECIFIED (decl) = true;
11060 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11061 #ifdef ASM_WEAKEN_LABEL
11062 /* We prefer a .weak because that handles duplicates from duplicate
11063 archive members in a graceful way. */
11064 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11065 #else
11066 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11067 #endif
11068 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11069 }
11070
11071 /* Output debugging information. */
11072 output_compilation_unit_header (dwo_id
11073 ? DW_UT_split_compile : DW_UT_compile);
11074 if (dwarf_version >= 5)
11075 {
11076 if (dwo_id != NULL)
11077 for (int i = 0; i < 8; i++)
11078 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11079 }
11080 output_die (die);
11081
11082 /* Leave the marks on the main CU, so we can check them in
11083 output_pubnames. */
11084 if (oldsym)
11085 {
11086 unmark_dies (die);
11087 die->die_id.die_symbol = oldsym;
11088 }
11089 }
11090
11091 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11092 and .debug_pubtypes. This is configured per-target, but can be
11093 overridden by the -gpubnames or -gno-pubnames options. */
11094
11095 static inline bool
11096 want_pubnames (void)
11097 {
11098 if (debug_info_level <= DINFO_LEVEL_TERSE)
11099 return false;
11100 if (debug_generate_pub_sections != -1)
11101 return debug_generate_pub_sections;
11102 return targetm.want_debug_pub_sections;
11103 }
11104
11105 /* Add the DW_AT_GNU_pubnames attribute if appropriate. */
11106
11107 static void
11108 add_AT_pubnames (dw_die_ref die)
11109 {
11110 if (want_pubnames ())
11111 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11112 }
11113
11114 /* Add a string attribute value to a skeleton DIE. */
11115
11116 static inline void
11117 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11118 const char *str)
11119 {
11120 dw_attr_node attr;
11121 struct indirect_string_node *node;
11122
11123 if (! skeleton_debug_str_hash)
11124 skeleton_debug_str_hash
11125 = hash_table<indirect_string_hasher>::create_ggc (10);
11126
11127 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11128 find_string_form (node);
11129 if (node->form == dwarf_FORM (DW_FORM_strx))
11130 node->form = DW_FORM_strp;
11131
11132 attr.dw_attr = attr_kind;
11133 attr.dw_attr_val.val_class = dw_val_class_str;
11134 attr.dw_attr_val.val_entry = NULL;
11135 attr.dw_attr_val.v.val_str = node;
11136 add_dwarf_attr (die, &attr);
11137 }
11138
11139 /* Helper function to generate top-level dies for skeleton debug_info and
11140 debug_types. */
11141
11142 static void
11143 add_top_level_skeleton_die_attrs (dw_die_ref die)
11144 {
11145 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11146 const char *comp_dir = comp_dir_string ();
11147
11148 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11149 if (comp_dir != NULL)
11150 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11151 add_AT_pubnames (die);
11152 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
11153 }
11154
11155 /* Output skeleton debug sections that point to the dwo file. */
11156
11157 static void
11158 output_skeleton_debug_sections (dw_die_ref comp_unit,
11159 const unsigned char *dwo_id)
11160 {
11161 /* These attributes will be found in the full debug_info section. */
11162 remove_AT (comp_unit, DW_AT_producer);
11163 remove_AT (comp_unit, DW_AT_language);
11164
11165 switch_to_section (debug_skeleton_info_section);
11166 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11167
11168 /* Produce the skeleton compilation-unit header. This one differs enough from
11169 a normal CU header that it's better not to call
11170 output_compilation_unit_header. */
11171 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11172 dw2_asm_output_data (4, 0xffffffff,
11173 "Initial length escape value indicating 64-bit "
11174 "DWARF extension");
11175
11176 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11177 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11178 - DWARF_INITIAL_LENGTH_SIZE
11179 + size_of_die (comp_unit),
11180 "Length of Compilation Unit Info");
11181 output_dwarf_version ();
11182 if (dwarf_version >= 5)
11183 {
11184 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11185 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11186 }
11187 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11188 debug_skeleton_abbrev_section,
11189 "Offset Into Abbrev. Section");
11190 if (dwarf_version < 5)
11191 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11192 else
11193 for (int i = 0; i < 8; i++)
11194 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11195
11196 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11197 output_die (comp_unit);
11198
11199 /* Build the skeleton debug_abbrev section. */
11200 switch_to_section (debug_skeleton_abbrev_section);
11201 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11202
11203 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11204
11205 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11206 }
11207
11208 /* Output a comdat type unit DIE and its children. */
11209
11210 static void
11211 output_comdat_type_unit (comdat_type_node *node)
11212 {
11213 const char *secname;
11214 char *tmp;
11215 int i;
11216 #if defined (OBJECT_FORMAT_ELF)
11217 tree comdat_key;
11218 #endif
11219
11220 /* First mark all the DIEs in this CU so we know which get local refs. */
11221 mark_dies (node->root_die);
11222
11223 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11224
11225 build_abbrev_table (node->root_die, extern_map);
11226
11227 delete extern_map;
11228 extern_map = NULL;
11229
11230 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11231 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11232 calc_die_sizes (node->root_die);
11233
11234 #if defined (OBJECT_FORMAT_ELF)
11235 if (dwarf_version >= 5)
11236 {
11237 if (!dwarf_split_debug_info)
11238 secname = ".debug_info";
11239 else
11240 secname = ".debug_info.dwo";
11241 }
11242 else if (!dwarf_split_debug_info)
11243 secname = ".debug_types";
11244 else
11245 secname = ".debug_types.dwo";
11246
11247 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11248 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11249 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11250 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11251 comdat_key = get_identifier (tmp);
11252 targetm.asm_out.named_section (secname,
11253 SECTION_DEBUG | SECTION_LINKONCE,
11254 comdat_key);
11255 #else
11256 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11257 sprintf (tmp, (dwarf_version >= 5
11258 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11259 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11260 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11261 secname = tmp;
11262 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11263 #endif
11264
11265 /* Output debugging information. */
11266 output_compilation_unit_header (dwarf_split_debug_info
11267 ? DW_UT_split_type : DW_UT_type);
11268 output_signature (node->signature, "Type Signature");
11269 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11270 "Offset to Type DIE");
11271 output_die (node->root_die);
11272
11273 unmark_dies (node->root_die);
11274 }
11275
11276 /* Return the DWARF2/3 pubname associated with a decl. */
11277
11278 static const char *
11279 dwarf2_name (tree decl, int scope)
11280 {
11281 if (DECL_NAMELESS (decl))
11282 return NULL;
11283 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11284 }
11285
11286 /* Add a new entry to .debug_pubnames if appropriate. */
11287
11288 static void
11289 add_pubname_string (const char *str, dw_die_ref die)
11290 {
11291 pubname_entry e;
11292
11293 e.die = die;
11294 e.name = xstrdup (str);
11295 vec_safe_push (pubname_table, e);
11296 }
11297
11298 static void
11299 add_pubname (tree decl, dw_die_ref die)
11300 {
11301 if (!want_pubnames ())
11302 return;
11303
11304 /* Don't add items to the table when we expect that the consumer will have
11305 just read the enclosing die. For example, if the consumer is looking at a
11306 class_member, it will either be inside the class already, or will have just
11307 looked up the class to find the member. Either way, searching the class is
11308 faster than searching the index. */
11309 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11310 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11311 {
11312 const char *name = dwarf2_name (decl, 1);
11313
11314 if (name)
11315 add_pubname_string (name, die);
11316 }
11317 }
11318
11319 /* Add an enumerator to the pubnames section. */
11320
11321 static void
11322 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11323 {
11324 pubname_entry e;
11325
11326 gcc_assert (scope_name);
11327 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11328 e.die = die;
11329 vec_safe_push (pubname_table, e);
11330 }
11331
11332 /* Add a new entry to .debug_pubtypes if appropriate. */
11333
11334 static void
11335 add_pubtype (tree decl, dw_die_ref die)
11336 {
11337 pubname_entry e;
11338
11339 if (!want_pubnames ())
11340 return;
11341
11342 if ((TREE_PUBLIC (decl)
11343 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11344 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11345 {
11346 tree scope = NULL;
11347 const char *scope_name = "";
11348 const char *sep = is_cxx () ? "::" : ".";
11349 const char *name;
11350
11351 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11352 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11353 {
11354 scope_name = lang_hooks.dwarf_name (scope, 1);
11355 if (scope_name != NULL && scope_name[0] != '\0')
11356 scope_name = concat (scope_name, sep, NULL);
11357 else
11358 scope_name = "";
11359 }
11360
11361 if (TYPE_P (decl))
11362 name = type_tag (decl);
11363 else
11364 name = lang_hooks.dwarf_name (decl, 1);
11365
11366 /* If we don't have a name for the type, there's no point in adding
11367 it to the table. */
11368 if (name != NULL && name[0] != '\0')
11369 {
11370 e.die = die;
11371 e.name = concat (scope_name, name, NULL);
11372 vec_safe_push (pubtype_table, e);
11373 }
11374
11375 /* Although it might be more consistent to add the pubinfo for the
11376 enumerators as their dies are created, they should only be added if the
11377 enum type meets the criteria above. So rather than re-check the parent
11378 enum type whenever an enumerator die is created, just output them all
11379 here. This isn't protected by the name conditional because anonymous
11380 enums don't have names. */
11381 if (die->die_tag == DW_TAG_enumeration_type)
11382 {
11383 dw_die_ref c;
11384
11385 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11386 }
11387 }
11388 }
11389
11390 /* Output a single entry in the pubnames table. */
11391
11392 static void
11393 output_pubname (dw_offset die_offset, pubname_entry *entry)
11394 {
11395 dw_die_ref die = entry->die;
11396 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11397
11398 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11399
11400 if (debug_generate_pub_sections == 2)
11401 {
11402 /* This logic follows gdb's method for determining the value of the flag
11403 byte. */
11404 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11405 switch (die->die_tag)
11406 {
11407 case DW_TAG_typedef:
11408 case DW_TAG_base_type:
11409 case DW_TAG_subrange_type:
11410 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11411 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11412 break;
11413 case DW_TAG_enumerator:
11414 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11415 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11416 if (!is_cxx ())
11417 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11418 break;
11419 case DW_TAG_subprogram:
11420 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11421 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11422 if (!is_ada ())
11423 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11424 break;
11425 case DW_TAG_constant:
11426 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11427 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11428 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11429 break;
11430 case DW_TAG_variable:
11431 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11432 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11433 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11434 break;
11435 case DW_TAG_namespace:
11436 case DW_TAG_imported_declaration:
11437 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11438 break;
11439 case DW_TAG_class_type:
11440 case DW_TAG_interface_type:
11441 case DW_TAG_structure_type:
11442 case DW_TAG_union_type:
11443 case DW_TAG_enumeration_type:
11444 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11445 if (!is_cxx ())
11446 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11447 break;
11448 default:
11449 /* An unusual tag. Leave the flag-byte empty. */
11450 break;
11451 }
11452 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11453 "GDB-index flags");
11454 }
11455
11456 dw2_asm_output_nstring (entry->name, -1, "external name");
11457 }
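/* A rough picture of the flag byte above: FLAGS is assembled with the
   symbol kind and is-static attribute packed above the low
   GDB_INDEX_CU_BITSIZE bits, which the gdb index format reserves for a CU
   index.  Shifting right by GDB_INDEX_CU_BITSIZE therefore leaves a
   one-byte kind/static value, which is what follows the DIE offset.  */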
11458
11459
11460 /* Output the public names table used to speed up access to externally
11461 visible names; or the public types table used to find type definitions. */
11462
11463 static void
11464 output_pubnames (vec<pubname_entry, va_gc> *names)
11465 {
11466 unsigned i;
11467 unsigned long pubnames_length = size_of_pubnames (names);
11468 pubname_entry *pub;
11469
11470 if (!XCOFF_DEBUGGING_INFO)
11471 {
11472 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11473 dw2_asm_output_data (4, 0xffffffff,
11474 "Initial length escape value indicating 64-bit DWARF extension");
11475 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11476 "Pub Info Length");
11477 }
11478
11479 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11480 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11481
11482 if (dwarf_split_debug_info)
11483 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11484 debug_skeleton_info_section,
11485 "Offset of Compilation Unit Info");
11486 else
11487 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11488 debug_info_section,
11489 "Offset of Compilation Unit Info");
11490 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11491 "Compilation Unit Length");
11492
11493 FOR_EACH_VEC_ELT (*names, i, pub)
11494 {
11495 if (include_pubname_in_output (names, pub))
11496 {
11497 dw_offset die_offset = pub->die->die_offset;
11498
11499 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11500 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11501 gcc_assert (pub->die->die_mark);
11502
11503 /* If we're putting types in their own .debug_types sections,
11504 the .debug_pubtypes table will still point to the compile
11505 unit (not the type unit), so we want to use the offset of
11506 the skeleton DIE (if there is one). */
11507 if (pub->die->comdat_type_p && names == pubtype_table)
11508 {
11509 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11510
11511 if (type_node != NULL)
11512 die_offset = (type_node->skeleton_die != NULL
11513 ? type_node->skeleton_die->die_offset
11514 : comp_unit_die ()->die_offset);
11515 }
11516
11517 output_pubname (die_offset, pub);
11518 }
11519 }
11520
11521 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11522 }
11523
11524 /* Output public names and types tables if necessary. */
11525
11526 static void
11527 output_pubtables (void)
11528 {
11529 if (!want_pubnames () || !info_section_emitted)
11530 return;
11531
11532 switch_to_section (debug_pubnames_section);
11533 output_pubnames (pubname_table);
11534 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11535 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11536 simply won't look for the section. */
11537 switch_to_section (debug_pubtypes_section);
11538 output_pubnames (pubtype_table);
11539 }
11540
11541
11542 /* Output the information that goes into the .debug_aranges table.
11543 Namely, define the beginning and ending address range of the
11544 text section generated for this compilation unit. */
11545
11546 static void
11547 output_aranges (void)
11548 {
11549 unsigned i;
11550 unsigned long aranges_length = size_of_aranges ();
11551
11552 if (!XCOFF_DEBUGGING_INFO)
11553 {
11554 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11555 dw2_asm_output_data (4, 0xffffffff,
11556 "Initial length escape value indicating 64-bit DWARF extension");
11557 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11558 "Length of Address Ranges Info");
11559 }
11560
11561 /* Version number for aranges is still 2, even up to DWARF5. */
11562 dw2_asm_output_data (2, 2, "DWARF aranges version");
11563 if (dwarf_split_debug_info)
11564 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11565 debug_skeleton_info_section,
11566 "Offset of Compilation Unit Info");
11567 else
11568 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11569 debug_info_section,
11570 "Offset of Compilation Unit Info");
11571 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11572 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11573
11574 /* We need to align to twice the pointer size here. */
11575 if (DWARF_ARANGES_PAD_SIZE)
11576 {
11577 /* Pad using 2 byte words so that the padding is correct for any
11578 pointer size. */
11579 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11580 2 * DWARF2_ADDR_SIZE);
11581 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11582 dw2_asm_output_data (2, 0, NULL);
11583 }
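/* For example, with 4-byte DWARF offsets and 8-byte target addresses the
   header emitted so far (4-byte length, 2-byte version, 4-byte CU offset,
   1-byte address size, 1-byte segment size) occupies 12 bytes, so
   DWARF_ARANGES_PAD_SIZE is 4 and two 2-byte pad words are written,
   letting the first (address, length) pair start at a multiple of
   2 * DWARF2_ADDR_SIZE = 16.  */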
11584
11585 /* It is necessary not to output these entries if the sections were
11586 not used; if the sections were not used, the length will be 0 and
11587 the address may end up as 0 if the section is discarded by ld
11588 --gc-sections, leaving an invalid (0, 0) entry that can be
11589 confused with the terminator. */
11590 if (text_section_used)
11591 {
11592 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11593 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11594 text_section_label, "Length");
11595 }
11596 if (cold_text_section_used)
11597 {
11598 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11599 "Address");
11600 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11601 cold_text_section_label, "Length");
11602 }
11603
11604 if (have_multiple_function_sections)
11605 {
11606 unsigned fde_idx;
11607 dw_fde_ref fde;
11608
11609 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11610 {
11611 if (DECL_IGNORED_P (fde->decl))
11612 continue;
11613 if (!fde->in_std_section)
11614 {
11615 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11616 "Address");
11617 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11618 fde->dw_fde_begin, "Length");
11619 }
11620 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11621 {
11622 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11623 "Address");
11624 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11625 fde->dw_fde_second_begin, "Length");
11626 }
11627 }
11628 }
11629
11630 /* Output the terminator words. */
11631 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11632 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11633 }
11634
11635 /* Add a new entry to .debug_ranges. Return its index into
11636 ranges_table vector. */
11637
11638 static unsigned int
11639 add_ranges_num (int num, bool maybe_new_sec)
11640 {
11641 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11642 vec_safe_push (ranges_table, r);
11643 return vec_safe_length (ranges_table) - 1;
11644 }
11645
11646 /* Add a new entry to .debug_ranges corresponding to a block, or a
11647 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11648 this entry might be in a different section from previous range. */
11649
11650 static unsigned int
11651 add_ranges (const_tree block, bool maybe_new_sec)
11652 {
11653 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11654 }
11655
11656 /* Note that (*ranges_table)[offset] is either a head of a rnglist
11657 chain, or a middle entry of a chain that will be directly referred to. */
11658
11659 static void
11660 note_rnglist_head (unsigned int offset)
11661 {
11662 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11663 return;
11664 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11665 }
11666
11667 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11668 When using dwarf_split_debug_info, address attributes in dies destined
11669 for the final executable should be direct references--setting the
11670 parameter force_direct ensures this behavior. */
11671
11672 static void
11673 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11674 bool *added, bool force_direct)
11675 {
11676 unsigned int in_use = vec_safe_length (ranges_by_label);
11677 unsigned int offset;
11678 dw_ranges_by_label rbl = { begin, end };
11679 vec_safe_push (ranges_by_label, rbl);
11680 offset = add_ranges_num (-(int)in_use - 1, true);
11681 if (!*added)
11682 {
11683 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11684 *added = true;
11685 note_rnglist_head (offset);
11686 }
11687 }
11688
11689 /* Emit .debug_ranges section. */
11690
11691 static void
11692 output_ranges (void)
11693 {
11694 unsigned i;
11695 static const char *const start_fmt = "Offset %#x";
11696 const char *fmt = start_fmt;
11697 dw_ranges *r;
11698
11699 switch_to_section (debug_ranges_section);
11700 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11701 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11702 {
11703 int block_num = r->num;
11704
11705 if (block_num > 0)
11706 {
11707 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11708 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11709
11710 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11711 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11712
11713 /* If all code is in the text section, then the compilation
11714 unit base address defaults to DW_AT_low_pc, which is the
11715 base of the text section. */
11716 if (!have_multiple_function_sections)
11717 {
11718 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11719 text_section_label,
11720 fmt, i * 2 * DWARF2_ADDR_SIZE);
11721 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11722 text_section_label, NULL);
11723 }
11724
11725 /* Otherwise, the compilation unit base address is zero,
11726 which allows us to use absolute addresses, and not worry
11727 about whether the target supports cross-section
11728 arithmetic. */
11729 else
11730 {
11731 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11732 fmt, i * 2 * DWARF2_ADDR_SIZE);
11733 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11734 }
11735
11736 fmt = NULL;
11737 }
11738
11739 /* Negative block_num stands for an index into ranges_by_label. */
11740 else if (block_num < 0)
11741 {
11742 int lab_idx = - block_num - 1;
11743
11744 if (!have_multiple_function_sections)
11745 {
11746 gcc_unreachable ();
11747 #if 0
11748 /* If we ever use add_ranges_by_labels () for a single
11749 function section, all we have to do is to take out
11750 the #if 0 above. */
11751 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11752 (*ranges_by_label)[lab_idx].begin,
11753 text_section_label,
11754 fmt, i * 2 * DWARF2_ADDR_SIZE);
11755 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11756 (*ranges_by_label)[lab_idx].end,
11757 text_section_label, NULL);
11758 #endif
11759 }
11760 else
11761 {
11762 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11763 (*ranges_by_label)[lab_idx].begin,
11764 fmt, i * 2 * DWARF2_ADDR_SIZE);
11765 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11766 (*ranges_by_label)[lab_idx].end,
11767 NULL);
11768 }
11769 }
11770 else
11771 {
11772 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11773 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11774 fmt = start_fmt;
11775 }
11776 }
11777 }
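/* Each .debug_ranges entry written above is thus a pair of address-sized
   values: (begin, end) offsets relative to the text section label when all
   code is in one section, or absolute (begin, end) addresses otherwise,
   with a (0, 0) pair terminating each list.  Base address selection
   entries (all-ones followed by a base) are not used here.  */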
11778
11779 /* Non-zero if .debug_line_str should be used for .debug_line section
11780 strings or strings that are likely shareable with those. */
11781 #define DWARF5_USE_DEBUG_LINE_STR \
11782 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11783 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11784 /* FIXME: there is no .debug_line_str.dwo section, \
11785 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11786 && !dwarf_split_debug_info)
11787
11788 /* Assign .debug_rnglists indexes. */
11789
11790 static void
11791 index_rnglists (void)
11792 {
11793 unsigned i;
11794 dw_ranges *r;
11795
11796 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11797 if (r->label)
11798 r->idx = rnglist_idx++;
11799 }
11800
11801 /* Emit .debug_rnglists section. */
11802
11803 static void
11804 output_rnglists (unsigned generation)
11805 {
11806 unsigned i;
11807 dw_ranges *r;
11808 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11809 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11810 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11811
11812 switch_to_section (debug_ranges_section);
11813 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11814 /* There are up to 4 unique ranges labels per generation.
11815 See also init_sections_and_labels. */
11816 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11817 2 + generation * 4);
11818 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11819 3 + generation * 4);
11820 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11821 dw2_asm_output_data (4, 0xffffffff,
11822 "Initial length escape value indicating "
11823 "64-bit DWARF extension");
11824 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11825 "Length of Range Lists");
11826 ASM_OUTPUT_LABEL (asm_out_file, l1);
11827 output_dwarf_version ();
11828 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11829 dw2_asm_output_data (1, 0, "Segment Size");
11830 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11831 about relocation sizes and primarily care about the size of .debug*
11832 sections in linked shared libraries and executables, then
11833 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11834 into it are usually larger than just DW_FORM_sec_offset offsets
11835 into the .debug_rnglists section. */
11836 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11837 "Offset Entry Count");
11838 if (dwarf_split_debug_info)
11839 {
11840 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11841 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11842 if (r->label)
11843 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11844 ranges_base_label, NULL);
11845 }
11846
11847 const char *lab = "";
11848 unsigned int len = vec_safe_length (ranges_table);
11849 const char *base = NULL;
11850 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11851 {
11852 int block_num = r->num;
11853
11854 if (r->label)
11855 {
11856 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11857 lab = r->label;
11858 }
11859 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11860 base = NULL;
11861 if (block_num > 0)
11862 {
11863 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11864 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11865
11866 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11867 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11868
11869 if (HAVE_AS_LEB128)
11870 {
11871 /* If all code is in the text section, then the compilation
11872 unit base address defaults to DW_AT_low_pc, which is the
11873 base of the text section. */
11874 if (!have_multiple_function_sections)
11875 {
11876 dw2_asm_output_data (1, DW_RLE_offset_pair,
11877 "DW_RLE_offset_pair (%s)", lab);
11878 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11879 "Range begin address (%s)", lab);
11880 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11881 "Range end address (%s)", lab);
11882 continue;
11883 }
11884 if (base == NULL)
11885 {
11886 dw_ranges *r2 = NULL;
11887 if (i < len - 1)
11888 r2 = &(*ranges_table)[i + 1];
11889 if (r2
11890 && r2->num != 0
11891 && r2->label == NULL
11892 && !r2->maybe_new_sec)
11893 {
11894 dw2_asm_output_data (1, DW_RLE_base_address,
11895 "DW_RLE_base_address (%s)", lab);
11896 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11897 "Base address (%s)", lab);
11898 strcpy (basebuf, blabel);
11899 base = basebuf;
11900 }
11901 }
11902 if (base)
11903 {
11904 dw2_asm_output_data (1, DW_RLE_offset_pair,
11905 "DW_RLE_offset_pair (%s)", lab);
11906 dw2_asm_output_delta_uleb128 (blabel, base,
11907 "Range begin address (%s)", lab);
11908 dw2_asm_output_delta_uleb128 (elabel, base,
11909 "Range end address (%s)", lab);
11910 continue;
11911 }
11912 dw2_asm_output_data (1, DW_RLE_start_length,
11913 "DW_RLE_start_length (%s)", lab);
11914 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11915 "Range begin address (%s)", lab);
11916 dw2_asm_output_delta_uleb128 (elabel, blabel,
11917 "Range length (%s)", lab);
11918 }
11919 else
11920 {
11921 dw2_asm_output_data (1, DW_RLE_start_end,
11922 "DW_RLE_start_end (%s)", lab);
11923 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11924 "Range begin address (%s)", lab);
11925 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11926 "Range end address (%s)", lab);
11927 }
11928 }
11929
11930 /* Negative block_num stands for an index into ranges_by_label. */
11931 else if (block_num < 0)
11932 {
11933 int lab_idx = - block_num - 1;
11934 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11935 const char *elabel = (*ranges_by_label)[lab_idx].end;
11936
11937 if (!have_multiple_function_sections)
11938 gcc_unreachable ();
11939 if (HAVE_AS_LEB128)
11940 {
11941 dw2_asm_output_data (1, DW_RLE_start_length,
11942 "DW_RLE_start_length (%s)", lab);
11943 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11944 "Range begin address (%s)", lab);
11945 dw2_asm_output_delta_uleb128 (elabel, blabel,
11946 "Range length (%s)", lab);
11947 }
11948 else
11949 {
11950 dw2_asm_output_data (1, DW_RLE_start_end,
11951 "DW_RLE_start_end (%s)", lab);
11952 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11953 "Range begin address (%s)", lab);
11954 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11955 "Range end address (%s)", lab);
11956 }
11957 }
11958 else
11959 dw2_asm_output_data (1, DW_RLE_end_of_list,
11960 "DW_RLE_end_of_list (%s)", lab);
11961 }
11962 ASM_OUTPUT_LABEL (asm_out_file, l2);
11963 }
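/* In summary, the range list entries emitted above use a small subset of
   the DWARF 5 encodings: DW_RLE_offset_pair (two ULEB128 offsets relative
   to the text section base or to a preceding DW_RLE_base_address entry),
   DW_RLE_start_length (an address plus a ULEB128 length), DW_RLE_start_end
   (two addresses, used when the assembler cannot emit LEB128 deltas), and
   DW_RLE_end_of_list as the terminator.  */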
11964
11965 /* Data structure containing information about input files. */
11966 struct file_info
11967 {
11968 const char *path; /* Complete file name. */
11969 const char *fname; /* File name part. */
11970 int length; /* Length of entire string. */
11971 struct dwarf_file_data * file_idx; /* Index in input file table. */
11972 int dir_idx; /* Index in directory table. */
11973 };
11974
11975 /* Data structure containing information about directories with source
11976 files. */
11977 struct dir_info
11978 {
11979 const char *path; /* Path including directory name. */
11980 int length; /* Path length. */
11981 int prefix; /* Index of directory entry which is a prefix. */
11982 int count; /* Number of files in this directory. */
11983 int dir_idx; /* Index of directory used as base. */
11984 };
11985
11986 /* Callback function for file_info comparison. We sort by looking at
11987 the directories in the path. */
11988
11989 static int
11990 file_info_cmp (const void *p1, const void *p2)
11991 {
11992 const struct file_info *const s1 = (const struct file_info *) p1;
11993 const struct file_info *const s2 = (const struct file_info *) p2;
11994 const unsigned char *cp1;
11995 const unsigned char *cp2;
11996
11997 /* Take care of file names without directories. We need to make sure that
11998 we return consistent values to qsort since some implementations will get
11999 confused if we return the same value when identical operands are passed
12000 in opposite orders. So if neither has a directory, return 0; otherwise
12001 return 1 or -1 depending on which one has the directory. We want the one
12002 with the directory to sort after the one without, so all files without a
12003 directory are at the start (normally only the compilation unit file). */
12004 if ((s1->path == s1->fname || s2->path == s2->fname))
12005 return (s2->path == s2->fname) - (s1->path == s1->fname);
12006
12007 cp1 = (const unsigned char *) s1->path;
12008 cp2 = (const unsigned char *) s2->path;
12009
12010 while (1)
12011 {
12012 ++cp1;
12013 ++cp2;
12014 /* Reached the end of the first path? If so, handle like above,
12015 but now we want longer directory prefixes before shorter ones. */
12016 if ((cp1 == (const unsigned char *) s1->fname)
12017 || (cp2 == (const unsigned char *) s2->fname))
12018 return ((cp1 == (const unsigned char *) s1->fname)
12019 - (cp2 == (const unsigned char *) s2->fname));
12020
12021 /* Character of current path component the same? */
12022 else if (*cp1 != *cp2)
12023 return *cp1 - *cp2;
12024 }
12025 }
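/* As an illustration of the resulting order: a bare "a.c" sorts first, and
   "lib/sub/x.c" sorts before "lib/x.c", because files without a directory
   come first and, where one directory is a prefix of another, the longer
   directory prefix is placed before the shorter one.  */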
12026
12027 struct file_name_acquire_data
12028 {
12029 struct file_info *files;
12030 int used_files;
12031 int max_files;
12032 };
12033
12034 /* Traversal function for the hash table. */
12035
12036 int
12037 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12038 {
12039 struct dwarf_file_data *d = *slot;
12040 struct file_info *fi;
12041 const char *f;
12042
12043 gcc_assert (fnad->max_files >= d->emitted_number);
12044
12045 if (! d->emitted_number)
12046 return 1;
12047
12048 gcc_assert (fnad->max_files != fnad->used_files);
12049
12050 fi = fnad->files + fnad->used_files++;
12051
12052 /* Skip all leading "./". */
12053 f = d->filename;
12054 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12055 f += 2;
12056
12057 /* Create a new array entry. */
12058 fi->path = f;
12059 fi->length = strlen (f);
12060 fi->file_idx = d;
12061
12062 /* Search for the file name part. */
12063 f = strrchr (f, DIR_SEPARATOR);
12064 #if defined (DIR_SEPARATOR_2)
12065 {
12066 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12067
12068 if (g != NULL)
12069 {
12070 if (f == NULL || f < g)
12071 f = g;
12072 }
12073 }
12074 #endif
12075
12076 fi->fname = f == NULL ? fi->path : f + 1;
12077 return 1;
12078 }
12079
12080 /* Helper function for output_file_names. Emit a FORM encoded
12081 string STR, annotating the assembly comment with ENTRY_KIND and
12082 index IDX. */
12083
12084 static void
12085 output_line_string (enum dwarf_form form, const char *str,
12086 const char *entry_kind, unsigned int idx)
12087 {
12088 switch (form)
12089 {
12090 case DW_FORM_string:
12091 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12092 break;
12093 case DW_FORM_line_strp:
12094 if (!debug_line_str_hash)
12095 debug_line_str_hash
12096 = hash_table<indirect_string_hasher>::create_ggc (10);
12097
12098 struct indirect_string_node *node;
12099 node = find_AT_string_in_table (str, debug_line_str_hash);
12100 set_indirect_string (node);
12101 node->form = form;
12102 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12103 debug_line_str_section, "%s: %#x: \"%s\"",
12104 entry_kind, 0, node->str);
12105 break;
12106 default:
12107 gcc_unreachable ();
12108 }
12109 }
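/* In other words, a DW_FORM_string entry is written inline as a
   NUL-terminated string, while a DW_FORM_line_strp entry is written as an
   offset-sized reference to a string interned in .debug_line_str; only
   these two forms are expected here.  */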
12110
12111 /* Output the directory table and the file name table. We try to minimize
12112 the total amount of memory needed. A heuristic is used to avoid large
12113 slowdowns with many input files. */
12114
12115 static void
12116 output_file_names (void)
12117 {
12118 struct file_name_acquire_data fnad;
12119 int numfiles;
12120 struct file_info *files;
12121 struct dir_info *dirs;
12122 int *saved;
12123 int *savehere;
12124 int *backmap;
12125 int ndirs;
12126 int idx_offset;
12127 int i;
12128
12129 if (!last_emitted_file)
12130 {
12131 if (dwarf_version >= 5)
12132 {
12133 dw2_asm_output_data (1, 0, "Directory entry format count");
12134 dw2_asm_output_data_uleb128 (0, "Directories count");
12135 dw2_asm_output_data (1, 0, "File name entry format count");
12136 dw2_asm_output_data_uleb128 (0, "File names count");
12137 }
12138 else
12139 {
12140 dw2_asm_output_data (1, 0, "End directory table");
12141 dw2_asm_output_data (1, 0, "End file name table");
12142 }
12143 return;
12144 }
12145
12146 numfiles = last_emitted_file->emitted_number;
12147
12148 /* Allocate the various arrays we need. */
12149 files = XALLOCAVEC (struct file_info, numfiles);
12150 dirs = XALLOCAVEC (struct dir_info, numfiles);
12151
12152 fnad.files = files;
12153 fnad.used_files = 0;
12154 fnad.max_files = numfiles;
12155 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12156 gcc_assert (fnad.used_files == fnad.max_files);
12157
12158 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12159
12160 /* Find all the different directories used. */
12161 dirs[0].path = files[0].path;
12162 dirs[0].length = files[0].fname - files[0].path;
12163 dirs[0].prefix = -1;
12164 dirs[0].count = 1;
12165 dirs[0].dir_idx = 0;
12166 files[0].dir_idx = 0;
12167 ndirs = 1;
12168
12169 for (i = 1; i < numfiles; i++)
12170 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12171 && memcmp (dirs[ndirs - 1].path, files[i].path,
12172 dirs[ndirs - 1].length) == 0)
12173 {
12174 /* Same directory as last entry. */
12175 files[i].dir_idx = ndirs - 1;
12176 ++dirs[ndirs - 1].count;
12177 }
12178 else
12179 {
12180 int j;
12181
12182 /* This is a new directory. */
12183 dirs[ndirs].path = files[i].path;
12184 dirs[ndirs].length = files[i].fname - files[i].path;
12185 dirs[ndirs].count = 1;
12186 dirs[ndirs].dir_idx = ndirs;
12187 files[i].dir_idx = ndirs;
12188
12189 /* Search for a prefix. */
12190 dirs[ndirs].prefix = -1;
12191 for (j = 0; j < ndirs; j++)
12192 if (dirs[j].length < dirs[ndirs].length
12193 && dirs[j].length > 1
12194 && (dirs[ndirs].prefix == -1
12195 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12196 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12197 dirs[ndirs].prefix = j;
12198
12199 ++ndirs;
12200 }
12201
12202 /* Now to the actual work. We have to find a subset of the directories which
12203 allows expressing each file name, using references to the directory table,
12204 with the fewest characters. We do not do an exhaustive search where we
12205 would have to check every combination of every single possible prefix.
12206 Instead we use a heuristic which provides nearly optimal results in most
12207 cases and is never far off. */
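/* As a sketch of the idea: if "/usr/include/" is one directory entry and
   "/usr/include/sys/" holds several files, treating the shorter entry as
   the base for the longer one means each "sys/" file can be expressed
   relative to "/usr/include/".  The loop below only commits to a directory
   as a base when the characters this saves, summed over the files that
   would use it, exceed the cost of the directory string itself.  */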
12208 saved = XALLOCAVEC (int, ndirs);
12209 savehere = XALLOCAVEC (int, ndirs);
12210
12211 memset (saved, '\0', ndirs * sizeof (saved[0]));
12212 for (i = 0; i < ndirs; i++)
12213 {
12214 int j;
12215 int total;
12216
12217 /* We can always save some space for the current directory. But this
12218 does not mean it will be enough to justify adding the directory. */
12219 savehere[i] = dirs[i].length;
12220 total = (savehere[i] - saved[i]) * dirs[i].count;
12221
12222 for (j = i + 1; j < ndirs; j++)
12223 {
12224 savehere[j] = 0;
12225 if (saved[j] < dirs[i].length)
12226 {
12227 /* Determine whether the dirs[i] path is a prefix of the
12228 dirs[j] path. */
12229 int k;
12230
12231 k = dirs[j].prefix;
12232 while (k != -1 && k != (int) i)
12233 k = dirs[k].prefix;
12234
12235 if (k == (int) i)
12236 {
12237 /* Yes it is. We can possibly save some memory by
12238 writing the filenames in dirs[j] relative to
12239 dirs[i]. */
12240 savehere[j] = dirs[i].length;
12241 total += (savehere[j] - saved[j]) * dirs[j].count;
12242 }
12243 }
12244 }
12245
12246 /* Check whether we can save enough to justify adding the dirs[i]
12247 directory. */
12248 if (total > dirs[i].length + 1)
12249 {
12250 /* It's worthwhile adding. */
12251 for (j = i; j < ndirs; j++)
12252 if (savehere[j] > 0)
12253 {
12254 /* Remember how much we saved for this directory so far. */
12255 saved[j] = savehere[j];
12256
12257 /* Remember the prefix directory. */
12258 dirs[j].dir_idx = i;
12259 }
12260 }
12261 }
12262
12263 /* Emit the directory name table. */
12264 idx_offset = dirs[0].length > 0 ? 1 : 0;
12265 enum dwarf_form str_form = DW_FORM_string;
12266 enum dwarf_form idx_form = DW_FORM_udata;
12267 if (dwarf_version >= 5)
12268 {
12269 const char *comp_dir = comp_dir_string ();
12270 if (comp_dir == NULL)
12271 comp_dir = "";
12272 dw2_asm_output_data (1, 1, "Directory entry format count");
12273 if (DWARF5_USE_DEBUG_LINE_STR)
12274 str_form = DW_FORM_line_strp;
12275 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12276 dw2_asm_output_data_uleb128 (str_form, "%s",
12277 get_DW_FORM_name (str_form));
12278 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12279 if (str_form == DW_FORM_string)
12280 {
12281 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12282 for (i = 1 - idx_offset; i < ndirs; i++)
12283 dw2_asm_output_nstring (dirs[i].path,
12284 dirs[i].length
12285 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12286 "Directory Entry: %#x", i + idx_offset);
12287 }
12288 else
12289 {
12290 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12291 for (i = 1 - idx_offset; i < ndirs; i++)
12292 {
12293 const char *str
12294 = ggc_alloc_string (dirs[i].path,
12295 dirs[i].length
12296 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12297 output_line_string (str_form, str, "Directory Entry",
12298 (unsigned) i + idx_offset);
12299 }
12300 }
12301 }
12302 else
12303 {
12304 for (i = 1 - idx_offset; i < ndirs; i++)
12305 dw2_asm_output_nstring (dirs[i].path,
12306 dirs[i].length
12307 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12308 "Directory Entry: %#x", i + idx_offset);
12309
12310 dw2_asm_output_data (1, 0, "End directory table");
12311 }
12312
12313 /* We have to emit them in the order of emitted_number since that's
12314 used in the debug info generation. To do this efficiently we
12315 generate a back-mapping of the indices first. */
12316 backmap = XALLOCAVEC (int, numfiles);
12317 for (i = 0; i < numfiles; i++)
12318 backmap[files[i].file_idx->emitted_number - 1] = i;
12319
12320 if (dwarf_version >= 5)
12321 {
12322 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12323 if (filename0 == NULL)
12324 filename0 = "";
12325 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12326 DW_FORM_data2. Choose one based on the number of directories
12327 and how much space they would occupy in each encoding.
12328 If we have at most 256 directories, all indexes fit into
12329 a single byte, so DW_FORM_data1 is most compact (if there
12330 are at most 128 directories, DW_FORM_udata would be just as
12331 compact, but no shorter and slower to decode). */
12332 if (ndirs + idx_offset <= 256)
12333 idx_form = DW_FORM_data1;
12334 /* If there are more than 65536 directories, we have to use
12335 DW_FORM_udata, since DW_FORM_data2 can't refer to them.
12336 Otherwise, compute how much space all the indexes would occupy
12337 if they used DW_FORM_udata (sum), compare that to how large the
12338 DW_FORM_data2 encoding would be, and pick the more efficient one. */
12339 else if (ndirs + idx_offset <= 65536)
12340 {
12341 unsigned HOST_WIDE_INT sum = 1;
12342 for (i = 0; i < numfiles; i++)
12343 {
12344 int file_idx = backmap[i];
12345 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12346 sum += size_of_uleb128 (dir_idx);
12347 }
12348 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12349 idx_form = DW_FORM_data2;
12350 }
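/* Worked out, the comparison above is: a DW_FORM_udata directory index
   costs 1 byte while it is below 128 and 2 or more bytes above that,
   whereas DW_FORM_data2 always costs exactly 2 bytes per file entry
   (including entry 0), so DW_FORM_data2 is chosen only when the
   accumulated ULEB128 size SUM reaches 2 * (numfiles + 1).  */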
12351 #ifdef VMS_DEBUGGING_INFO
12352 dw2_asm_output_data (1, 4, "File name entry format count");
12353 #else
12354 dw2_asm_output_data (1, 2, "File name entry format count");
12355 #endif
12356 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12357 dw2_asm_output_data_uleb128 (str_form, "%s",
12358 get_DW_FORM_name (str_form));
12359 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12360 "DW_LNCT_directory_index");
12361 dw2_asm_output_data_uleb128 (idx_form, "%s",
12362 get_DW_FORM_name (idx_form));
12363 #ifdef VMS_DEBUGGING_INFO
12364 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12365 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12366 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12367 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12368 #endif
12369 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12370
12371 output_line_string (str_form, filename0, "File Entry", 0);
12372
12373 /* Include directory index. */
12374 if (idx_form != DW_FORM_udata)
12375 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12376 0, NULL);
12377 else
12378 dw2_asm_output_data_uleb128 (0, NULL);
12379
12380 #ifdef VMS_DEBUGGING_INFO
12381 dw2_asm_output_data_uleb128 (0, NULL);
12382 dw2_asm_output_data_uleb128 (0, NULL);
12383 #endif
12384 }
12385
12386 /* Now write all the file names. */
12387 for (i = 0; i < numfiles; i++)
12388 {
12389 int file_idx = backmap[i];
12390 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12391
12392 #ifdef VMS_DEBUGGING_INFO
12393 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12394
12395 /* Setting these fields can lead to debugger miscomparisons,
12396 but VMS Debug requires them to be set correctly. */
12397
12398 int ver;
12399 long long cdt;
12400 long siz;
12401 int maxfilelen = (strlen (files[file_idx].path)
12402 + dirs[dir_idx].length
12403 + MAX_VMS_VERSION_LEN + 1);
12404 char *filebuf = XALLOCAVEC (char, maxfilelen);
12405
12406 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12407 snprintf (filebuf, maxfilelen, "%s;%d",
12408 files[file_idx].path + dirs[dir_idx].length, ver);
12409
12410 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12411
12412 /* Include directory index. */
12413 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12414 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12415 dir_idx + idx_offset, NULL);
12416 else
12417 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12418
12419 /* Modification time. */
12420 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12421 &cdt, 0, 0, 0) == 0)
12422 ? cdt : 0, NULL);
12423
12424 /* File length in bytes. */
12425 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12426 0, &siz, 0, 0) == 0)
12427 ? siz : 0, NULL);
12428 #else
12429 output_line_string (str_form,
12430 files[file_idx].path + dirs[dir_idx].length,
12431 "File Entry", (unsigned) i + 1);
12432
12433 /* Include directory index. */
12434 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12435 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12436 dir_idx + idx_offset, NULL);
12437 else
12438 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12439
12440 if (dwarf_version >= 5)
12441 continue;
12442
12443 /* Modification time. */
12444 dw2_asm_output_data_uleb128 (0, NULL);
12445
12446 /* File length in bytes. */
12447 dw2_asm_output_data_uleb128 (0, NULL);
12448 #endif /* VMS_DEBUGGING_INFO */
12449 }
12450
12451 if (dwarf_version < 5)
12452 dw2_asm_output_data (1, 0, "End file name table");
12453 }
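/* For illustration only (not part of the original file): on a non-VMS target
   emitting DWARF 5, and assuming the str_form/idx_form pair chosen above came
   out as DW_FORM_string and DW_FORM_udata, the file name table written by the
   code above would look roughly like this in the assembly output (constants
   per the DWARF 5 encoding tables; the file name "foo.c" is a made-up
   example):

	.byte	 0x2		# File name entry format count
	.uleb128 0x1		# DW_LNCT_path
	.uleb128 0x8		# DW_FORM_string
	.uleb128 0x2		# DW_LNCT_directory_index
	.uleb128 0xf		# DW_FORM_udata
	.uleb128 N+1		# File names count
	.string	 "foo.c"	# File Entry 0 (the primary source file)
	.uleb128 0		# its include directory index
	...			# N more entries, one per referenced file

   The actual forms depend on how str_form and idx_form were selected earlier,
   so treat this as a sketch rather than the exact output.  */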
12454
12455
12456 /* Output one line number table into the .debug_line section. */
12457
12458 static void
12459 output_one_line_info_table (dw_line_info_table *table)
12460 {
12461 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12462 unsigned int current_line = 1;
12463 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12464 dw_line_info_entry *ent, *prev_addr;
12465 size_t i;
12466 unsigned int view;
12467
12468 view = 0;
12469
12470 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12471 {
12472 switch (ent->opcode)
12473 {
12474 case LI_set_address:
12475 /* ??? Unfortunately, we have little choice here currently, and
12476 must always use the most general form. GCC does not know the
12477 address delta itself, so we can't use DW_LNS_advance_pc. Many
12478 ports do have length attributes which will give an upper bound
12479 on the address range. We could perhaps use length attributes
12480 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12481 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12482
12483 view = 0;
12484
12485 /* This can handle any delta. This takes
12486 3+DWARF2_ADDR_SIZE bytes. */
12487 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12488 debug_variable_location_views
12489 ? ", reset view to 0" : "");
12490 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12491 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12492 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12493
12494 prev_addr = ent;
12495 break;
12496
12497 case LI_adv_address:
12498 {
12499 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12500 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12501 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12502
12503 view++;
12504
12505 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12506 dw2_asm_output_delta (2, line_label, prev_label,
12507 "from %s to %s", prev_label, line_label);
12508
12509 prev_addr = ent;
12510 break;
12511 }
12512
12513 case LI_set_line:
12514 if (ent->val == current_line)
12515 {
12516 /* We still need to start a new row, so output a copy insn. */
12517 dw2_asm_output_data (1, DW_LNS_copy,
12518 "copy line %u", current_line);
12519 }
12520 else
12521 {
12522 int line_offset = ent->val - current_line;
12523 int line_delta = line_offset - DWARF_LINE_BASE;
12524
12525 current_line = ent->val;
12526 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12527 {
12528 /* This can handle deltas from -10 to 234, using the current
12529 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12530 This takes 1 byte. */
12531 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12532 "line %u", current_line);
12533 }
12534 else
12535 {
12536 /* This can handle any delta. This takes at least 3 bytes,
12537 depending on the value being encoded. */
12538 dw2_asm_output_data (1, DW_LNS_advance_line,
12539 "advance to line %u", current_line);
12540 dw2_asm_output_data_sleb128 (line_offset, NULL);
12541 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12542 }
12543 }
12544 break;
12545
12546 case LI_set_file:
12547 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12548 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12549 break;
12550
12551 case LI_set_column:
12552 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12553 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12554 break;
12555
12556 case LI_negate_stmt:
12557 current_is_stmt = !current_is_stmt;
12558 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12559 "is_stmt %d", current_is_stmt);
12560 break;
12561
12562 case LI_set_prologue_end:
12563 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12564 "set prologue end");
12565 break;
12566
12567 case LI_set_epilogue_begin:
12568 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12569 "set epilogue begin");
12570 break;
12571
12572 case LI_set_discriminator:
12573 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12574 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12575 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12576 dw2_asm_output_data_uleb128 (ent->val, NULL);
12577 break;
12578 }
12579 }
12580
12581 /* Emit debug info for the address of the end of the table. */
12582 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12583 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12584 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12585 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12586
12587 dw2_asm_output_data (1, 0, "end sequence");
12588 dw2_asm_output_data_uleb128 (1, NULL);
12589 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12590 }
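/* A worked example of the special-opcode arithmetic in the LI_set_line case
   above, assuming for the sake of the example (these values are assumptions,
   not guarantees) DWARF_LINE_BASE == -10 and DWARF_LINE_OPCODE_BASE == 13:
   advancing from line 7 to line 10 gives line_offset = 3 and
   line_delta = 3 - (-10) = 13, so the single byte
   DWARF_LINE_OPCODE_BASE + line_delta = 26 is emitted.  Per the DWARF line
   number program definition, this special opcode appends a row, advances the
   line register by line_offset, and advances the address by
   (opcode - opcode_base) / DWARF_LINE_RANGE operation units, which is zero
   here because line_delta is kept below DWARF_LINE_RANGE - 1; GCC only ever
   advances the address through DW_LNE_set_address and
   DW_LNS_fixed_advance_pc.  */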
12591
12592 /* Output the source line number correspondence information. This
12593 information goes into the .debug_line section. */
12594
12595 static void
12596 output_line_info (bool prologue_only)
12597 {
12598 static unsigned int generation;
12599 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12600 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12601 bool saw_one = false;
12602 int opc;
12603
12604 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12605 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12606 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12607 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12608
12609 if (!XCOFF_DEBUGGING_INFO)
12610 {
12611 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12612 dw2_asm_output_data (4, 0xffffffff,
12613 "Initial length escape value indicating 64-bit DWARF extension");
12614 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12615 "Length of Source Line Info");
12616 }
12617
12618 ASM_OUTPUT_LABEL (asm_out_file, l1);
12619
12620 output_dwarf_version ();
12621 if (dwarf_version >= 5)
12622 {
12623 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12624 dw2_asm_output_data (1, 0, "Segment Size");
12625 }
12626 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12627 ASM_OUTPUT_LABEL (asm_out_file, p1);
12628
12629 /* Define the architecture-dependent minimum instruction length (in bytes).
12630 In this implementation of DWARF, this field is used for information
12631 purposes only. Since GCC generates assembly language, we have no
12632 a priori knowledge of how many instruction bytes are generated for each
12633 source line, and therefore can use only the DW_LNE_set_address and
12634 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12635 this as '1', which is "correct enough" for all architectures,
12636 and don't let the target override. */
12637 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12638
12639 if (dwarf_version >= 4)
12640 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12641 "Maximum Operations Per Instruction");
12642 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12643 "Default is_stmt_start flag");
12644 dw2_asm_output_data (1, DWARF_LINE_BASE,
12645 "Line Base Value (Special Opcodes)");
12646 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12647 "Line Range Value (Special Opcodes)");
12648 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12649 "Special Opcode Base");
12650
12651 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12652 {
12653 int n_op_args;
12654 switch (opc)
12655 {
12656 case DW_LNS_advance_pc:
12657 case DW_LNS_advance_line:
12658 case DW_LNS_set_file:
12659 case DW_LNS_set_column:
12660 case DW_LNS_fixed_advance_pc:
12661 case DW_LNS_set_isa:
12662 n_op_args = 1;
12663 break;
12664 default:
12665 n_op_args = 0;
12666 break;
12667 }
12668
12669 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12670 opc, n_op_args);
12671 }
12672
12673 /* Write out the information about the files we use. */
12674 output_file_names ();
12675 ASM_OUTPUT_LABEL (asm_out_file, p2);
12676 if (prologue_only)
12677 {
12678 /* Output the marker for the end of the line number info. */
12679 ASM_OUTPUT_LABEL (asm_out_file, l2);
12680 return;
12681 }
12682
12683 if (separate_line_info)
12684 {
12685 dw_line_info_table *table;
12686 size_t i;
12687
12688 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12689 if (table->in_use)
12690 {
12691 output_one_line_info_table (table);
12692 saw_one = true;
12693 }
12694 }
12695 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12696 {
12697 output_one_line_info_table (cold_text_section_line_info);
12698 saw_one = true;
12699 }
12700
12701 /* ??? Some Darwin linkers crash on a .debug_line section with no
12702 sequences. Further, merely a DW_LNE_end_sequence entry is not
12703 sufficient -- the address column must also be initialized.
12704 Make sure to output at least one set_address/end_sequence pair,
12705 choosing .text since that section is always present. */
12706 if (text_section_line_info->in_use || !saw_one)
12707 output_one_line_info_table (text_section_line_info);
12708
12709 /* Output the marker for the end of the line number info. */
12710 ASM_OUTPUT_LABEL (asm_out_file, l2);
12711 }
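/* Rough shape of the .debug_line contribution produced above, for reference
   (a sketch, not a normative layout): unit length, version, for DWARF 5 the
   address and segment selector sizes, header ("prolog") length, minimum
   instruction length (hardwired to 1), for DWARF 4+ the maximum operations
   per instruction, default_is_stmt, line_base, line_range, opcode_base, the
   standard opcode argument counts, the directory and file tables emitted by
   output_file_names, and finally one line number program per in-use table,
   each ending with DW_LNE_end_sequence.  */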
12712 \f
12713 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12714
12715 static inline bool
12716 need_endianity_attribute_p (bool reverse)
12717 {
12718 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12719 }
12720
12721 /* Given a pointer to a tree node for some base type, return a pointer to
12722 a DIE that describes the given type. REVERSE is true if the type is
12723 to be interpreted in the reverse storage order wrt the target order.
12724
12725 This routine must only be called for GCC type nodes that correspond to
12726 Dwarf base (fundamental) types. */
12727
12728 static dw_die_ref
12729 base_type_die (tree type, bool reverse)
12730 {
12731 dw_die_ref base_type_result;
12732 enum dwarf_type encoding;
12733 bool fpt_used = false;
12734 struct fixed_point_type_info fpt_info;
12735 tree type_bias = NULL_TREE;
12736
12737 /* If this is a subtype that should not be emitted as a subrange type,
12738 use the base type. See subrange_type_for_debug_p. */
12739 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12740 type = TREE_TYPE (type);
12741
12742 switch (TREE_CODE (type))
12743 {
12744 case INTEGER_TYPE:
12745 if ((dwarf_version >= 4 || !dwarf_strict)
12746 && TYPE_NAME (type)
12747 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12748 && DECL_IS_BUILTIN (TYPE_NAME (type))
12749 && DECL_NAME (TYPE_NAME (type)))
12750 {
12751 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12752 if (strcmp (name, "char16_t") == 0
12753 || strcmp (name, "char32_t") == 0)
12754 {
12755 encoding = DW_ATE_UTF;
12756 break;
12757 }
12758 }
12759 if ((dwarf_version >= 3 || !dwarf_strict)
12760 && lang_hooks.types.get_fixed_point_type_info)
12761 {
12762 memset (&fpt_info, 0, sizeof (fpt_info));
12763 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12764 {
12765 fpt_used = true;
12766 encoding = ((TYPE_UNSIGNED (type))
12767 ? DW_ATE_unsigned_fixed
12768 : DW_ATE_signed_fixed);
12769 break;
12770 }
12771 }
12772 if (TYPE_STRING_FLAG (type))
12773 {
12774 if (TYPE_UNSIGNED (type))
12775 encoding = DW_ATE_unsigned_char;
12776 else
12777 encoding = DW_ATE_signed_char;
12778 }
12779 else if (TYPE_UNSIGNED (type))
12780 encoding = DW_ATE_unsigned;
12781 else
12782 encoding = DW_ATE_signed;
12783
12784 if (!dwarf_strict
12785 && lang_hooks.types.get_type_bias)
12786 type_bias = lang_hooks.types.get_type_bias (type);
12787 break;
12788
12789 case REAL_TYPE:
12790 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12791 {
12792 if (dwarf_version >= 3 || !dwarf_strict)
12793 encoding = DW_ATE_decimal_float;
12794 else
12795 encoding = DW_ATE_lo_user;
12796 }
12797 else
12798 encoding = DW_ATE_float;
12799 break;
12800
12801 case FIXED_POINT_TYPE:
12802 if (!(dwarf_version >= 3 || !dwarf_strict))
12803 encoding = DW_ATE_lo_user;
12804 else if (TYPE_UNSIGNED (type))
12805 encoding = DW_ATE_unsigned_fixed;
12806 else
12807 encoding = DW_ATE_signed_fixed;
12808 break;
12809
12810 /* Dwarf2 doesn't know anything about complex ints, so use
12811 a user defined type for it. */
12812 case COMPLEX_TYPE:
12813 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12814 encoding = DW_ATE_complex_float;
12815 else
12816 encoding = DW_ATE_lo_user;
12817 break;
12818
12819 case BOOLEAN_TYPE:
12820 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12821 encoding = DW_ATE_boolean;
12822 break;
12823
12824 default:
12825 /* No other TREE_CODEs are Dwarf fundamental types. */
12826 gcc_unreachable ();
12827 }
12828
12829 base_type_result = new_die_raw (DW_TAG_base_type);
12830
12831 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12832 int_size_in_bytes (type));
12833 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12834
12835 if (need_endianity_attribute_p (reverse))
12836 add_AT_unsigned (base_type_result, DW_AT_endianity,
12837 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12838
12839 add_alignment_attribute (base_type_result, type);
12840
12841 if (fpt_used)
12842 {
12843 switch (fpt_info.scale_factor_kind)
12844 {
12845 case fixed_point_scale_factor_binary:
12846 add_AT_int (base_type_result, DW_AT_binary_scale,
12847 fpt_info.scale_factor.binary);
12848 break;
12849
12850 case fixed_point_scale_factor_decimal:
12851 add_AT_int (base_type_result, DW_AT_decimal_scale,
12852 fpt_info.scale_factor.decimal);
12853 break;
12854
12855 case fixed_point_scale_factor_arbitrary:
12856 /* Arbitrary scale factors cannot be described in standard DWARF,
12857 yet. */
12858 if (!dwarf_strict)
12859 {
12860 /* Describe the scale factor as a rational constant. */
12861 const dw_die_ref scale_factor
12862 = new_die (DW_TAG_constant, comp_unit_die (), type);
12863
12864 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12865 fpt_info.scale_factor.arbitrary.numerator);
12866 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12867 fpt_info.scale_factor.arbitrary.denominator);
12868
12869 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12870 }
12871 break;
12872
12873 default:
12874 gcc_unreachable ();
12875 }
12876 }
12877
12878 if (type_bias)
12879 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12880 dw_scalar_form_constant
12881 | dw_scalar_form_exprloc
12882 | dw_scalar_form_reference,
12883 NULL);
12884
12885 return base_type_result;
12886 }
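/* Example of what the code above produces (illustrative only): for a plain
   32-bit "unsigned int" the result is a DW_TAG_base_type DIE with
   DW_AT_byte_size 4 and DW_AT_encoding DW_ATE_unsigned; if REVERSE is true
   and the endianity attribute is allowed, the same DIE additionally carries
   DW_AT_endianity set to the byte order opposite to the target's.  */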
12887
12888 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12889 named 'auto' in its type: return true for it, false otherwise. */
12890
12891 static inline bool
12892 is_cxx_auto (tree type)
12893 {
12894 if (is_cxx ())
12895 {
12896 tree name = TYPE_IDENTIFIER (type);
12897 if (name == get_identifier ("auto")
12898 || name == get_identifier ("decltype(auto)"))
12899 return true;
12900 }
12901 return false;
12902 }
12903
12904 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12905 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12906
12907 static inline int
12908 is_base_type (tree type)
12909 {
12910 switch (TREE_CODE (type))
12911 {
12912 case INTEGER_TYPE:
12913 case REAL_TYPE:
12914 case FIXED_POINT_TYPE:
12915 case COMPLEX_TYPE:
12916 case BOOLEAN_TYPE:
12917 case POINTER_BOUNDS_TYPE:
12918 return 1;
12919
12920 case VOID_TYPE:
12921 case ARRAY_TYPE:
12922 case RECORD_TYPE:
12923 case UNION_TYPE:
12924 case QUAL_UNION_TYPE:
12925 case ENUMERAL_TYPE:
12926 case FUNCTION_TYPE:
12927 case METHOD_TYPE:
12928 case POINTER_TYPE:
12929 case REFERENCE_TYPE:
12930 case NULLPTR_TYPE:
12931 case OFFSET_TYPE:
12932 case LANG_TYPE:
12933 case VECTOR_TYPE:
12934 return 0;
12935
12936 default:
12937 if (is_cxx_auto (type))
12938 return 0;
12939 gcc_unreachable ();
12940 }
12941
12942 return 0;
12943 }
12944
12945 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12946 node, return the size in bits for the type if it is a constant, or else
12947 return the alignment for the type if the type's size is not constant, or
12948 else return BITS_PER_WORD if the type actually turns out to be an
12949 ERROR_MARK node. */
12950
12951 static inline unsigned HOST_WIDE_INT
12952 simple_type_size_in_bits (const_tree type)
12953 {
12954 if (TREE_CODE (type) == ERROR_MARK)
12955 return BITS_PER_WORD;
12956 else if (TYPE_SIZE (type) == NULL_TREE)
12957 return 0;
12958 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
12959 return tree_to_uhwi (TYPE_SIZE (type));
12960 else
12961 return TYPE_ALIGN (type);
12962 }
12963
12964 /* Similarly, but return an offset_int instead of UHWI. */
12965
12966 static inline offset_int
12967 offset_int_type_size_in_bits (const_tree type)
12968 {
12969 if (TREE_CODE (type) == ERROR_MARK)
12970 return BITS_PER_WORD;
12971 else if (TYPE_SIZE (type) == NULL_TREE)
12972 return 0;
12973 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
12974 return wi::to_offset (TYPE_SIZE (type));
12975 else
12976 return TYPE_ALIGN (type);
12977 }
12978
12979 /* Given a pointer to a tree node for a subrange type, return a pointer
12980 to a DIE that describes the given type. */
12981
12982 static dw_die_ref
12983 subrange_type_die (tree type, tree low, tree high, tree bias,
12984 dw_die_ref context_die)
12985 {
12986 dw_die_ref subrange_die;
12987 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
12988
12989 if (context_die == NULL)
12990 context_die = comp_unit_die ();
12991
12992 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
12993
12994 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
12995 {
12996 /* The size of the subrange type and its base type do not match,
12997 so we need to generate a size attribute for the subrange type. */
12998 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
12999 }
13000
13001 add_alignment_attribute (subrange_die, type);
13002
13003 if (low)
13004 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
13005 if (high)
13006 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
13007 if (bias && !dwarf_strict)
13008 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
13009 dw_scalar_form_constant
13010 | dw_scalar_form_exprloc
13011 | dw_scalar_form_reference,
13012 NULL);
13013
13014 return subrange_die;
13015 }
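/* Illustrative example: for an Ada-style subrange 1 .. 10 whose base type is
   a 4-byte integer, the code above emits a DW_TAG_subrange_type DIE with
   DW_AT_lower_bound 1 and DW_AT_upper_bound 10; DW_AT_byte_size is added only
   when the subrange's size differs from its base type's, and DW_AT_GNU_bias
   only for biased types and only outside -gstrict-dwarf.  */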
13016
13017 /* Returns the (const and/or volatile) cv_qualifiers associated with
13018 the decl node. This will normally be augmented with the
13019 cv_qualifiers of the underlying type in add_type_attribute. */
13020
13021 static int
13022 decl_quals (const_tree decl)
13023 {
13024 return ((TREE_READONLY (decl)
13025 /* The C++ front-end correctly marks reference-typed
13026 variables as readonly, but from a language (and debug
13027 info) standpoint they are not const-qualified. */
13028 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
13029 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
13030 | (TREE_THIS_VOLATILE (decl)
13031 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
13032 }
13033
13034 /* Determine the TYPE whose qualifiers match the largest strict subset
13035 of the given TYPE_QUALS, and return its qualifiers. Ignore all
13036 qualifiers outside QUAL_MASK. */
13037
13038 static int
13039 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13040 {
13041 tree t;
13042 int best_rank = 0, best_qual = 0, max_rank;
13043
13044 type_quals &= qual_mask;
13045 max_rank = popcount_hwi (type_quals) - 1;
13046
13047 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13048 t = TYPE_NEXT_VARIANT (t))
13049 {
13050 int q = TYPE_QUALS (t) & qual_mask;
13051
13052 if ((q & type_quals) == q && q != type_quals
13053 && check_base_type (t, type))
13054 {
13055 int rank = popcount_hwi (q);
13056
13057 if (rank > best_rank)
13058 {
13059 best_rank = rank;
13060 best_qual = q;
13061 }
13062 }
13063 }
13064
13065 return best_qual;
13066 }
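/* Worked example (hypothetical type variants): if TYPE_QUALS is
   const|volatile and the variant chain of TYPE contains a const-only variant
   but no volatile-only one, the loop above finds the const variant as the
   largest strict subset and returns TYPE_QUAL_CONST; the caller then only has
   to wrap a DW_TAG_volatile_type around the already existing const DIE
   instead of rebuilding the whole qualifier chain.  */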
13067
13068 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13069 static const dwarf_qual_info_t dwarf_qual_info[] =
13070 {
13071 { TYPE_QUAL_CONST, DW_TAG_const_type },
13072 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13073 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13074 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13075 };
13076 static const unsigned int dwarf_qual_info_size
13077 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13078
13079 /* If DIE is a qualified DIE of some base DIE with the same parent,
13080 return the base DIE, otherwise return NULL. Set MASK to the
13081 qualifiers added compared to the returned DIE. */
13082
13083 static dw_die_ref
13084 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13085 {
13086 unsigned int i;
13087 for (i = 0; i < dwarf_qual_info_size; i++)
13088 if (die->die_tag == dwarf_qual_info[i].t)
13089 break;
13090 if (i == dwarf_qual_info_size)
13091 return NULL;
13092 if (vec_safe_length (die->die_attr) != 1)
13093 return NULL;
13094 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13095 if (type == NULL || type->die_parent != die->die_parent)
13096 return NULL;
13097 *mask |= dwarf_qual_info[i].q;
13098 if (depth)
13099 {
13100 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13101 if (ret)
13102 return ret;
13103 }
13104 return type;
13105 }
13106
13107 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13108 entry that chains the modifiers specified by CV_QUALS in front of the
13109 given type. REVERSE is true if the type is to be interpreted in the
13110 reverse storage order wrt the target order. */
13111
13112 static dw_die_ref
13113 modified_type_die (tree type, int cv_quals, bool reverse,
13114 dw_die_ref context_die)
13115 {
13116 enum tree_code code = TREE_CODE (type);
13117 dw_die_ref mod_type_die;
13118 dw_die_ref sub_die = NULL;
13119 tree item_type = NULL;
13120 tree qualified_type;
13121 tree name, low, high;
13122 dw_die_ref mod_scope;
13123 /* Only these cv-qualifiers are currently handled. */
13124 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13125 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13126 ENCODE_QUAL_ADDR_SPACE(~0U));
13127 const bool reverse_base_type
13128 = need_endianity_attribute_p (reverse) && is_base_type (type);
13129
13130 if (code == ERROR_MARK)
13131 return NULL;
13132
13133 if (lang_hooks.types.get_debug_type)
13134 {
13135 tree debug_type = lang_hooks.types.get_debug_type (type);
13136
13137 if (debug_type != NULL_TREE && debug_type != type)
13138 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13139 }
13140
13141 cv_quals &= cv_qual_mask;
13142
13143 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13144 tag modifier (and not an attribute) that old consumers won't be
13145 able to handle. */
13146 if (dwarf_version < 3)
13147 cv_quals &= ~TYPE_QUAL_RESTRICT;
13148
13149 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
13150 if (dwarf_version < 5)
13151 cv_quals &= ~TYPE_QUAL_ATOMIC;
13152
13153 /* See if we already have the appropriately qualified variant of
13154 this type. */
13155 qualified_type = get_qualified_type (type, cv_quals);
13156
13157 if (qualified_type == sizetype)
13158 {
13159 /* Try not to expose the internal sizetype type's name. */
13160 if (TYPE_NAME (qualified_type)
13161 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13162 {
13163 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13164
13165 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13166 && (TYPE_PRECISION (t)
13167 == TYPE_PRECISION (qualified_type))
13168 && (TYPE_UNSIGNED (t)
13169 == TYPE_UNSIGNED (qualified_type)));
13170 qualified_type = t;
13171 }
13172 else if (qualified_type == sizetype
13173 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13174 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13175 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13176 qualified_type = size_type_node;
13177 }
13178
13179 /* If we do, then we can just use its DIE, if it exists. */
13180 if (qualified_type)
13181 {
13182 mod_type_die = lookup_type_die (qualified_type);
13183
13184 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13185 dealt with specially: the DIE with the attribute, if it exists, is
13186 placed immediately after the regular DIE for the same base type. */
13187 if (mod_type_die
13188 && (!reverse_base_type
13189 || ((mod_type_die = mod_type_die->die_sib) != NULL
13190 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13191 return mod_type_die;
13192 }
13193
13194 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13195
13196 /* Handle C typedef types. */
13197 if (name
13198 && TREE_CODE (name) == TYPE_DECL
13199 && DECL_ORIGINAL_TYPE (name)
13200 && !DECL_ARTIFICIAL (name))
13201 {
13202 tree dtype = TREE_TYPE (name);
13203
13204 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13205 if (qualified_type == dtype && !reverse_base_type)
13206 {
13207 tree origin = decl_ultimate_origin (name);
13208
13209 /* Typedef variants that have an abstract origin don't get their own
13210 type DIE (see gen_typedef_die), so fall back on the ultimate
13211 abstract origin instead. */
13212 if (origin != NULL && origin != name)
13213 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13214 context_die);
13215
13216 /* For a named type, use the typedef. */
13217 gen_type_die (qualified_type, context_die);
13218 return lookup_type_die (qualified_type);
13219 }
13220 else
13221 {
13222 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13223 dquals &= cv_qual_mask;
13224 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13225 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13226 /* cv-unqualified version of named type. Just use
13227 the unnamed type to which it refers. */
13228 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13229 reverse, context_die);
13230 /* Else cv-qualified version of named type; fall through. */
13231 }
13232 }
13233
13234 mod_scope = scope_die_for (type, context_die);
13235
13236 if (cv_quals)
13237 {
13238 int sub_quals = 0, first_quals = 0;
13239 unsigned i;
13240 dw_die_ref first = NULL, last = NULL;
13241
13242 /* Determine a lesser qualified type that most closely matches
13243 this one. Then generate DW_TAG_* entries for the remaining
13244 qualifiers. */
13245 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13246 cv_qual_mask);
13247 if (sub_quals && use_debug_types)
13248 {
13249 bool needed = false;
13250 /* If emitting type units, make sure the order of qualifiers
13251 is canonical. Thus, start from unqualified type if
13252 an earlier qualifier is missing in sub_quals, but some later
13253 one is present there. */
13254 for (i = 0; i < dwarf_qual_info_size; i++)
13255 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13256 needed = true;
13257 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13258 {
13259 sub_quals = 0;
13260 break;
13261 }
13262 }
13263 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13264 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13265 {
13266 /* As not all intermediate qualified DIEs have corresponding
13267 tree types, ensure that qualified DIEs in the same scope
13268 as their DW_AT_type are emitted after their DW_AT_type,
13269 only with other qualified DIEs for the same type possibly
13270 in between them. Determine the range of such qualified
13271 DIEs now (first being the base type, last being corresponding
13272 last qualified DIE for it). */
13273 unsigned int count = 0;
13274 first = qualified_die_p (mod_type_die, &first_quals,
13275 dwarf_qual_info_size);
13276 if (first == NULL)
13277 first = mod_type_die;
13278 gcc_assert ((first_quals & ~sub_quals) == 0);
13279 for (count = 0, last = first;
13280 count < (1U << dwarf_qual_info_size);
13281 count++, last = last->die_sib)
13282 {
13283 int quals = 0;
13284 if (last == mod_scope->die_child)
13285 break;
13286 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13287 != first)
13288 break;
13289 }
13290 }
13291
13292 for (i = 0; i < dwarf_qual_info_size; i++)
13293 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13294 {
13295 dw_die_ref d;
13296 if (first && first != last)
13297 {
13298 for (d = first->die_sib; ; d = d->die_sib)
13299 {
13300 int quals = 0;
13301 qualified_die_p (d, &quals, dwarf_qual_info_size);
13302 if (quals == (first_quals | dwarf_qual_info[i].q))
13303 break;
13304 if (d == last)
13305 {
13306 d = NULL;
13307 break;
13308 }
13309 }
13310 if (d)
13311 {
13312 mod_type_die = d;
13313 continue;
13314 }
13315 }
13316 if (first)
13317 {
13318 d = new_die_raw (dwarf_qual_info[i].t);
13319 add_child_die_after (mod_scope, d, last);
13320 last = d;
13321 }
13322 else
13323 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13324 if (mod_type_die)
13325 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13326 mod_type_die = d;
13327 first_quals |= dwarf_qual_info[i].q;
13328 }
13329 }
13330 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13331 {
13332 dwarf_tag tag = DW_TAG_pointer_type;
13333 if (code == REFERENCE_TYPE)
13334 {
13335 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13336 tag = DW_TAG_rvalue_reference_type;
13337 else
13338 tag = DW_TAG_reference_type;
13339 }
13340 mod_type_die = new_die (tag, mod_scope, type);
13341
13342 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13343 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13344 add_alignment_attribute (mod_type_die, type);
13345 item_type = TREE_TYPE (type);
13346
13347 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13348 if (!ADDR_SPACE_GENERIC_P (as))
13349 {
13350 int action = targetm.addr_space.debug (as);
13351 if (action >= 0)
13352 {
13353 /* Positive values indicate an address_class. */
13354 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13355 }
13356 else
13357 {
13358 /* Negative values indicate an (inverted) segment base reg. */
13359 dw_loc_descr_ref d
13360 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13361 add_AT_loc (mod_type_die, DW_AT_segment, d);
13362 }
13363 }
13364 }
13365 else if (code == INTEGER_TYPE
13366 && TREE_TYPE (type) != NULL_TREE
13367 && subrange_type_for_debug_p (type, &low, &high))
13368 {
13369 tree bias = NULL_TREE;
13370 if (lang_hooks.types.get_type_bias)
13371 bias = lang_hooks.types.get_type_bias (type);
13372 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13373 item_type = TREE_TYPE (type);
13374 }
13375 else if (is_base_type (type))
13376 {
13377 mod_type_die = base_type_die (type, reverse);
13378
13379 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13380 if (reverse_base_type)
13381 {
13382 dw_die_ref after_die
13383 = modified_type_die (type, cv_quals, false, context_die);
13384 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13385 }
13386 else
13387 add_child_die (comp_unit_die (), mod_type_die);
13388
13389 add_pubtype (type, mod_type_die);
13390 }
13391 else
13392 {
13393 gen_type_die (type, context_die);
13394
13395 /* We have to get the type_main_variant here (and pass that to the
13396 `lookup_type_die' routine) because the ..._TYPE node we have
13397 might simply be a *copy* of some original type node (where the
13398 copy was created to help us keep track of typedef names) and
13399 that copy might have a different TYPE_UID from the original
13400 ..._TYPE node. */
13401 if (TREE_CODE (type) == FUNCTION_TYPE
13402 || TREE_CODE (type) == METHOD_TYPE)
13403 {
13404 /* For function/method types, can't just use type_main_variant here,
13405 because that can have different ref-qualifiers for C++,
13406 but try to canonicalize. */
13407 tree main = TYPE_MAIN_VARIANT (type);
13408 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13409 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13410 && check_base_type (t, main)
13411 && check_lang_type (t, type))
13412 return lookup_type_die (t);
13413 return lookup_type_die (type);
13414 }
13415 else if (TREE_CODE (type) != VECTOR_TYPE
13416 && TREE_CODE (type) != ARRAY_TYPE)
13417 return lookup_type_die (type_main_variant (type));
13418 else
13419 /* Vectors have the debugging information in the type,
13420 not the main variant. */
13421 return lookup_type_die (type);
13422 }
13423
13424 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13425 don't output a DW_TAG_typedef, since there isn't one in the
13426 user's program; just attach a DW_AT_name to the type.
13427 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13428 if the base type already has the same name. */
13429 if (name
13430 && ((TREE_CODE (name) != TYPE_DECL
13431 && (qualified_type == TYPE_MAIN_VARIANT (type)
13432 || (cv_quals == TYPE_UNQUALIFIED)))
13433 || (TREE_CODE (name) == TYPE_DECL
13434 && TREE_TYPE (name) == qualified_type
13435 && DECL_NAME (name))))
13436 {
13437 if (TREE_CODE (name) == TYPE_DECL)
13438 /* Could just call add_name_and_src_coords_attributes here,
13439 but since this is a builtin type it doesn't have any
13440 useful source coordinates anyway. */
13441 name = DECL_NAME (name);
13442 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13443 }
13444 /* This probably indicates a bug. */
13445 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13446 {
13447 name = TYPE_IDENTIFIER (type);
13448 add_name_attribute (mod_type_die,
13449 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13450 }
13451
13452 if (qualified_type && !reverse_base_type)
13453 equate_type_number_to_die (qualified_type, mod_type_die);
13454
13455 if (item_type)
13456 /* We must do this after the equate_type_number_to_die call, in case
13457 this is a recursive type. This ensures that the modified_type_die
13458 recursion will terminate even if the type is recursive. Recursive
13459 types are possible in Ada. */
13460 sub_die = modified_type_die (item_type,
13461 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13462 reverse,
13463 context_die);
13464
13465 if (sub_die != NULL)
13466 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13467
13468 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13469 if (TYPE_ARTIFICIAL (type))
13470 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13471
13472 return mod_type_die;
13473 }
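/* Illustrative example of the qualifier handling above: for "const volatile
   int" with no pre-existing qualified variants, the wrapper DIEs are created
   in the canonical dwarf_qual_info order (const first, then volatile), so the
   resulting chain is DW_TAG_volatile_type -> DW_TAG_const_type ->
   DW_TAG_base_type ("int"), and the outermost, volatile, DIE is the one
   returned.  */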
13474
13475 /* Generate DIEs for the generic parameters of T.
13476 T must be either a generic type or a generic function.
13477 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13478
13479 static void
13480 gen_generic_params_dies (tree t)
13481 {
13482 tree parms, args;
13483 int parms_num, i;
13484 dw_die_ref die = NULL;
13485 int non_default;
13486
13487 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13488 return;
13489
13490 if (TYPE_P (t))
13491 die = lookup_type_die (t);
13492 else if (DECL_P (t))
13493 die = lookup_decl_die (t);
13494
13495 gcc_assert (die);
13496
13497 parms = lang_hooks.get_innermost_generic_parms (t);
13498 if (!parms)
13499 /* T has no generic parameters. It means T is neither a generic type
13500 nor a generic function. End of story. */
13501 return;
13502
13503 parms_num = TREE_VEC_LENGTH (parms);
13504 args = lang_hooks.get_innermost_generic_args (t);
13505 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13506 non_default = int_cst_value (TREE_CHAIN (args));
13507 else
13508 non_default = TREE_VEC_LENGTH (args);
13509 for (i = 0; i < parms_num; i++)
13510 {
13511 tree parm, arg, arg_pack_elems;
13512 dw_die_ref parm_die;
13513
13514 parm = TREE_VEC_ELT (parms, i);
13515 arg = TREE_VEC_ELT (args, i);
13516 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13517 gcc_assert (parm && TREE_VALUE (parm) && arg);
13518
13519 if (parm && TREE_VALUE (parm) && arg)
13520 {
13521 /* If PARM represents a template parameter pack,
13522 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13523 by DW_TAG_template_*_parameter DIEs for the argument
13524 pack elements of ARG. Note that ARG would then be
13525 an argument pack. */
13526 if (arg_pack_elems)
13527 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13528 arg_pack_elems,
13529 die);
13530 else
13531 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13532 true /* emit name */, die);
13533 if (i >= non_default)
13534 add_AT_flag (parm_die, DW_AT_default_value, 1);
13535 }
13536 }
13537 }
13538
13539 /* Create and return a DIE for PARM which should be
13540 the representation of a generic type parameter.
13541 For instance, in the C++ front end, PARM would be a template parameter.
13542 ARG is the argument to PARM.
13543 EMIT_NAME_P, if true, means the DIE will have a DW_AT_name attribute set to
13544 the name of PARM.
13545 PARENT_DIE is the parent DIE which the new created DIE should be added to,
13546 as a child node. */
13547
13548 static dw_die_ref
13549 generic_parameter_die (tree parm, tree arg,
13550 bool emit_name_p,
13551 dw_die_ref parent_die)
13552 {
13553 dw_die_ref tmpl_die = NULL;
13554 const char *name = NULL;
13555
13556 if (!parm || !DECL_NAME (parm) || !arg)
13557 return NULL;
13558
13559 /* We support non-type generic parameters and arguments,
13560 type generic parameters and arguments, as well as
13561 generic generic parameters (a.k.a. template template parameters in C++)
13562 and arguments. */
13563 if (TREE_CODE (parm) == PARM_DECL)
13564 /* PARM is a nontype generic parameter */
13565 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13566 else if (TREE_CODE (parm) == TYPE_DECL)
13567 /* PARM is a type generic parameter. */
13568 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13569 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13570 /* PARM is a generic generic parameter.
13571 Its DIE is a GNU extension. It shall have a
13572 DW_AT_name attribute to represent the name of the template template
13573 parameter, and a DW_AT_GNU_template_name attribute to represent the
13574 name of the template template argument. */
13575 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13576 parent_die, parm);
13577 else
13578 gcc_unreachable ();
13579
13580 if (tmpl_die)
13581 {
13582 tree tmpl_type;
13583
13584 /* If PARM is a generic parameter pack, it means we are
13585 emitting debug info for a template argument pack element.
13586 In other terms, ARG is a template argument pack element.
13587 In that case, we don't emit any DW_AT_name attribute for
13588 the die. */
13589 if (emit_name_p)
13590 {
13591 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13592 gcc_assert (name);
13593 add_AT_string (tmpl_die, DW_AT_name, name);
13594 }
13595
13596 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13597 {
13598 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13599 TMPL_DIE should have a child DW_AT_type attribute that is set
13600 to the type of the argument to PARM, which is ARG.
13601 If PARM is a type generic parameter, TMPL_DIE should have a
13602 child DW_AT_type that is set to ARG. */
13603 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13604 add_type_attribute (tmpl_die, tmpl_type,
13605 (TREE_THIS_VOLATILE (tmpl_type)
13606 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13607 false, parent_die);
13608 }
13609 else
13610 {
13611 /* So TMPL_DIE is a DIE representing a
13612 generic generic template parameter, a.k.a. a template template
13613 parameter in C++, and ARG is a template. */
13614
13615 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13616 to the name of the argument. */
13617 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13618 if (name)
13619 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13620 }
13621
13622 if (TREE_CODE (parm) == PARM_DECL)
13623 /* So PARM is a non-type generic parameter.
13624 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13625 attribute of TMPL_DIE whose value represents the value
13626 of ARG.
13627 We must be careful here:
13628 the value of ARG might reference some function decls.
13629 We might currently be emitting debug info for a generic
13630 type, and types are emitted before function decls, so we
13631 don't know whether the function decls referenced by ARG will
13632 actually be emitted after the cgraph computations.
13633 So we must defer the generation of the DW_AT_const_value to
13634 after cgraph is ready. */
13635 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13636 }
13637
13638 return tmpl_die;
13639 }
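/* Illustrative example (C++, hypothetical source): for an instantiation
   S<int, 3> of "template <typename T, int N> struct S", the code above
   creates a DW_TAG_template_type_param DIE named "T" whose DW_AT_type refers
   to "int", and a DW_TAG_template_value_param DIE named "N" whose DW_AT_type
   is "int" and whose DW_AT_const_value (3) is filled in later, once cgraph
   analysis is complete.  */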
13640
13641 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13642 PARM_PACK, which must be a template parameter pack. The returned DIE
13643 will be a child DIE of PARENT_DIE. */
13644
13645 static dw_die_ref
13646 template_parameter_pack_die (tree parm_pack,
13647 tree parm_pack_args,
13648 dw_die_ref parent_die)
13649 {
13650 dw_die_ref die;
13651 int j;
13652
13653 gcc_assert (parent_die && parm_pack);
13654
13655 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13656 add_name_and_src_coords_attributes (die, parm_pack);
13657 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13658 generic_parameter_die (parm_pack,
13659 TREE_VEC_ELT (parm_pack_args, j),
13660 false /* Don't emit DW_AT_name */,
13661 die);
13662 return die;
13663 }
13664
13665 /* Given a pointer to an arbitrary ..._TYPE tree node, return true if it is
13666 an enumerated type. */
13667
13668 static inline int
13669 type_is_enum (const_tree type)
13670 {
13671 return TREE_CODE (type) == ENUMERAL_TYPE;
13672 }
13673
13674 /* Return the DBX register number described by a given RTL node. */
13675
13676 static unsigned int
13677 dbx_reg_number (const_rtx rtl)
13678 {
13679 unsigned regno = REGNO (rtl);
13680
13681 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13682
13683 #ifdef LEAF_REG_REMAP
13684 if (crtl->uses_only_leaf_regs)
13685 {
13686 int leaf_reg = LEAF_REG_REMAP (regno);
13687 if (leaf_reg != -1)
13688 regno = (unsigned) leaf_reg;
13689 }
13690 #endif
13691
13692 regno = DBX_REGISTER_NUMBER (regno);
13693 gcc_assert (regno != INVALID_REGNUM);
13694 return regno;
13695 }
13696
13697 /* Optionally add a DW_OP_piece term to a location description expression.
13698 DW_OP_piece is only added if the location description expression does
13699 not already end with DW_OP_piece. */
13700
13701 static void
13702 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13703 {
13704 dw_loc_descr_ref loc;
13705
13706 if (*list_head != NULL)
13707 {
13708 /* Find the end of the chain. */
13709 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13710 ;
13711
13712 if (loc->dw_loc_opc != DW_OP_piece)
13713 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13714 }
13715 }
13716
13717 /* Return a location descriptor that designates a machine register or
13718 zero if there is none. */
13719
13720 static dw_loc_descr_ref
13721 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13722 {
13723 rtx regs;
13724
13725 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13726 return 0;
13727
13728 /* We only use "frame base" when we're sure we're talking about the
13729 post-prologue local stack frame. We do this by *not* running
13730 register elimination until this point, and recognizing the special
13731 argument pointer and soft frame pointer rtx's.
13732 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13733 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13734 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13735 {
13736 dw_loc_descr_ref result = NULL;
13737
13738 if (dwarf_version >= 4 || !dwarf_strict)
13739 {
13740 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13741 initialized);
13742 if (result)
13743 add_loc_descr (&result,
13744 new_loc_descr (DW_OP_stack_value, 0, 0));
13745 }
13746 return result;
13747 }
13748
13749 regs = targetm.dwarf_register_span (rtl);
13750
13751 if (REG_NREGS (rtl) > 1 || regs)
13752 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13753 else
13754 {
13755 unsigned int dbx_regnum = dbx_reg_number (rtl);
13756 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13757 return 0;
13758 return one_reg_loc_descriptor (dbx_regnum, initialized);
13759 }
13760 }
13761
13762 /* Return a location descriptor that designates a machine register for
13763 a given hard register number. */
13764
13765 static dw_loc_descr_ref
13766 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13767 {
13768 dw_loc_descr_ref reg_loc_descr;
13769
13770 if (regno <= 31)
13771 reg_loc_descr
13772 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13773 else
13774 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13775
13776 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13777 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13778
13779 return reg_loc_descr;
13780 }
13781
13782 /* Given an RTL of a register, return a location descriptor that
13783 designates a value that spans more than one register. */
13784
13785 static dw_loc_descr_ref
13786 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13787 enum var_init_status initialized)
13788 {
13789 int size, i;
13790 dw_loc_descr_ref loc_result = NULL;
13791
13792 /* Simple, contiguous registers. */
13793 if (regs == NULL_RTX)
13794 {
13795 unsigned reg = REGNO (rtl);
13796 int nregs;
13797
13798 #ifdef LEAF_REG_REMAP
13799 if (crtl->uses_only_leaf_regs)
13800 {
13801 int leaf_reg = LEAF_REG_REMAP (reg);
13802 if (leaf_reg != -1)
13803 reg = (unsigned) leaf_reg;
13804 }
13805 #endif
13806
13807 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13808 nregs = REG_NREGS (rtl);
13809
13810 /* At present we only track constant-sized pieces. */
13811 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13812 return NULL;
13813 size /= nregs;
13814
13815 loc_result = NULL;
13816 while (nregs--)
13817 {
13818 dw_loc_descr_ref t;
13819
13820 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13821 VAR_INIT_STATUS_INITIALIZED);
13822 add_loc_descr (&loc_result, t);
13823 add_loc_descr_op_piece (&loc_result, size);
13824 ++reg;
13825 }
13826 return loc_result;
13827 }
13828
13829 /* Now onto stupid register sets in non contiguous locations. */
13830
13831 gcc_assert (GET_CODE (regs) == PARALLEL);
13832
13833 /* At present we only track constant-sized pieces. */
13834 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13835 return NULL;
13836 loc_result = NULL;
13837
13838 for (i = 0; i < XVECLEN (regs, 0); ++i)
13839 {
13840 dw_loc_descr_ref t;
13841
13842 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13843 VAR_INIT_STATUS_INITIALIZED);
13844 add_loc_descr (&loc_result, t);
13845 add_loc_descr_op_piece (&loc_result, size);
13846 }
13847
13848 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13849 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13850 return loc_result;
13851 }
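/* Illustrative example: a 16-byte value living in two consecutive 8-byte
   hard registers whose DBX numbers are, say, 0 and 1 (made-up numbers) is
   described by the composite expression
	DW_OP_reg0 DW_OP_piece 8 DW_OP_reg1 DW_OP_piece 8
   and a PARALLEL span from targetm.dwarf_register_span produces the same
   kind of per-register piece list, just with non-contiguous registers.  */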
13852
13853 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13854
13855 /* Return a location descriptor that designates a constant i,
13856 as a compound operation from constant (i >> shift), constant shift
13857 and DW_OP_shl. */
13858
13859 static dw_loc_descr_ref
13860 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13861 {
13862 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13863 add_loc_descr (&ret, int_loc_descriptor (shift));
13864 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13865 return ret;
13866 }
13867
13868 /* Return a location descriptor that designates constant POLY_I. */
13869
13870 static dw_loc_descr_ref
13871 int_loc_descriptor (poly_int64 poly_i)
13872 {
13873 enum dwarf_location_atom op;
13874
13875 HOST_WIDE_INT i;
13876 if (!poly_i.is_constant (&i))
13877 {
13878 /* Create location descriptions for the non-constant part and
13879 add any constant offset at the end. */
13880 dw_loc_descr_ref ret = NULL;
13881 HOST_WIDE_INT constant = poly_i.coeffs[0];
13882 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13883 {
13884 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13885 if (coeff != 0)
13886 {
13887 dw_loc_descr_ref start = ret;
13888 unsigned int factor;
13889 int bias;
13890 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13891 (j, &factor, &bias);
13892
13893 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13894 add COEFF * (REGNO / FACTOR) now and subtract
13895 COEFF * BIAS from the final constant part. */
13896 constant -= coeff * bias;
13897 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13898 if (coeff % factor == 0)
13899 coeff /= factor;
13900 else
13901 {
13902 int amount = exact_log2 (factor);
13903 gcc_assert (amount >= 0);
13904 add_loc_descr (&ret, int_loc_descriptor (amount));
13905 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13906 }
13907 if (coeff != 1)
13908 {
13909 add_loc_descr (&ret, int_loc_descriptor (coeff));
13910 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13911 }
13912 if (start)
13913 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13914 }
13915 }
13916 loc_descr_plus_const (&ret, constant);
13917 return ret;
13918 }
13919
13920 /* Pick the smallest representation of a constant, rather than just
13921 defaulting to the LEB encoding. */
13922 if (i >= 0)
13923 {
13924 int clz = clz_hwi (i);
13925 int ctz = ctz_hwi (i);
13926 if (i <= 31)
13927 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13928 else if (i <= 0xff)
13929 op = DW_OP_const1u;
13930 else if (i <= 0xffff)
13931 op = DW_OP_const2u;
13932 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13933 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13934 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13935 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13936 while DW_OP_const4u is 5 bytes. */
13937 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13938 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13939 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13940 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13941 while DW_OP_const4u is 5 bytes. */
13942 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13943
13944 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13945 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13946 <= 4)
13947 {
13948 /* As i >= 2**31, the double cast above will yield a negative number.
13949 Since wrapping is defined in DWARF expressions we can output big
13950 positive integers as small negative ones, regardless of the size
13951 of host wide ints.
13952
13953 Here, since the evaluator will handle 32-bit values and since i >=
13954 2**31, we know it's going to be interpreted as a negative literal:
13955 store it this way if we can do better than 5 bytes this way. */
13956 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
13957 }
13958 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
13959 op = DW_OP_const4u;
13960
13961 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
13962 least 6 bytes: see if we can do better before falling back to it. */
13963 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13964 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
13965 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
13966 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13967 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
13968 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
13969 >= HOST_BITS_PER_WIDE_INT)
13970 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
13971 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
13972 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
13973 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
13974 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
13975 && size_of_uleb128 (i) > 6)
13976 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
13977 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
13978 else
13979 op = DW_OP_constu;
13980 }
13981 else
13982 {
13983 if (i >= -0x80)
13984 op = DW_OP_const1s;
13985 else if (i >= -0x8000)
13986 op = DW_OP_const2s;
13987 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
13988 {
13989 if (size_of_int_loc_descriptor (i) < 5)
13990 {
13991 dw_loc_descr_ref ret = int_loc_descriptor (-i);
13992 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
13993 return ret;
13994 }
13995 op = DW_OP_const4s;
13996 }
13997 else
13998 {
13999 if (size_of_int_loc_descriptor (i)
14000 < (unsigned long) 1 + size_of_sleb128 (i))
14001 {
14002 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14003 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14004 return ret;
14005 }
14006 op = DW_OP_consts;
14007 }
14008 }
14009
14010 return new_loc_descr (op, i, 0);
14011 }
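/* A worked example of the shift trick above, assuming a 64-bit
   HOST_WIDE_INT: for i = 0x40000000, clz = 33 and ctz = 30, so the first
   shift branch applies with shift = 64 - 33 - 5 = 26 and the descriptor
   becomes DW_OP_lit16 DW_OP_lit26 DW_OP_shl, i.e. 3 bytes, instead of the
   5 bytes a DW_OP_const4u literal would take.  */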
14012
14013 /* Likewise, for unsigned constants. */
14014
14015 static dw_loc_descr_ref
14016 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
14017 {
14018 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
14019 const unsigned HOST_WIDE_INT max_uint
14020 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
14021
14022 /* If possible, use the clever signed constants handling. */
14023 if (i <= max_int)
14024 return int_loc_descriptor ((HOST_WIDE_INT) i);
14025
14026 /* Here, we are left with positive numbers that cannot be represented as
14027 HOST_WIDE_INT, i.e.:
14028 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
14029
14030 Using a DW_OP_const4u/const8u operation to encode them consumes a lot of
14031 bytes, whereas it may be better to output a negative integer: thanks to integer
14032 wrapping, we know that:
14033 x = x - 2 ** DWARF2_ADDR_SIZE
14034 = x - 2 * (max (HOST_WIDE_INT) + 1)
14035 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
14036 small negative integers. Let's try that in cases where it will clearly improve
14037 the encoding: there is no gain turning DW_OP_const4u into
14038 DW_OP_const4s. */
14039 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
14040 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
14041 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
14042 {
14043 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14044
14045 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14046 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14047 const HOST_WIDE_INT second_shift
14048 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14049
14050 /* So we finally have:
14051 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14052 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14053 return int_loc_descriptor (second_shift);
14054 }
14055
14056 /* Last chance: fallback to a simple constant operation. */
14057 return new_loc_descr
14058 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14059 ? DW_OP_const4u
14060 : DW_OP_const8u,
14061 i, 0);
14062 }
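/* A worked example of the wrapping trick above, assuming DWARF2_ADDR_SIZE
   == 8 and a 64-bit HOST_WIDE_INT: for i = 0xffffffffffffff00 the shifts
   yield second_shift = -256, which int_loc_descriptor encodes as
   DW_OP_const2s -256 (3 bytes) instead of the 9 bytes DW_OP_const8u
   would need.  */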
14063
14064 /* Generate and return a location description that computes the unsigned
14065 comparison of the two stack top entries (a OP b where b is the top-most
14066 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14067 LE_EXPR, GT_EXPR or GE_EXPR. */
14068
14069 static dw_loc_descr_ref
14070 uint_comparison_loc_list (enum tree_code kind)
14071 {
14072 enum dwarf_location_atom op, flip_op;
14073 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14074
14075 switch (kind)
14076 {
14077 case LT_EXPR:
14078 op = DW_OP_lt;
14079 break;
14080 case LE_EXPR:
14081 op = DW_OP_le;
14082 break;
14083 case GT_EXPR:
14084 op = DW_OP_gt;
14085 break;
14086 case GE_EXPR:
14087 op = DW_OP_ge;
14088 break;
14089 default:
14090 gcc_unreachable ();
14091 }
14092
14093 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14094 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14095
14096 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14097 possible to perform unsigned comparisons: we just have to distinguish
14098 three cases:
14099
14100 1. when a and b have the same sign (as signed integers); then we should
14101 return: a OP(signed) b;
14102
14103 2. when a is a negative signed integer while b is a positive one, then a
14104 is a greater unsigned integer than b; likewise when a and b's roles
14105 are flipped.
14106
14107 So first, compare the sign of the two operands. */
14108 ret = new_loc_descr (DW_OP_over, 0, 0);
14109 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14110 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14111 /* If they have different signs (i.e. they have different sign bits), then
14112 the stack top value has now the sign bit set and thus it's smaller than
14113 zero. */
14114 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14115 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14116 add_loc_descr (&ret, bra_node);
14117
14118 /* We are in case 1. At this point, we know both operands have the same
14119 sign, so it's safe to use the built-in signed comparison. */
14120 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14121 add_loc_descr (&ret, jmp_node);
14122
14123 /* We are in case 2. Here, we know both operands do not have the same sign,
14124 so we have to flip the signed comparison. */
14125 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14126 tmp = new_loc_descr (flip_op, 0, 0);
14127 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14128 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14129 add_loc_descr (&ret, tmp);
14130
14131 /* This dummy operation is necessary to make the two branches join. */
14132 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14133 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14134 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14135 add_loc_descr (&ret, tmp);
14136
14137 return ret;
14138 }
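
/* For instance, for KIND == LT_EXPR the sequence built above should come
out as:
DW_OP_over DW_OP_over DW_OP_xor DW_OP_lit0 DW_OP_lt DW_OP_bra <L1>
DW_OP_lt DW_OP_skip <L2>
L1: DW_OP_gt
L2: DW_OP_nop
i.e. the signed DW_OP_lt is used when both operands have the same sign and
the flipped DW_OP_gt when their signs differ. */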
14139
14140 /* Likewise, but operates on the location description lists LEFT and RIGHT,
14141 possibly destructively. Return NULL if either is NULL or if concatenation fails. */
14142
14143 static dw_loc_list_ref
14144 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14145 enum tree_code kind)
14146 {
14147 if (left == NULL || right == NULL)
14148 return NULL;
14149
14150 add_loc_list (&left, right);
14151 if (left == NULL)
14152 return NULL;
14153
14154 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14155 return left;
14156 }
14157
14158 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14159 without actually allocating it. */
14160
14161 static unsigned long
14162 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14163 {
14164 return size_of_int_loc_descriptor (i >> shift)
14165 + size_of_int_loc_descriptor (shift)
14166 + 1;
14167 }
14168
14169 /* Return size_of_locs (int_loc_descriptor (i)) without
14170 actually allocating it. */
14171
14172 static unsigned long
14173 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14174 {
14175 unsigned long s;
14176
14177 if (i >= 0)
14178 {
14179 int clz, ctz;
14180 if (i <= 31)
14181 return 1;
14182 else if (i <= 0xff)
14183 return 2;
14184 else if (i <= 0xffff)
14185 return 3;
14186 clz = clz_hwi (i);
14187 ctz = ctz_hwi (i);
14188 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14189 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14190 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14191 - clz - 5);
14192 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14193 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14194 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14195 - clz - 8);
14196 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14197 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14198 <= 4)
14199 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14200 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14201 return 5;
14202 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14203 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14204 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14205 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14206 - clz - 8);
14207 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14208 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14209 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14210 - clz - 16);
14211 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14212 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14213 && s > 6)
14214 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14215 - clz - 32);
14216 else
14217 return 1 + s;
14218 }
14219 else
14220 {
14221 if (i >= -0x80)
14222 return 2;
14223 else if (i >= -0x8000)
14224 return 3;
14225 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14226 {
14227 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14228 {
14229 s = size_of_int_loc_descriptor (-i) + 1;
14230 if (s < 5)
14231 return s;
14232 }
14233 return 5;
14234 }
14235 else
14236 {
14237 unsigned long r = 1 + size_of_sleb128 (i);
14238 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14239 {
14240 s = size_of_int_loc_descriptor (-i) + 1;
14241 if (s < r)
14242 return s;
14243 }
14244 return r;
14245 }
14246 }
14247 }
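
/* A few sample values, assuming a 64-bit HOST_WIDE_INT, mirroring what
int_loc_descriptor is expected to emit:
size_of_int_loc_descriptor (5) == 1 (DW_OP_lit5)
size_of_int_loc_descriptor (0x80) == 2 (DW_OP_const1u 0x80)
size_of_int_loc_descriptor (-256) == 3 (DW_OP_const2s -256)
size_of_int_loc_descriptor (HOST_WIDE_INT_1 << 32) == 3
(DW_OP_lit16 DW_OP_lit28 DW_OP_shl). */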
14248
14249 /* Return a location description representing the "address" of an integer
14250 value. This can appear only as a top-level expression. */
14251
14252 static dw_loc_descr_ref
14253 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14254 {
14255 int litsize;
14256 dw_loc_descr_ref loc_result = NULL;
14257
14258 if (!(dwarf_version >= 4 || !dwarf_strict))
14259 return NULL;
14260
14261 litsize = size_of_int_loc_descriptor (i);
14262 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14263 is more compact. For DW_OP_stack_value we need:
14264 litsize + 1 (DW_OP_stack_value)
14265 and for DW_OP_implicit_value:
14266 1 (DW_OP_implicit_value) + 1 (length) + size. */
14267 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14268 {
14269 loc_result = int_loc_descriptor (i);
14270 add_loc_descr (&loc_result,
14271 new_loc_descr (DW_OP_stack_value, 0, 0));
14272 return loc_result;
14273 }
14274
14275 loc_result = new_loc_descr (DW_OP_implicit_value,
14276 size, 0);
14277 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14278 loc_result->dw_loc_oprnd2.v.val_int = i;
14279 return loc_result;
14280 }
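
/* For example, with SIZE == 4 and I == 5 (and DWARF2_ADDR_SIZE >= 4) the
DW_OP_stack_value form should win:
DW_OP_lit5 DW_OP_stack_value (2 bytes)
versus
DW_OP_implicit_value 4 <4 data bytes> (6 bytes). */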
14281
14282 /* Return a location descriptor that designates a base+offset location. */
14283
14284 static dw_loc_descr_ref
14285 based_loc_descr (rtx reg, poly_int64 offset,
14286 enum var_init_status initialized)
14287 {
14288 unsigned int regno;
14289 dw_loc_descr_ref result;
14290 dw_fde_ref fde = cfun->fde;
14291
14292 /* We only use "frame base" when we're sure we're talking about the
14293 post-prologue local stack frame. We do this by *not* running
14294 register elimination until this point, and recognizing the special
14295 argument pointer and soft frame pointer rtx's. */
14296 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14297 {
14298 rtx elim = (ira_use_lra_p
14299 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14300 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14301
14302 if (elim != reg)
14303 {
14304 elim = strip_offset_and_add (elim, &offset);
14305 gcc_assert ((SUPPORTS_STACK_ALIGNMENT
14306 && (elim == hard_frame_pointer_rtx
14307 || elim == stack_pointer_rtx))
14308 || elim == (frame_pointer_needed
14309 ? hard_frame_pointer_rtx
14310 : stack_pointer_rtx));
14311
14312 /* If drap register is used to align stack, use frame
14313 pointer + offset to access stack variables. If stack
14314 is aligned without drap, use stack pointer + offset to
14315 access stack variables. */
14316 if (crtl->stack_realign_tried
14317 && reg == frame_pointer_rtx)
14318 {
14319 int base_reg
14320 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14321 ? HARD_FRAME_POINTER_REGNUM
14322 : REGNO (elim));
14323 return new_reg_loc_descr (base_reg, offset);
14324 }
14325
14326 gcc_assert (frame_pointer_fb_offset_valid);
14327 offset += frame_pointer_fb_offset;
14328 HOST_WIDE_INT const_offset;
14329 if (offset.is_constant (&const_offset))
14330 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14331 else
14332 {
14333 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14334 loc_descr_plus_const (&ret, offset);
14335 return ret;
14336 }
14337 }
14338 }
14339
14340 regno = REGNO (reg);
14341 #ifdef LEAF_REG_REMAP
14342 if (crtl->uses_only_leaf_regs)
14343 {
14344 int leaf_reg = LEAF_REG_REMAP (regno);
14345 if (leaf_reg != -1)
14346 regno = (unsigned) leaf_reg;
14347 }
14348 #endif
14349 regno = DWARF_FRAME_REGNUM (regno);
14350
14351 HOST_WIDE_INT const_offset;
14352 if (!optimize && fde
14353 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14354 && offset.is_constant (&const_offset))
14355 {
14356 /* Use cfa+offset to represent the location of arguments passed
14357 on the stack when drap is used to align the stack.
14358 Only do this when not optimizing; for optimized code var-tracking
14359 is supposed to track where the arguments live, and the register
14360 used as vdrap or drap in some spot might be used for something
14361 else in other parts of the routine. */
14362 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14363 }
14364
14365 result = new_reg_loc_descr (regno, offset);
14366
14367 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14368 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14369
14370 return result;
14371 }
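
/* Roughly speaking: a variable addressed relative to the eliminated frame
or argument pointer comes out as DW_OP_fbreg <offset adjusted by
frame_pointer_fb_offset>, while a variable based on some other hard
register comes out as the DW_OP_breg<n> <offset> built by
new_reg_loc_descr. */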
14372
14373 /* Return true if this RTL expression describes a base+offset calculation. */
14374
14375 static inline int
14376 is_based_loc (const_rtx rtl)
14377 {
14378 return (GET_CODE (rtl) == PLUS
14379 && ((REG_P (XEXP (rtl, 0))
14380 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14381 && CONST_INT_P (XEXP (rtl, 1)))));
14382 }
14383
14384 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14385 failed. */
14386
14387 static dw_loc_descr_ref
14388 tls_mem_loc_descriptor (rtx mem)
14389 {
14390 tree base;
14391 dw_loc_descr_ref loc_result;
14392
14393 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14394 return NULL;
14395
14396 base = get_base_address (MEM_EXPR (mem));
14397 if (base == NULL
14398 || !VAR_P (base)
14399 || !DECL_THREAD_LOCAL_P (base))
14400 return NULL;
14401
14402 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14403 if (loc_result == NULL)
14404 return NULL;
14405
14406 if (maybe_ne (MEM_OFFSET (mem), 0))
14407 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14408
14409 return loc_result;
14410 }
14411
14412 /* Output debug info about the reason why we failed to expand an expression
14413 as a dwarf expression. */
14414
14415 static void
14416 expansion_failed (tree expr, rtx rtl, char const *reason)
14417 {
14418 if (dump_file && (dump_flags & TDF_DETAILS))
14419 {
14420 fprintf (dump_file, "Failed to expand as dwarf: ");
14421 if (expr)
14422 print_generic_expr (dump_file, expr, dump_flags);
14423 if (rtl)
14424 {
14425 fprintf (dump_file, "\n");
14426 print_rtl (dump_file, rtl);
14427 }
14428 fprintf (dump_file, "\nReason: %s\n", reason);
14429 }
14430 }
14431
14432 /* Helper function for const_ok_for_output. */
14433
14434 static bool
14435 const_ok_for_output_1 (rtx rtl)
14436 {
14437 if (targetm.const_not_ok_for_debug_p (rtl))
14438 {
14439 if (GET_CODE (rtl) != UNSPEC)
14440 {
14441 expansion_failed (NULL_TREE, rtl,
14442 "Expression rejected for debug by the backend.\n");
14443 return false;
14444 }
14445
14446 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14447 the target hook doesn't explicitly allow it in debug info, assume
14448 we can't express it in the debug info. */
14449 /* Don't complain about TLS UNSPECs; those are just too hard to
14450 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14451 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14452 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14453 if (flag_checking
14454 && (XVECLEN (rtl, 0) == 0
14455 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14456 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14457 inform (current_function_decl
14458 ? DECL_SOURCE_LOCATION (current_function_decl)
14459 : UNKNOWN_LOCATION,
14460 #if NUM_UNSPEC_VALUES > 0
14461 "non-delegitimized UNSPEC %s (%d) found in variable location",
14462 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14463 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14464 XINT (rtl, 1));
14465 #else
14466 "non-delegitimized UNSPEC %d found in variable location",
14467 XINT (rtl, 1));
14468 #endif
14469 expansion_failed (NULL_TREE, rtl,
14470 "UNSPEC hasn't been delegitimized.\n");
14471 return false;
14472 }
14473
14474 if (CONST_POLY_INT_P (rtl))
14475 return false;
14476
14477 if (targetm.const_not_ok_for_debug_p (rtl))
14478 {
14479 expansion_failed (NULL_TREE, rtl,
14480 "Expression rejected for debug by the backend.\n");
14481 return false;
14482 }
14483
14484 /* FIXME: Refer to PR60655. It is possible for simplification
14485 of rtl expressions in var tracking to produce such expressions.
14486 We should really identify / validate expressions
14487 enclosed in CONST that can be handled by assemblers on various
14488 targets and only handle legitimate cases here. */
14489 switch (GET_CODE (rtl))
14490 {
14491 case SYMBOL_REF:
14492 break;
14493 case NOT:
14494 case NEG:
14495 return false;
14496 default:
14497 return true;
14498 }
14499
14500 if (CONSTANT_POOL_ADDRESS_P (rtl))
14501 {
14502 bool marked;
14503 get_pool_constant_mark (rtl, &marked);
14504 /* If all references to this pool constant were optimized away,
14505 it was not output and thus we can't represent it. */
14506 if (!marked)
14507 {
14508 expansion_failed (NULL_TREE, rtl,
14509 "Constant was removed from constant pool.\n");
14510 return false;
14511 }
14512 }
14513
14514 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14515 return false;
14516
14517 /* Avoid references to external symbols in debug info: on several targets
14518 the linker might even refuse to link when linking a shared library,
14519 and in many other cases the relocations for .debug_info/.debug_loc are
14520 dropped, so the address becomes zero anyway. Hidden symbols, which are
14521 guaranteed to be defined within the same shared library or executable, are fine. */
14522 if (SYMBOL_REF_EXTERNAL_P (rtl))
14523 {
14524 tree decl = SYMBOL_REF_DECL (rtl);
14525
14526 if (decl == NULL || !targetm.binds_local_p (decl))
14527 {
14528 expansion_failed (NULL_TREE, rtl,
14529 "Symbol not defined in current TU.\n");
14530 return false;
14531 }
14532 }
14533
14534 return true;
14535 }
14536
14537 /* Return true if constant RTL can be emitted in DW_OP_addr or
14538 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14539 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14540
14541 static bool
14542 const_ok_for_output (rtx rtl)
14543 {
14544 if (GET_CODE (rtl) == SYMBOL_REF)
14545 return const_ok_for_output_1 (rtl);
14546
14547 if (GET_CODE (rtl) == CONST)
14548 {
14549 subrtx_var_iterator::array_type array;
14550 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14551 if (!const_ok_for_output_1 (*iter))
14552 return false;
14553 return true;
14554 }
14555
14556 return true;
14557 }
14558
14559 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14560 if possible, NULL otherwise. */
14561
14562 static dw_die_ref
14563 base_type_for_mode (machine_mode mode, bool unsignedp)
14564 {
14565 dw_die_ref type_die;
14566 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14567
14568 if (type == NULL)
14569 return NULL;
14570 switch (TREE_CODE (type))
14571 {
14572 case INTEGER_TYPE:
14573 case REAL_TYPE:
14574 break;
14575 default:
14576 return NULL;
14577 }
14578 type_die = lookup_type_die (type);
14579 if (!type_die)
14580 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14581 comp_unit_die ());
14582 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14583 return NULL;
14584 return type_die;
14585 }
14586
14587 /* For the descriptor OP, assumed to be in unsigned MODE, convert it to an
14588 unsigned type matching MODE, or, if MODE is narrower than or as wide as
14589 DWARF2_ADDR_SIZE, to untyped. Return NULL if the conversion is not
14590 possible. */
14591
14592 static dw_loc_descr_ref
14593 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14594 {
14595 machine_mode outer_mode = mode;
14596 dw_die_ref type_die;
14597 dw_loc_descr_ref cvt;
14598
14599 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14600 {
14601 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14602 return op;
14603 }
14604 type_die = base_type_for_mode (outer_mode, 1);
14605 if (type_die == NULL)
14606 return NULL;
14607 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14608 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14609 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14610 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14611 add_loc_descr (&op, cvt);
14612 return op;
14613 }
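
/* In other words: for a MODE no wider than DWARF2_ADDR_SIZE this just
appends a zero-operand DW_OP_convert (back to the generic, untyped type),
while e.g. for a 16-byte mode on a 64-bit target it appends
DW_OP_convert <DIE of the matching unsigned base type>. */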
14614
14615 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14616
14617 static dw_loc_descr_ref
14618 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14619 dw_loc_descr_ref op1)
14620 {
14621 dw_loc_descr_ref ret = op0;
14622 add_loc_descr (&ret, op1);
14623 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14624 if (STORE_FLAG_VALUE != 1)
14625 {
14626 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14627 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14628 }
14629 return ret;
14630 }
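
/* On the usual STORE_FLAG_VALUE == 1 targets this is simply
<op0> <op1> <op> (e.g. <op0> <op1> DW_OP_lt); a target with
STORE_FLAG_VALUE == -1 would additionally get
DW_OP_const1s -1 DW_OP_mul appended. */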
14631
14632 /* Subroutine of scompare_loc_descriptor for the case in which we're
14633 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14634 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14635
14636 static dw_loc_descr_ref
14637 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14638 scalar_int_mode op_mode,
14639 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14640 {
14641 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14642 dw_loc_descr_ref cvt;
14643
14644 if (type_die == NULL)
14645 return NULL;
14646 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14647 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14648 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14649 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14650 add_loc_descr (&op0, cvt);
14651 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14652 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14653 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14654 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14655 add_loc_descr (&op1, cvt);
14656 return compare_loc_descriptor (op, op0, op1);
14657 }
14658
14659 /* Subroutine of scompare_loc_descriptor for the case in which we're
14660 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14661 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14662
14663 static dw_loc_descr_ref
14664 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14665 scalar_int_mode op_mode,
14666 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14667 {
14668 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14669 /* For eq/ne, if the operands are known to be zero-extended,
14670 there is no need to do the fancy shifting up. */
14671 if (op == DW_OP_eq || op == DW_OP_ne)
14672 {
14673 dw_loc_descr_ref last0, last1;
14674 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14675 ;
14676 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14677 ;
14678 /* deref_size zero extends, and for constants we can check
14679 whether they are zero extended or not. */
14680 if (((last0->dw_loc_opc == DW_OP_deref_size
14681 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14682 || (CONST_INT_P (XEXP (rtl, 0))
14683 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14684 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14685 && ((last1->dw_loc_opc == DW_OP_deref_size
14686 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14687 || (CONST_INT_P (XEXP (rtl, 1))
14688 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14689 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14690 return compare_loc_descriptor (op, op0, op1);
14691
14692 /* EQ/NE comparison against constant in narrower type than
14693 DWARF2_ADDR_SIZE can be performed either as
14694 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14695 DW_OP_{eq,ne}
14696 or
14697 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14698 DW_OP_{eq,ne}. Pick whatever is shorter. */
14699 if (CONST_INT_P (XEXP (rtl, 1))
14700 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14701 && (size_of_int_loc_descriptor (shift) + 1
14702 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14703 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14704 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14705 & GET_MODE_MASK (op_mode))))
14706 {
14707 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14708 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14709 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14710 & GET_MODE_MASK (op_mode));
14711 return compare_loc_descriptor (op, op0, op1);
14712 }
14713 }
14714 add_loc_descr (&op0, int_loc_descriptor (shift));
14715 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14716 if (CONST_INT_P (XEXP (rtl, 1)))
14717 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14718 else
14719 {
14720 add_loc_descr (&op1, int_loc_descriptor (shift));
14721 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14722 }
14723 return compare_loc_descriptor (op, op0, op1);
14724 }
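
/* As an example of the generic path above: a signed HImode comparison on a
target with DWARF2_ADDR_SIZE == 4 uses shift == 16, i.e. roughly
<op0> DW_OP_lit16 DW_OP_shl <op1> DW_OP_lit16 DW_OP_shl <op>
so the 16-bit values are compared in the upper half of the address-sized
stack entries, where the signed comparison gives the right answer. */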
14725
14726 /* Return location descriptor for signed comparison OP RTL. */
14727
14728 static dw_loc_descr_ref
14729 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14730 machine_mode mem_mode)
14731 {
14732 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14733 dw_loc_descr_ref op0, op1;
14734
14735 if (op_mode == VOIDmode)
14736 op_mode = GET_MODE (XEXP (rtl, 1));
14737 if (op_mode == VOIDmode)
14738 return NULL;
14739
14740 scalar_int_mode int_op_mode;
14741 if (dwarf_strict
14742 && dwarf_version < 5
14743 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14744 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14745 return NULL;
14746
14747 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14748 VAR_INIT_STATUS_INITIALIZED);
14749 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14750 VAR_INIT_STATUS_INITIALIZED);
14751
14752 if (op0 == NULL || op1 == NULL)
14753 return NULL;
14754
14755 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14756 {
14757 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14758 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14759
14760 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14761 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14762 }
14763 return compare_loc_descriptor (op, op0, op1);
14764 }
14765
14766 /* Return location descriptor for unsigned comparison OP RTL. */
14767
14768 static dw_loc_descr_ref
14769 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14770 machine_mode mem_mode)
14771 {
14772 dw_loc_descr_ref op0, op1;
14773
14774 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14775 if (test_op_mode == VOIDmode)
14776 test_op_mode = GET_MODE (XEXP (rtl, 1));
14777
14778 scalar_int_mode op_mode;
14779 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14780 return NULL;
14781
14782 if (dwarf_strict
14783 && dwarf_version < 5
14784 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14785 return NULL;
14786
14787 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14788 VAR_INIT_STATUS_INITIALIZED);
14789 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14790 VAR_INIT_STATUS_INITIALIZED);
14791
14792 if (op0 == NULL || op1 == NULL)
14793 return NULL;
14794
14795 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14796 {
14797 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14798 dw_loc_descr_ref last0, last1;
14799 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14800 ;
14801 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14802 ;
14803 if (CONST_INT_P (XEXP (rtl, 0)))
14804 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14805 /* deref_size zero extends, so no need to mask it again. */
14806 else if (last0->dw_loc_opc != DW_OP_deref_size
14807 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14808 {
14809 add_loc_descr (&op0, int_loc_descriptor (mask));
14810 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14811 }
14812 if (CONST_INT_P (XEXP (rtl, 1)))
14813 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14814 /* deref_size zero extends, so no need to mask it again. */
14815 else if (last1->dw_loc_opc != DW_OP_deref_size
14816 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14817 {
14818 add_loc_descr (&op1, int_loc_descriptor (mask));
14819 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14820 }
14821 }
14822 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14823 {
14824 HOST_WIDE_INT bias = 1;
14825 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14826 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14827 if (CONST_INT_P (XEXP (rtl, 1)))
14828 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14829 + INTVAL (XEXP (rtl, 1)));
14830 else
14831 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14832 bias, 0));
14833 }
14834 return compare_loc_descriptor (op, op0, op1);
14835 }
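
/* The bias trick above for op_mode == DWARF2_ADDR_SIZE: on a 32-bit target
both operands get DW_OP_plus_uconst 0x80000000 applied, which flips their
sign bits; unsigned order then coincides with signed order, so the plain
signed comparison operator gives the right answer. */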
14836
14837 /* Return location descriptor for {U,S}{MIN,MAX}. */
14838
14839 static dw_loc_descr_ref
14840 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14841 machine_mode mem_mode)
14842 {
14843 enum dwarf_location_atom op;
14844 dw_loc_descr_ref op0, op1, ret;
14845 dw_loc_descr_ref bra_node, drop_node;
14846
14847 scalar_int_mode int_mode;
14848 if (dwarf_strict
14849 && dwarf_version < 5
14850 && (!is_a <scalar_int_mode> (mode, &int_mode)
14851 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14852 return NULL;
14853
14854 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14855 VAR_INIT_STATUS_INITIALIZED);
14856 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14857 VAR_INIT_STATUS_INITIALIZED);
14858
14859 if (op0 == NULL || op1 == NULL)
14860 return NULL;
14861
14862 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14863 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14864 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14865 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14866 {
14867 /* Checked by the caller. */
14868 int_mode = as_a <scalar_int_mode> (mode);
14869 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14870 {
14871 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14872 add_loc_descr (&op0, int_loc_descriptor (mask));
14873 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14874 add_loc_descr (&op1, int_loc_descriptor (mask));
14875 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14876 }
14877 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14878 {
14879 HOST_WIDE_INT bias = 1;
14880 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14881 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14882 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14883 }
14884 }
14885 else if (is_a <scalar_int_mode> (mode, &int_mode)
14886 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14887 {
14888 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14889 add_loc_descr (&op0, int_loc_descriptor (shift));
14890 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14891 add_loc_descr (&op1, int_loc_descriptor (shift));
14892 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14893 }
14894 else if (is_a <scalar_int_mode> (mode, &int_mode)
14895 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14896 {
14897 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14898 dw_loc_descr_ref cvt;
14899 if (type_die == NULL)
14900 return NULL;
14901 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14902 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14903 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14904 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14905 add_loc_descr (&op0, cvt);
14906 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14907 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14908 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14909 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14910 add_loc_descr (&op1, cvt);
14911 }
14912
14913 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14914 op = DW_OP_lt;
14915 else
14916 op = DW_OP_gt;
14917 ret = op0;
14918 add_loc_descr (&ret, op1);
14919 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14920 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14921 add_loc_descr (&ret, bra_node);
14922 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
14923 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
14924 add_loc_descr (&ret, drop_node);
14925 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14926 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
14927 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
14928 && is_a <scalar_int_mode> (mode, &int_mode)
14929 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14930 ret = convert_descriptor_to_mode (int_mode, ret);
14931 return ret;
14932 }
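
/* A sketch of the emitted control flow, for, say, UMIN in a mode as wide as
DWARF2_ADDR_SIZE:
<a> DW_OP_dup DW_OP_plus_uconst <bias>
<b> DW_OP_swap DW_OP_over DW_OP_plus_uconst <bias>
DW_OP_lt DW_OP_bra <L> DW_OP_swap
L: DW_OP_drop
which leaves a on the stack when a < b (unsigned) and b otherwise. */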
14933
14934 /* Helper function for mem_loc_descriptor. Perform the binary operation OP,
14935 but convert both arguments to TYPE_DIE first; afterwards convert the
14936 result back to the unsigned type matching MODE (or to untyped). */
14937
14938 static dw_loc_descr_ref
14939 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
14940 scalar_int_mode mode, machine_mode mem_mode)
14941 {
14942 dw_loc_descr_ref cvt, op0, op1;
14943
14944 if (type_die == NULL)
14945 return NULL;
14946 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14947 VAR_INIT_STATUS_INITIALIZED);
14948 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14949 VAR_INIT_STATUS_INITIALIZED);
14950 if (op0 == NULL || op1 == NULL)
14951 return NULL;
14952 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14953 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14954 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14955 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14956 add_loc_descr (&op0, cvt);
14957 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14958 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14959 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14960 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14961 add_loc_descr (&op1, cvt);
14962 add_loc_descr (&op0, op1);
14963 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
14964 return convert_descriptor_to_mode (mode, op0);
14965 }
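
/* So, for instance, a binary operation in a mode wider than
DWARF2_ADDR_SIZE should come out roughly as
<op0> DW_OP_convert <TYPE_DIE> <op1> DW_OP_convert <TYPE_DIE>
<op> DW_OP_convert <unsigned type matching MODE, or untyped>
with the trailing conversion supplied by convert_descriptor_to_mode. */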
14966
14967 /* CLZ (where constV is the CLZ_DEFINED_VALUE_AT_ZERO computed value,
14968 const0 is DW_OP_lit0 or corresponding typed constant,
14969 const1 is DW_OP_lit1 or corresponding typed constant
14970 and constMSB is constant with just the MSB bit set
14971 for the mode):
14972 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14973 L1: const0 DW_OP_swap
14974 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
14975 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14976 L3: DW_OP_drop
14977 L4: DW_OP_nop
14978
14979 CTZ is similar:
14980 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
14981 L1: const0 DW_OP_swap
14982 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14983 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14984 L3: DW_OP_drop
14985 L4: DW_OP_nop
14986
14987 FFS is similar:
14988 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
14989 L1: const1 DW_OP_swap
14990 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
14991 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
14992 L3: DW_OP_drop
14993 L4: DW_OP_nop */
14994
14995 static dw_loc_descr_ref
14996 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
14997 machine_mode mem_mode)
14998 {
14999 dw_loc_descr_ref op0, ret, tmp;
15000 HOST_WIDE_INT valv;
15001 dw_loc_descr_ref l1jump, l1label;
15002 dw_loc_descr_ref l2jump, l2label;
15003 dw_loc_descr_ref l3jump, l3label;
15004 dw_loc_descr_ref l4jump, l4label;
15005 rtx msb;
15006
15007 if (GET_MODE (XEXP (rtl, 0)) != mode)
15008 return NULL;
15009
15010 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15011 VAR_INIT_STATUS_INITIALIZED);
15012 if (op0 == NULL)
15013 return NULL;
15014 ret = op0;
15015 if (GET_CODE (rtl) == CLZ)
15016 {
15017 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15018 valv = GET_MODE_BITSIZE (mode);
15019 }
15020 else if (GET_CODE (rtl) == FFS)
15021 valv = 0;
15022 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15023 valv = GET_MODE_BITSIZE (mode);
15024 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15025 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
15026 add_loc_descr (&ret, l1jump);
15027 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15028 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
15029 VAR_INIT_STATUS_INITIALIZED);
15030 if (tmp == NULL)
15031 return NULL;
15032 add_loc_descr (&ret, tmp);
15033 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
15034 add_loc_descr (&ret, l4jump);
15035 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
15036 ? const1_rtx : const0_rtx,
15037 mode, mem_mode,
15038 VAR_INIT_STATUS_INITIALIZED);
15039 if (l1label == NULL)
15040 return NULL;
15041 add_loc_descr (&ret, l1label);
15042 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15043 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15044 add_loc_descr (&ret, l2label);
15045 if (GET_CODE (rtl) != CLZ)
15046 msb = const1_rtx;
15047 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15048 msb = GEN_INT (HOST_WIDE_INT_1U
15049 << (GET_MODE_BITSIZE (mode) - 1));
15050 else
15051 msb = immed_wide_int_const
15052 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15053 GET_MODE_PRECISION (mode)), mode);
15054 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15055 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15056 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15057 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15058 else
15059 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15060 VAR_INIT_STATUS_INITIALIZED);
15061 if (tmp == NULL)
15062 return NULL;
15063 add_loc_descr (&ret, tmp);
15064 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15065 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15066 add_loc_descr (&ret, l3jump);
15067 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15068 VAR_INIT_STATUS_INITIALIZED);
15069 if (tmp == NULL)
15070 return NULL;
15071 add_loc_descr (&ret, tmp);
15072 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15073 ? DW_OP_shl : DW_OP_shr, 0, 0));
15074 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15075 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15076 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15077 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15078 add_loc_descr (&ret, l2jump);
15079 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15080 add_loc_descr (&ret, l3label);
15081 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15082 add_loc_descr (&ret, l4label);
15083 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15084 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15085 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15086 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15087 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15088 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15089 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15090 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15091 return ret;
15092 }
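
/* For a concrete CLZ example in a 32-bit mode: the value 0x00ffffff is
shifted left eight times before the constMSB test at L2 fires, so the
counter left on the stack is 8; a zero input takes the early DW_OP_bra
exit and yields constV instead. */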
15093
15094 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15095 const1 is DW_OP_lit1 or corresponding typed constant):
15096 const0 DW_OP_swap
15097 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15098 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15099 L2: DW_OP_drop
15100
15101 PARITY is similar:
15102 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15103 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15104 L2: DW_OP_drop */
15105
15106 static dw_loc_descr_ref
15107 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15108 machine_mode mem_mode)
15109 {
15110 dw_loc_descr_ref op0, ret, tmp;
15111 dw_loc_descr_ref l1jump, l1label;
15112 dw_loc_descr_ref l2jump, l2label;
15113
15114 if (GET_MODE (XEXP (rtl, 0)) != mode)
15115 return NULL;
15116
15117 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15118 VAR_INIT_STATUS_INITIALIZED);
15119 if (op0 == NULL)
15120 return NULL;
15121 ret = op0;
15122 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15123 VAR_INIT_STATUS_INITIALIZED);
15124 if (tmp == NULL)
15125 return NULL;
15126 add_loc_descr (&ret, tmp);
15127 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15128 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15129 add_loc_descr (&ret, l1label);
15130 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15131 add_loc_descr (&ret, l2jump);
15132 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15133 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15134 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15135 VAR_INIT_STATUS_INITIALIZED);
15136 if (tmp == NULL)
15137 return NULL;
15138 add_loc_descr (&ret, tmp);
15139 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15140 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15141 ? DW_OP_plus : DW_OP_xor, 0, 0));
15142 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15143 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15144 VAR_INIT_STATUS_INITIALIZED);
15145 add_loc_descr (&ret, tmp);
15146 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15147 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15148 add_loc_descr (&ret, l1jump);
15149 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15150 add_loc_descr (&ret, l2label);
15151 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15152 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15153 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15154 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15155 return ret;
15156 }
15157
15158 /* BSWAP (constS is initial shift count, either 56 or 24):
15159 constS const0
15160 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15161 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15162 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15163 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15164 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
15165
15166 static dw_loc_descr_ref
15167 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15168 machine_mode mem_mode)
15169 {
15170 dw_loc_descr_ref op0, ret, tmp;
15171 dw_loc_descr_ref l1jump, l1label;
15172 dw_loc_descr_ref l2jump, l2label;
15173
15174 if (BITS_PER_UNIT != 8
15175 || (GET_MODE_BITSIZE (mode) != 32
15176 && GET_MODE_BITSIZE (mode) != 64))
15177 return NULL;
15178
15179 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15180 VAR_INIT_STATUS_INITIALIZED);
15181 if (op0 == NULL)
15182 return NULL;
15183
15184 ret = op0;
15185 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15186 mode, mem_mode,
15187 VAR_INIT_STATUS_INITIALIZED);
15188 if (tmp == NULL)
15189 return NULL;
15190 add_loc_descr (&ret, tmp);
15191 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15192 VAR_INIT_STATUS_INITIALIZED);
15193 if (tmp == NULL)
15194 return NULL;
15195 add_loc_descr (&ret, tmp);
15196 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15197 add_loc_descr (&ret, l1label);
15198 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15199 mode, mem_mode,
15200 VAR_INIT_STATUS_INITIALIZED);
15201 add_loc_descr (&ret, tmp);
15202 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15203 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15204 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15205 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15206 VAR_INIT_STATUS_INITIALIZED);
15207 if (tmp == NULL)
15208 return NULL;
15209 add_loc_descr (&ret, tmp);
15210 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15211 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15212 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15213 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15214 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15215 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15216 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15217 VAR_INIT_STATUS_INITIALIZED);
15218 add_loc_descr (&ret, tmp);
15219 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15220 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15221 add_loc_descr (&ret, l2jump);
15222 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15223 VAR_INIT_STATUS_INITIALIZED);
15224 add_loc_descr (&ret, tmp);
15225 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15226 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15227 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15228 add_loc_descr (&ret, l1jump);
15229 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15230 add_loc_descr (&ret, l2label);
15231 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15232 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15233 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15234 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15235 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15236 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15237 return ret;
15238 }
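
/* For a 32-bit value such as 0x11223344 the loop above should produce
0x44332211: with constS == 24 the iterations extract the bytes 0x44,
0x33, 0x22 and 0x11 (shift counts 0, 8, 16 and 24 off the original value)
and OR them back in at shift counts 24, 16, 8 and 0, exiting via the
const0 DW_OP_eq test once the shift count has reached zero. */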
15239
15240 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15241 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15242 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15243 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15244
15245 ROTATERT is similar:
15246 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15247 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15248 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
15249
15250 static dw_loc_descr_ref
15251 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15252 machine_mode mem_mode)
15253 {
15254 rtx rtlop1 = XEXP (rtl, 1);
15255 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15256 int i;
15257
15258 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15259 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15260 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15261 VAR_INIT_STATUS_INITIALIZED);
15262 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15263 VAR_INIT_STATUS_INITIALIZED);
15264 if (op0 == NULL || op1 == NULL)
15265 return NULL;
15266 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15267 for (i = 0; i < 2; i++)
15268 {
15269 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15270 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15271 mode, mem_mode,
15272 VAR_INIT_STATUS_INITIALIZED);
15273 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15274 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15275 ? DW_OP_const4u
15276 : HOST_BITS_PER_WIDE_INT == 64
15277 ? DW_OP_const8u : DW_OP_constu,
15278 GET_MODE_MASK (mode), 0);
15279 else
15280 mask[i] = NULL;
15281 if (mask[i] == NULL)
15282 return NULL;
15283 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15284 }
15285 ret = op0;
15286 add_loc_descr (&ret, op1);
15287 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15288 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15289 if (GET_CODE (rtl) == ROTATERT)
15290 {
15291 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15292 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15293 GET_MODE_BITSIZE (mode), 0));
15294 }
15295 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15296 if (mask[0] != NULL)
15297 add_loc_descr (&ret, mask[0]);
15298 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15299 if (mask[1] != NULL)
15300 {
15301 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15302 add_loc_descr (&ret, mask[1]);
15303 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15304 }
15305 if (GET_CODE (rtl) == ROTATE)
15306 {
15307 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15308 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15309 GET_MODE_BITSIZE (mode), 0));
15310 }
15311 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15312 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15313 return ret;
15314 }
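
/* I.e. the ROTATE result is computed as (x << n) | (x >> (BITSIZE - n)),
and ROTATERT as (x >> n) | (x << (BITSIZE - n)), with the optional
constMASK DW_OP_and steps keeping the intermediate values confined to the
mode when it is narrower than DWARF2_ADDR_SIZE. */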
15315
15316 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15317 for DEBUG_PARAMETER_REF RTL. */
15318
15319 static dw_loc_descr_ref
15320 parameter_ref_descriptor (rtx rtl)
15321 {
15322 dw_loc_descr_ref ret;
15323 dw_die_ref ref;
15324
15325 if (dwarf_strict)
15326 return NULL;
15327 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15328 /* With LTO during LTRANS we get the late DIE that refers to the early
15329 DIE, thus we add another indirection here. This seems to confuse
15330 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15331 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15332 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15333 if (ref)
15334 {
15335 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15336 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15337 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15338 }
15339 else
15340 {
15341 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15342 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15343 }
15344 return ret;
15345 }
15346
15347 /* The following routine converts the RTL for a variable or parameter
15348 (resident in memory) into an equivalent Dwarf representation of a
15349 mechanism for getting the address of that same variable onto the top of a
15350 hypothetical "address evaluation" stack.
15351
15352 When creating memory location descriptors, we are effectively transforming
15353 the RTL for a memory-resident object into its Dwarf postfix expression
15354 equivalent. This routine recursively descends an RTL tree, turning
15355 it into Dwarf postfix code as it goes.
15356
15357 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15358
15359 MEM_MODE is the mode of the memory reference, needed to handle some
15360 autoincrement addressing modes.
15361
15362 Return 0 if we can't represent the location. */
15363
15364 dw_loc_descr_ref
15365 mem_loc_descriptor (rtx rtl, machine_mode mode,
15366 machine_mode mem_mode,
15367 enum var_init_status initialized)
15368 {
15369 dw_loc_descr_ref mem_loc_result = NULL;
15370 enum dwarf_location_atom op;
15371 dw_loc_descr_ref op0, op1;
15372 rtx inner = NULL_RTX;
15373 poly_int64 offset;
15374
15375 if (mode == VOIDmode)
15376 mode = GET_MODE (rtl);
15377
15378 /* Note that for a dynamically sized array, the location we will generate a
15379 description of here will be the lowest numbered location which is
15380 actually within the array. That's *not* necessarily the same as the
15381 zeroth element of the array. */
15382
15383 rtl = targetm.delegitimize_address (rtl);
15384
15385 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15386 return NULL;
15387
15388 scalar_int_mode int_mode, inner_mode, op1_mode;
15389 switch (GET_CODE (rtl))
15390 {
15391 case POST_INC:
15392 case POST_DEC:
15393 case POST_MODIFY:
15394 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15395
15396 case SUBREG:
15397 /* The case of a subreg may arise when we have a local (register)
15398 variable or a formal (register) parameter which doesn't quite fill
15399 up an entire register. For now, just assume that it is
15400 legitimate to make the Dwarf info refer to the whole register which
15401 contains the given subreg. */
15402 if (!subreg_lowpart_p (rtl))
15403 break;
15404 inner = SUBREG_REG (rtl);
15405 /* FALLTHRU */
15406 case TRUNCATE:
15407 if (inner == NULL_RTX)
15408 inner = XEXP (rtl, 0);
15409 if (is_a <scalar_int_mode> (mode, &int_mode)
15410 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15411 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15412 #ifdef POINTERS_EXTEND_UNSIGNED
15413 || (int_mode == Pmode && mem_mode != VOIDmode)
15414 #endif
15415 )
15416 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15417 {
15418 mem_loc_result = mem_loc_descriptor (inner,
15419 inner_mode,
15420 mem_mode, initialized);
15421 break;
15422 }
15423 if (dwarf_strict && dwarf_version < 5)
15424 break;
15425 if (is_a <scalar_int_mode> (mode, &int_mode)
15426 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15427 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15428 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15429 {
15430 dw_die_ref type_die;
15431 dw_loc_descr_ref cvt;
15432
15433 mem_loc_result = mem_loc_descriptor (inner,
15434 GET_MODE (inner),
15435 mem_mode, initialized);
15436 if (mem_loc_result == NULL)
15437 break;
15438 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15439 if (type_die == NULL)
15440 {
15441 mem_loc_result = NULL;
15442 break;
15443 }
15444 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15445 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15446 else
15447 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15448 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15449 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15450 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15451 add_loc_descr (&mem_loc_result, cvt);
15452 if (is_a <scalar_int_mode> (mode, &int_mode)
15453 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15454 {
15455 /* Convert it to untyped afterwards. */
15456 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15457 add_loc_descr (&mem_loc_result, cvt);
15458 }
15459 }
15460 break;
15461
15462 case REG:
15463 if (!is_a <scalar_int_mode> (mode, &int_mode)
15464 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15465 && rtl != arg_pointer_rtx
15466 && rtl != frame_pointer_rtx
15467 #ifdef POINTERS_EXTEND_UNSIGNED
15468 && (int_mode != Pmode || mem_mode == VOIDmode)
15469 #endif
15470 ))
15471 {
15472 dw_die_ref type_die;
15473 unsigned int dbx_regnum;
15474
15475 if (dwarf_strict && dwarf_version < 5)
15476 break;
15477 if (REGNO (rtl) > FIRST_PSEUDO_REGISTER)
15478 break;
15479 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15480 if (type_die == NULL)
15481 break;
15482
15483 dbx_regnum = dbx_reg_number (rtl);
15484 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15485 break;
15486 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15487 dbx_regnum, 0);
15488 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15489 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15490 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15491 break;
15492 }
15493 /* Whenever a register number forms a part of the description of the
15494 method for calculating the (dynamic) address of a memory resident
15495 object, DWARF rules require the register number be referred to as
15496 a "base register". This distinction is not based in any way upon
15497 what category of register the hardware believes the given register
15498 belongs to. This is strictly DWARF terminology we're dealing with
15499 here. Note that in cases where the location of a memory-resident
15500 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15501 OP_CONST (0)) the actual DWARF location descriptor that we generate
15502 may just be OP_BASEREG (basereg). This may look deceptively like
15503 the object in question was allocated to a register (rather than in
15504 memory) so DWARF consumers need to be aware of the subtle
15505 distinction between OP_REG and OP_BASEREG. */
15506 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15507 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15508 else if (stack_realign_drap
15509 && crtl->drap_reg
15510 && crtl->args.internal_arg_pointer == rtl
15511 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15512 {
15513 /* If RTL is internal_arg_pointer, which has been optimized
15514 out, use DRAP instead. */
15515 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15516 VAR_INIT_STATUS_INITIALIZED);
15517 }
15518 break;
15519
15520 case SIGN_EXTEND:
15521 case ZERO_EXTEND:
15522 if (!is_a <scalar_int_mode> (mode, &int_mode)
15523 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15524 break;
15525 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15526 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15527 if (op0 == 0)
15528 break;
15529 else if (GET_CODE (rtl) == ZERO_EXTEND
15530 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15531 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15532 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15533 to expand zero extend as two shifts instead of
15534 masking. */
15535 && GET_MODE_SIZE (inner_mode) <= 4)
15536 {
15537 mem_loc_result = op0;
15538 add_loc_descr (&mem_loc_result,
15539 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15540 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15541 }
15542 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15543 {
15544 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15545 shift *= BITS_PER_UNIT;
15546 if (GET_CODE (rtl) == SIGN_EXTEND)
15547 op = DW_OP_shra;
15548 else
15549 op = DW_OP_shr;
15550 mem_loc_result = op0;
15551 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15552 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15553 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15554 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15555 }
15556 else if (!dwarf_strict || dwarf_version >= 5)
15557 {
15558 dw_die_ref type_die1, type_die2;
15559 dw_loc_descr_ref cvt;
15560
15561 type_die1 = base_type_for_mode (inner_mode,
15562 GET_CODE (rtl) == ZERO_EXTEND);
15563 if (type_die1 == NULL)
15564 break;
15565 type_die2 = base_type_for_mode (int_mode, 1);
15566 if (type_die2 == NULL)
15567 break;
15568 mem_loc_result = op0;
15569 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15570 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15571 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15572 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15573 add_loc_descr (&mem_loc_result, cvt);
15574 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15575 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15576 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15577 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15578 add_loc_descr (&mem_loc_result, cvt);
15579 }
15580 break;
15581
15582 case MEM:
15583 {
15584 rtx new_rtl = avoid_constant_pool_reference (rtl);
15585 if (new_rtl != rtl)
15586 {
15587 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15588 initialized);
15589 if (mem_loc_result != NULL)
15590 return mem_loc_result;
15591 }
15592 }
15593 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15594 get_address_mode (rtl), mode,
15595 VAR_INIT_STATUS_INITIALIZED);
15596 if (mem_loc_result == NULL)
15597 mem_loc_result = tls_mem_loc_descriptor (rtl);
15598 if (mem_loc_result != NULL)
15599 {
15600 if (!is_a <scalar_int_mode> (mode, &int_mode)
15601 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15602 {
15603 dw_die_ref type_die;
15604 dw_loc_descr_ref deref;
15605 HOST_WIDE_INT size;
15606
15607 if (dwarf_strict && dwarf_version < 5)
15608 return NULL;
15609 if (!GET_MODE_SIZE (mode).is_constant (&size))
15610 return NULL;
15611 type_die
15612 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15613 if (type_die == NULL)
15614 return NULL;
15615 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15616 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15617 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15618 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15619 add_loc_descr (&mem_loc_result, deref);
15620 }
15621 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15622 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15623 else
15624 add_loc_descr (&mem_loc_result,
15625 new_loc_descr (DW_OP_deref_size,
15626 GET_MODE_SIZE (int_mode), 0));
15627 }
15628 break;
15629
15630 case LO_SUM:
15631 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15632
15633 case LABEL_REF:
15634 /* Some ports can transform a symbol ref into a label ref, because
15635 the symbol ref is too far away and has to be dumped into a constant
15636 pool. */
15637 case CONST:
15638 case SYMBOL_REF:
15639 if (!is_a <scalar_int_mode> (mode, &int_mode)
15640 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15641 #ifdef POINTERS_EXTEND_UNSIGNED
15642 && (int_mode != Pmode || mem_mode == VOIDmode)
15643 #endif
15644 ))
15645 break;
15646 if (GET_CODE (rtl) == SYMBOL_REF
15647 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15648 {
15649 dw_loc_descr_ref temp;
15650
15651 /* If this is not defined, we have no way to emit the data. */
15652 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15653 break;
15654
15655 temp = new_addr_loc_descr (rtl, dtprel_true);
15656
15657 /* We check for DWARF 5 here because gdb did not implement
15658 DW_OP_form_tls_address until after 7.12. */
15659 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15660 ? DW_OP_form_tls_address
15661 : DW_OP_GNU_push_tls_address),
15662 0, 0);
15663 add_loc_descr (&mem_loc_result, temp);
15664
15665 break;
15666 }
15667
15668 if (!const_ok_for_output (rtl))
15669 {
15670 if (GET_CODE (rtl) == CONST)
15671 switch (GET_CODE (XEXP (rtl, 0)))
15672 {
15673 case NOT:
15674 op = DW_OP_not;
15675 goto try_const_unop;
15676 case NEG:
15677 op = DW_OP_neg;
15678 goto try_const_unop;
15679 try_const_unop:
15680 rtx arg;
15681 arg = XEXP (XEXP (rtl, 0), 0);
15682 if (!CONSTANT_P (arg))
15683 arg = gen_rtx_CONST (int_mode, arg);
15684 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15685 initialized);
15686 if (op0)
15687 {
15688 mem_loc_result = op0;
15689 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15690 }
15691 break;
15692 default:
15693 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15694 mem_mode, initialized);
15695 break;
15696 }
15697 break;
15698 }
15699
15700 symref:
15701 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15702 vec_safe_push (used_rtx_array, rtl);
15703 break;
15704
15705 case CONCAT:
15706 case CONCATN:
15707 case VAR_LOCATION:
15708 case DEBUG_IMPLICIT_PTR:
15709 expansion_failed (NULL_TREE, rtl,
15710 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15711 return 0;
15712
15713 case ENTRY_VALUE:
15714 if (dwarf_strict && dwarf_version < 5)
15715 return NULL;
15716 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15717 {
15718 if (!is_a <scalar_int_mode> (mode, &int_mode)
15719 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15720 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15721 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15722 else
15723 {
15724 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15725 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15726 return NULL;
15727 op0 = one_reg_loc_descriptor (dbx_regnum,
15728 VAR_INIT_STATUS_INITIALIZED);
15729 }
15730 }
15731 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15732 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15733 {
15734 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15735 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15736 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15737 return NULL;
15738 }
15739 else
15740 gcc_unreachable ();
15741 if (op0 == NULL)
15742 return NULL;
15743 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15744 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15745 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15746 break;
15747
15748 case DEBUG_PARAMETER_REF:
15749 mem_loc_result = parameter_ref_descriptor (rtl);
15750 break;
15751
15752 case PRE_MODIFY:
15753 /* Extract the PLUS expression nested inside and fall into the
15754 PLUS code below. */
15755 rtl = XEXP (rtl, 1);
15756 goto plus;
15757
15758 case PRE_INC:
15759 case PRE_DEC:
15760 /* Turn these into a PLUS expression and fall into the PLUS code
15761 below. */
15762 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15763 gen_int_mode (GET_CODE (rtl) == PRE_INC
15764 ? GET_MODE_UNIT_SIZE (mem_mode)
15765 : -GET_MODE_UNIT_SIZE (mem_mode),
15766 mode));
15767
15768 /* fall through */
15769
15770 case PLUS:
15771 plus:
15772 if (is_based_loc (rtl)
15773 && is_a <scalar_int_mode> (mode, &int_mode)
15774 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15775 || XEXP (rtl, 0) == arg_pointer_rtx
15776 || XEXP (rtl, 0) == frame_pointer_rtx))
15777 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15778 INTVAL (XEXP (rtl, 1)),
15779 VAR_INIT_STATUS_INITIALIZED);
15780 else
15781 {
15782 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15783 VAR_INIT_STATUS_INITIALIZED);
15784 if (mem_loc_result == 0)
15785 break;
15786
15787 if (CONST_INT_P (XEXP (rtl, 1))
15788 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15789 <= DWARF2_ADDR_SIZE))
15790 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15791 else
15792 {
15793 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15794 VAR_INIT_STATUS_INITIALIZED);
15795 if (op1 == 0)
15796 return NULL;
15797 add_loc_descr (&mem_loc_result, op1);
15798 add_loc_descr (&mem_loc_result,
15799 new_loc_descr (DW_OP_plus, 0, 0));
15800 }
15801 }
15802 break;
15803
15804 /* If a pseudo-reg is optimized away, it is possible for it to
15805 be replaced with a MEM containing a multiply or shift. */
15806 case MINUS:
15807 op = DW_OP_minus;
15808 goto do_binop;
15809
15810 case MULT:
15811 op = DW_OP_mul;
15812 goto do_binop;
15813
15814 case DIV:
15815 if ((!dwarf_strict || dwarf_version >= 5)
15816 && is_a <scalar_int_mode> (mode, &int_mode)
15817 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15818 {
15819 mem_loc_result = typed_binop (DW_OP_div, rtl,
15820 base_type_for_mode (mode, 0),
15821 int_mode, mem_mode);
15822 break;
15823 }
15824 op = DW_OP_div;
15825 goto do_binop;
15826
15827 case UMOD:
15828 op = DW_OP_mod;
15829 goto do_binop;
15830
15831 case ASHIFT:
15832 op = DW_OP_shl;
15833 goto do_shift;
15834
15835 case ASHIFTRT:
15836 op = DW_OP_shra;
15837 goto do_shift;
15838
15839 case LSHIFTRT:
15840 op = DW_OP_shr;
15841 goto do_shift;
15842
15843 do_shift:
15844 if (!is_a <scalar_int_mode> (mode, &int_mode))
15845 break;
15846 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15847 VAR_INIT_STATUS_INITIALIZED);
15848 {
15849 rtx rtlop1 = XEXP (rtl, 1);
15850 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15851 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15852 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15853 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15854 VAR_INIT_STATUS_INITIALIZED);
15855 }
15856
15857 if (op0 == 0 || op1 == 0)
15858 break;
15859
15860 mem_loc_result = op0;
15861 add_loc_descr (&mem_loc_result, op1);
15862 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15863 break;
15864
15865 case AND:
15866 op = DW_OP_and;
15867 goto do_binop;
15868
15869 case IOR:
15870 op = DW_OP_or;
15871 goto do_binop;
15872
15873 case XOR:
15874 op = DW_OP_xor;
15875 goto do_binop;
15876
15877 do_binop:
15878 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15879 VAR_INIT_STATUS_INITIALIZED);
15880 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15881 VAR_INIT_STATUS_INITIALIZED);
15882
15883 if (op0 == 0 || op1 == 0)
15884 break;
15885
15886 mem_loc_result = op0;
15887 add_loc_descr (&mem_loc_result, op1);
15888 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15889 break;
15890
15891 case MOD:
15892 if ((!dwarf_strict || dwarf_version >= 5)
15893 && is_a <scalar_int_mode> (mode, &int_mode)
15894 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15895 {
15896 mem_loc_result = typed_binop (DW_OP_mod, rtl,
15897 base_type_for_mode (mode, 0),
15898 int_mode, mem_mode);
15899 break;
15900 }
15901
15902 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15903 VAR_INIT_STATUS_INITIALIZED);
15904 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15905 VAR_INIT_STATUS_INITIALIZED);
15906
15907 if (op0 == 0 || op1 == 0)
15908 break;
15909
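   /* The sequence built below computes the remainder on the DWARF stack:
      after pushing op0 and op1 the stack is <op0 op1>; the two DW_OP_over
      operations copy both entries, giving <op0 op1 op0 op1>; DW_OP_div and
      DW_OP_mul reduce that to <op0 (op0 / op1) * op1>; and DW_OP_minus
      leaves op0 - (op0 / op1) * op1, i.e. the remainder, on top.  */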
15910 mem_loc_result = op0;
15911 add_loc_descr (&mem_loc_result, op1);
15912 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15913 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
15914 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
15915 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
15916 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
15917 break;
15918
15919 case UDIV:
15920 if ((!dwarf_strict || dwarf_version >= 5)
15921 && is_a <scalar_int_mode> (mode, &int_mode))
15922 {
15923 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15924 {
15925 op = DW_OP_div;
15926 goto do_binop;
15927 }
15928 mem_loc_result = typed_binop (DW_OP_div, rtl,
15929 base_type_for_mode (int_mode, 1),
15930 int_mode, mem_mode);
15931 }
15932 break;
15933
15934 case NOT:
15935 op = DW_OP_not;
15936 goto do_unop;
15937
15938 case ABS:
15939 op = DW_OP_abs;
15940 goto do_unop;
15941
15942 case NEG:
15943 op = DW_OP_neg;
15944 goto do_unop;
15945
15946 do_unop:
15947 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15948 VAR_INIT_STATUS_INITIALIZED);
15949
15950 if (op0 == 0)
15951 break;
15952
15953 mem_loc_result = op0;
15954 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15955 break;
15956
15957 case CONST_INT:
15958 if (!is_a <scalar_int_mode> (mode, &int_mode)
15959 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15960 #ifdef POINTERS_EXTEND_UNSIGNED
15961 || (int_mode == Pmode
15962 && mem_mode != VOIDmode
15963 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
15964 #endif
15965 )
15966 {
15967 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15968 break;
15969 }
15970 if ((!dwarf_strict || dwarf_version >= 5)
15971 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
15972 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
15973 {
15974 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
15975 scalar_int_mode amode;
15976 if (type_die == NULL)
15977 return NULL;
15978 if (INTVAL (rtl) >= 0
15979 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
15980 .exists (&amode))
15981 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
15982 /* const DW_OP_convert <XXX> vs.
15983 DW_OP_const_type <XXX, 1, const>. */
15984 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
15985 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
15986 {
15987 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
15988 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15989 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15990 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15991 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
15992 add_loc_descr (&mem_loc_result, op0);
15993 return mem_loc_result;
15994 }
15995 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
15996 INTVAL (rtl));
15997 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15998 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15999 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16000 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
16001 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
16002 else
16003 {
16004 mem_loc_result->dw_loc_oprnd2.val_class
16005 = dw_val_class_const_double;
16006 mem_loc_result->dw_loc_oprnd2.v.val_double
16007 = double_int::from_shwi (INTVAL (rtl));
16008 }
16009 }
16010 break;
16011
16012 case CONST_DOUBLE:
16013 if (!dwarf_strict || dwarf_version >= 5)
16014 {
16015 dw_die_ref type_die;
16016
16017 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
16018 CONST_DOUBLE rtx could represent either a large integer
16019 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
16020 the value is always a floating point constant.
16021
16022 When it is an integer, a CONST_DOUBLE is used whenever
16023 the constant requires 2 HWIs to be adequately represented.
16024 We output CONST_DOUBLEs as blocks. */
16025 if (mode == VOIDmode
16026 || (GET_MODE (rtl) == VOIDmode
16027 && maybe_ne (GET_MODE_BITSIZE (mode),
16028 HOST_BITS_PER_DOUBLE_INT)))
16029 break;
16030 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16031 if (type_die == NULL)
16032 return NULL;
16033 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16034 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16035 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16036 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16037 #if TARGET_SUPPORTS_WIDE_INT == 0
16038 if (!SCALAR_FLOAT_MODE_P (mode))
16039 {
16040 mem_loc_result->dw_loc_oprnd2.val_class
16041 = dw_val_class_const_double;
16042 mem_loc_result->dw_loc_oprnd2.v.val_double
16043 = rtx_to_double_int (rtl);
16044 }
16045 else
16046 #endif
16047 {
16048 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16049 unsigned int length = GET_MODE_SIZE (float_mode);
16050 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16051
16052 insert_float (rtl, array);
16053 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16054 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16055 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16056 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16057 }
16058 }
16059 break;
16060
16061 case CONST_WIDE_INT:
16062 if (!dwarf_strict || dwarf_version >= 5)
16063 {
16064 dw_die_ref type_die;
16065
16066 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16067 if (type_die == NULL)
16068 return NULL;
16069 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16070 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16071 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16072 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16073 mem_loc_result->dw_loc_oprnd2.val_class
16074 = dw_val_class_wide_int;
16075 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16076 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16077 }
16078 break;
16079
16080 case CONST_POLY_INT:
16081 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16082 break;
16083
16084 case EQ:
16085 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16086 break;
16087
16088 case GE:
16089 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16090 break;
16091
16092 case GT:
16093 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16094 break;
16095
16096 case LE:
16097 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16098 break;
16099
16100 case LT:
16101 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16102 break;
16103
16104 case NE:
16105 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16106 break;
16107
16108 case GEU:
16109 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16110 break;
16111
16112 case GTU:
16113 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16114 break;
16115
16116 case LEU:
16117 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16118 break;
16119
16120 case LTU:
16121 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16122 break;
16123
16124 case UMIN:
16125 case UMAX:
16126 if (!SCALAR_INT_MODE_P (mode))
16127 break;
16128 /* FALLTHRU */
16129 case SMIN:
16130 case SMAX:
16131 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16132 break;
16133
16134 case ZERO_EXTRACT:
16135 case SIGN_EXTRACT:
16136 if (CONST_INT_P (XEXP (rtl, 1))
16137 && CONST_INT_P (XEXP (rtl, 2))
16138 && is_a <scalar_int_mode> (mode, &int_mode)
16139 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16140 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16141 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16142 && ((unsigned) INTVAL (XEXP (rtl, 1))
16143 + (unsigned) INTVAL (XEXP (rtl, 2))
16144 <= GET_MODE_BITSIZE (int_mode)))
16145 {
16146 int shift, size;
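	  /* The field is extracted by shifting it up to the most significant
	     end of the value (DW_OP_shl) and then shifting it back down with
	     either an arithmetic shift (DW_OP_shra, for SIGN_EXTRACT) or a
	     logical shift (DW_OP_shr, for ZERO_EXTRACT), which sign- or
	     zero-extends the field into the full stack slot.  */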
16147 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16148 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16149 if (op0 == 0)
16150 break;
16151 if (GET_CODE (rtl) == SIGN_EXTRACT)
16152 op = DW_OP_shra;
16153 else
16154 op = DW_OP_shr;
16155 mem_loc_result = op0;
16156 size = INTVAL (XEXP (rtl, 1));
16157 shift = INTVAL (XEXP (rtl, 2));
16158 if (BITS_BIG_ENDIAN)
16159 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
16160 if (shift + size != (int) DWARF2_ADDR_SIZE)
16161 {
16162 add_loc_descr (&mem_loc_result,
16163 int_loc_descriptor (DWARF2_ADDR_SIZE
16164 - shift - size));
16165 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16166 }
16167 if (size != (int) DWARF2_ADDR_SIZE)
16168 {
16169 add_loc_descr (&mem_loc_result,
16170 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16171 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16172 }
16173 }
16174 break;
16175
16176 case IF_THEN_ELSE:
16177 {
16178 dw_loc_descr_ref op2, bra_node, drop_node;
16179 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16180 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16181 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16182 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16183 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16184 VAR_INIT_STATUS_INITIALIZED);
16185 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16186 VAR_INIT_STATUS_INITIALIZED);
16187 if (op0 == NULL || op1 == NULL || op2 == NULL)
16188 break;
16189
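	/* Push the "then" value, the "else" value and the condition, in that
	   order.  DW_OP_bra pops the condition: if it is non-zero, control
	   jumps to the DW_OP_drop below, discarding op2 and leaving op1 on
	   the stack; otherwise DW_OP_swap followed by DW_OP_drop discards
	   op1 and leaves op2.  */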
16190 mem_loc_result = op1;
16191 add_loc_descr (&mem_loc_result, op2);
16192 add_loc_descr (&mem_loc_result, op0);
16193 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16194 add_loc_descr (&mem_loc_result, bra_node);
16195 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16196 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16197 add_loc_descr (&mem_loc_result, drop_node);
16198 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16199 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16200 }
16201 break;
16202
16203 case FLOAT_EXTEND:
16204 case FLOAT_TRUNCATE:
16205 case FLOAT:
16206 case UNSIGNED_FLOAT:
16207 case FIX:
16208 case UNSIGNED_FIX:
16209 if (!dwarf_strict || dwarf_version >= 5)
16210 {
16211 dw_die_ref type_die;
16212 dw_loc_descr_ref cvt;
16213
16214 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16215 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16216 if (op0 == NULL)
16217 break;
16218 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16219 && (GET_CODE (rtl) == FLOAT
16220 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16221 {
16222 type_die = base_type_for_mode (int_mode,
16223 GET_CODE (rtl) == UNSIGNED_FLOAT);
16224 if (type_die == NULL)
16225 break;
16226 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16227 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16228 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16229 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16230 add_loc_descr (&op0, cvt);
16231 }
16232 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16233 if (type_die == NULL)
16234 break;
16235 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16236 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16237 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16238 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16239 add_loc_descr (&op0, cvt);
16240 if (is_a <scalar_int_mode> (mode, &int_mode)
16241 && (GET_CODE (rtl) == FIX
16242 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16243 {
16244 op0 = convert_descriptor_to_mode (int_mode, op0);
16245 if (op0 == NULL)
16246 break;
16247 }
16248 mem_loc_result = op0;
16249 }
16250 break;
16251
16252 case CLZ:
16253 case CTZ:
16254 case FFS:
16255 if (is_a <scalar_int_mode> (mode, &int_mode))
16256 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16257 break;
16258
16259 case POPCOUNT:
16260 case PARITY:
16261 if (is_a <scalar_int_mode> (mode, &int_mode))
16262 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16263 break;
16264
16265 case BSWAP:
16266 if (is_a <scalar_int_mode> (mode, &int_mode))
16267 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16268 break;
16269
16270 case ROTATE:
16271 case ROTATERT:
16272 if (is_a <scalar_int_mode> (mode, &int_mode))
16273 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16274 break;
16275
16276 case COMPARE:
16277 /* In theory, we could implement the above. */
16278 /* DWARF cannot represent the unsigned compare operations
16279 natively. */
16280 case SS_MULT:
16281 case US_MULT:
16282 case SS_DIV:
16283 case US_DIV:
16284 case SS_PLUS:
16285 case US_PLUS:
16286 case SS_MINUS:
16287 case US_MINUS:
16288 case SS_NEG:
16289 case US_NEG:
16290 case SS_ABS:
16291 case SS_ASHIFT:
16292 case US_ASHIFT:
16293 case SS_TRUNCATE:
16294 case US_TRUNCATE:
16295 case UNORDERED:
16296 case ORDERED:
16297 case UNEQ:
16298 case UNGE:
16299 case UNGT:
16300 case UNLE:
16301 case UNLT:
16302 case LTGT:
16303 case FRACT_CONVERT:
16304 case UNSIGNED_FRACT_CONVERT:
16305 case SAT_FRACT:
16306 case UNSIGNED_SAT_FRACT:
16307 case SQRT:
16308 case ASM_OPERANDS:
16309 case VEC_MERGE:
16310 case VEC_SELECT:
16311 case VEC_CONCAT:
16312 case VEC_DUPLICATE:
16313 case VEC_SERIES:
16314 case UNSPEC:
16315 case HIGH:
16316 case FMA:
16317 case STRICT_LOW_PART:
16318 case CONST_VECTOR:
16319 case CONST_FIXED:
16320 case CLRSB:
16321 case CLOBBER:
16322 /* If delegitimize_address couldn't do anything with the UNSPEC, we
16323 can't express it in the debug info. This can happen e.g. with some
16324 TLS UNSPECs. */
16325 break;
16326
16327 case CONST_STRING:
16328 resolve_one_addr (&rtl);
16329 goto symref;
16330
16331 /* RTL sequences inside PARALLEL record a series of DWARF operations for
16332 the expression. An UNSPEC rtx represents a raw DWARF operation;
16333 new_loc_descr is called for it to build the operation directly,
16334 otherwise mem_loc_descriptor is called recursively. */
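   /* As an illustrative sketch, an element such as
      (unspec [(const_int 16) (const_int 0)] <DW_OP_plus_uconst>)
      would stand for the single operation DW_OP_plus_uconst 16, with the
      unused second operand passed as const0_rtx.  */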
16335 case PARALLEL:
16336 {
16337 int index = 0;
16338 dw_loc_descr_ref exp_result = NULL;
16339
16340 for (; index < XVECLEN (rtl, 0); index++)
16341 {
16342 rtx elem = XVECEXP (rtl, 0, index);
16343 if (GET_CODE (elem) == UNSPEC)
16344 {
16345 /* Each DWARF operation UNSPEC contains two operands; if
16346 one operand is not used for the operation, const0_rtx is
16347 passed. */
16348 gcc_assert (XVECLEN (elem, 0) == 2);
16349
16350 HOST_WIDE_INT dw_op = XINT (elem, 1);
16351 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16352 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16353 exp_result
16354 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16355 oprnd2);
16356 }
16357 else
16358 exp_result
16359 = mem_loc_descriptor (elem, mode, mem_mode,
16360 VAR_INIT_STATUS_INITIALIZED);
16361
16362 if (!mem_loc_result)
16363 mem_loc_result = exp_result;
16364 else
16365 add_loc_descr (&mem_loc_result, exp_result);
16366 }
16367
16368 break;
16369 }
16370
16371 default:
16372 if (flag_checking)
16373 {
16374 print_rtl (stderr, rtl);
16375 gcc_unreachable ();
16376 }
16377 break;
16378 }
16379
16380 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16381 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16382
16383 return mem_loc_result;
16384 }
16385
16386 /* Return a descriptor that describes the concatenation of two locations.
16387 This is typically used for a complex variable. */
16388
16389 static dw_loc_descr_ref
16390 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16391 {
16392 /* At present we only track constant-sized pieces. */
16393 unsigned int size0, size1;
16394 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16395 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16396 return 0;
16397
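  /* The result has the form <x0 location> DW_OP_piece size0
     <x1 location> DW_OP_piece size1, where each DW_OP_piece says how many
     bytes of the object the preceding location describes; e.g. for a
     complex value the real and imaginary parts may each live in their own
     register.  */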
16398 dw_loc_descr_ref cc_loc_result = NULL;
16399 dw_loc_descr_ref x0_ref
16400 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16401 dw_loc_descr_ref x1_ref
16402 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16403
16404 if (x0_ref == 0 || x1_ref == 0)
16405 return 0;
16406
16407 cc_loc_result = x0_ref;
16408 add_loc_descr_op_piece (&cc_loc_result, size0);
16409
16410 add_loc_descr (&cc_loc_result, x1_ref);
16411 add_loc_descr_op_piece (&cc_loc_result, size1);
16412
16413 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16414 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16415
16416 return cc_loc_result;
16417 }
16418
16419 /* Return a descriptor that describes the concatenation of N
16420 locations. */
16421
16422 static dw_loc_descr_ref
16423 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16424 {
16425 unsigned int i;
16426 dw_loc_descr_ref cc_loc_result = NULL;
16427 unsigned int n = XVECLEN (concatn, 0);
16428 unsigned int size;
16429
16430 for (i = 0; i < n; ++i)
16431 {
16432 dw_loc_descr_ref ref;
16433 rtx x = XVECEXP (concatn, 0, i);
16434
16435 /* At present we only track constant-sized pieces. */
16436 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16437 return NULL;
16438
16439 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16440 if (ref == NULL)
16441 return NULL;
16442
16443 add_loc_descr (&cc_loc_result, ref);
16444 add_loc_descr_op_piece (&cc_loc_result, size);
16445 }
16446
16447 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16448 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16449
16450 return cc_loc_result;
16451 }
16452
16453 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16454 for DEBUG_IMPLICIT_PTR RTL. */
16455
16456 static dw_loc_descr_ref
16457 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16458 {
16459 dw_loc_descr_ref ret;
16460 dw_die_ref ref;
16461
16462 if (dwarf_strict && dwarf_version < 5)
16463 return NULL;
16464 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16465 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16466 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16467 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16468 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16469 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16470 if (ref)
16471 {
16472 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16473 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16474 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16475 }
16476 else
16477 {
16478 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16479 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16480 }
16481 return ret;
16482 }
16483
16484 /* Output a proper Dwarf location descriptor for a variable or parameter
16485 which is either allocated in a register or in a memory location. For a
16486 register, we just generate an OP_REG and the register number. For a
16487 memory location we provide a Dwarf postfix expression describing how to
16488 generate the (dynamic) address of the object onto the address stack.
16489
16490 MODE is mode of the decl if this loc_descriptor is going to be used in
16491 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16492 allowed, VOIDmode otherwise.
16493
16494 If we don't know how to describe it, return 0. */
16495
16496 static dw_loc_descr_ref
16497 loc_descriptor (rtx rtl, machine_mode mode,
16498 enum var_init_status initialized)
16499 {
16500 dw_loc_descr_ref loc_result = NULL;
16501 scalar_int_mode int_mode;
16502
16503 switch (GET_CODE (rtl))
16504 {
16505 case SUBREG:
16506 /* The case of a subreg may arise when we have a local (register)
16507 variable or a formal (register) parameter which doesn't quite fill
16508 up an entire register. For now, just assume that it is
16509 legitimate to make the Dwarf info refer to the whole register which
16510 contains the given subreg. */
16511 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16512 loc_result = loc_descriptor (SUBREG_REG (rtl),
16513 GET_MODE (SUBREG_REG (rtl)), initialized);
16514 else
16515 goto do_default;
16516 break;
16517
16518 case REG:
16519 loc_result = reg_loc_descriptor (rtl, initialized);
16520 break;
16521
16522 case MEM:
16523 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16524 GET_MODE (rtl), initialized);
16525 if (loc_result == NULL)
16526 loc_result = tls_mem_loc_descriptor (rtl);
16527 if (loc_result == NULL)
16528 {
16529 rtx new_rtl = avoid_constant_pool_reference (rtl);
16530 if (new_rtl != rtl)
16531 loc_result = loc_descriptor (new_rtl, mode, initialized);
16532 }
16533 break;
16534
16535 case CONCAT:
16536 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16537 initialized);
16538 break;
16539
16540 case CONCATN:
16541 loc_result = concatn_loc_descriptor (rtl, initialized);
16542 break;
16543
16544 case VAR_LOCATION:
16545 /* Single part. */
16546 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16547 {
16548 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16549 if (GET_CODE (loc) == EXPR_LIST)
16550 loc = XEXP (loc, 0);
16551 loc_result = loc_descriptor (loc, mode, initialized);
16552 break;
16553 }
16554
16555 rtl = XEXP (rtl, 1);
16556 /* FALLTHRU */
16557
16558 case PARALLEL:
16559 {
16560 rtvec par_elems = XVEC (rtl, 0);
16561 int num_elem = GET_NUM_ELEM (par_elems);
16562 machine_mode mode;
16563 int i, size;
16564
16565 /* Create the first one, so we have something to add to. */
16566 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16567 VOIDmode, initialized);
16568 if (loc_result == NULL)
16569 return NULL;
16570 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16571 /* At present we only track constant-sized pieces. */
16572 if (!GET_MODE_SIZE (mode).is_constant (&size))
16573 return NULL;
16574 add_loc_descr_op_piece (&loc_result, size);
16575 for (i = 1; i < num_elem; i++)
16576 {
16577 dw_loc_descr_ref temp;
16578
16579 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16580 VOIDmode, initialized);
16581 if (temp == NULL)
16582 return NULL;
16583 add_loc_descr (&loc_result, temp);
16584 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16585 /* At present we only track constant-sized pieces. */
16586 if (!GET_MODE_SIZE (mode).is_constant (&size))
16587 return NULL;
16588 add_loc_descr_op_piece (&loc_result, size);
16589 }
16590 }
16591 break;
16592
16593 case CONST_INT:
16594 if (mode != VOIDmode && mode != BLKmode)
16595 {
16596 int_mode = as_a <scalar_int_mode> (mode);
16597 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16598 INTVAL (rtl));
16599 }
16600 break;
16601
16602 case CONST_DOUBLE:
16603 if (mode == VOIDmode)
16604 mode = GET_MODE (rtl);
16605
16606 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16607 {
16608 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16609
16610 /* Note that a CONST_DOUBLE rtx could represent either an integer
16611 or a floating-point constant. A CONST_DOUBLE is used whenever
16612 the constant requires more than one word in order to be
16613 adequately represented. We output CONST_DOUBLEs as blocks. */
16614 scalar_mode smode = as_a <scalar_mode> (mode);
16615 loc_result = new_loc_descr (DW_OP_implicit_value,
16616 GET_MODE_SIZE (smode), 0);
16617 #if TARGET_SUPPORTS_WIDE_INT == 0
16618 if (!SCALAR_FLOAT_MODE_P (smode))
16619 {
16620 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16621 loc_result->dw_loc_oprnd2.v.val_double
16622 = rtx_to_double_int (rtl);
16623 }
16624 else
16625 #endif
16626 {
16627 unsigned int length = GET_MODE_SIZE (smode);
16628 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16629
16630 insert_float (rtl, array);
16631 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16632 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16633 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16634 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16635 }
16636 }
16637 break;
16638
16639 case CONST_WIDE_INT:
16640 if (mode == VOIDmode)
16641 mode = GET_MODE (rtl);
16642
16643 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16644 {
16645 int_mode = as_a <scalar_int_mode> (mode);
16646 loc_result = new_loc_descr (DW_OP_implicit_value,
16647 GET_MODE_SIZE (int_mode), 0);
16648 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16649 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16650 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16651 }
16652 break;
16653
16654 case CONST_VECTOR:
16655 if (mode == VOIDmode)
16656 mode = GET_MODE (rtl);
16657
16658 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16659 {
16660 unsigned int length;
16661 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16662 return NULL;
16663
16664 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16665 unsigned char *array
16666 = ggc_vec_alloc<unsigned char> (length * elt_size);
16667 unsigned int i;
16668 unsigned char *p;
16669 machine_mode imode = GET_MODE_INNER (mode);
16670
16671 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16672 switch (GET_MODE_CLASS (mode))
16673 {
16674 case MODE_VECTOR_INT:
16675 for (i = 0, p = array; i < length; i++, p += elt_size)
16676 {
16677 rtx elt = CONST_VECTOR_ELT (rtl, i);
16678 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16679 }
16680 break;
16681
16682 case MODE_VECTOR_FLOAT:
16683 for (i = 0, p = array; i < length; i++, p += elt_size)
16684 {
16685 rtx elt = CONST_VECTOR_ELT (rtl, i);
16686 insert_float (elt, p);
16687 }
16688 break;
16689
16690 default:
16691 gcc_unreachable ();
16692 }
16693
16694 loc_result = new_loc_descr (DW_OP_implicit_value,
16695 length * elt_size, 0);
16696 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16697 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16698 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16699 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16700 }
16701 break;
16702
16703 case CONST:
16704 if (mode == VOIDmode
16705 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16706 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16707 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16708 {
16709 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16710 break;
16711 }
16712 /* FALLTHROUGH */
16713 case SYMBOL_REF:
16714 if (!const_ok_for_output (rtl))
16715 break;
16716 /* FALLTHROUGH */
16717 case LABEL_REF:
16718 if (is_a <scalar_int_mode> (mode, &int_mode)
16719 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16720 && (dwarf_version >= 4 || !dwarf_strict))
16721 {
16722 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16723 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16724 vec_safe_push (used_rtx_array, rtl);
16725 }
16726 break;
16727
16728 case DEBUG_IMPLICIT_PTR:
16729 loc_result = implicit_ptr_descriptor (rtl, 0);
16730 break;
16731
16732 case PLUS:
16733 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16734 && CONST_INT_P (XEXP (rtl, 1)))
16735 {
16736 loc_result
16737 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16738 break;
16739 }
16740 /* FALLTHRU */
16741 do_default:
16742 default:
16743 if ((is_a <scalar_int_mode> (mode, &int_mode)
16744 && GET_MODE (rtl) == int_mode
16745 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16746 && dwarf_version >= 4)
16747 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16748 {
16749 /* Value expression. */
16750 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16751 if (loc_result)
16752 add_loc_descr (&loc_result,
16753 new_loc_descr (DW_OP_stack_value, 0, 0));
16754 }
16755 break;
16756 }
16757
16758 return loc_result;
16759 }
16760
16761 /* We need to figure out what section we should use as the base for the
16762 address ranges where a given location is valid.
16763 1. If this particular DECL has a section associated with it, use that.
16764 2. If this function has a section associated with it, use that.
16765 3. Otherwise, use the text section.
16766 XXX: If you split a variable across multiple sections, we won't notice. */
16767
16768 static const char *
16769 secname_for_decl (const_tree decl)
16770 {
16771 const char *secname;
16772
16773 if (VAR_OR_FUNCTION_DECL_P (decl)
16774 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16775 && DECL_SECTION_NAME (decl))
16776 secname = DECL_SECTION_NAME (decl);
16777 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16778 secname = DECL_SECTION_NAME (current_function_decl);
16779 else if (cfun && in_cold_section_p)
16780 secname = crtl->subsections.cold_section_label;
16781 else
16782 secname = text_section_label;
16783
16784 return secname;
16785 }
16786
16787 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16788
16789 static bool
16790 decl_by_reference_p (tree decl)
16791 {
16792 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16793 || VAR_P (decl))
16794 && DECL_BY_REFERENCE (decl));
16795 }
16796
16797 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16798 for VARLOC. */
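/* WANT_ADDRESS is interpreted roughly as follows: 0 means the value of the
   location is wanted, non-zero means its address is wanted, and 2 in
   addition allows a computed address to be expressed with
   DW_OP_stack_value when no real memory address is available.  */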
16799
16800 static dw_loc_descr_ref
16801 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16802 enum var_init_status initialized)
16803 {
16804 int have_address = 0;
16805 dw_loc_descr_ref descr;
16806 machine_mode mode;
16807
16808 if (want_address != 2)
16809 {
16810 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16811 /* Single part. */
16812 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16813 {
16814 varloc = PAT_VAR_LOCATION_LOC (varloc);
16815 if (GET_CODE (varloc) == EXPR_LIST)
16816 varloc = XEXP (varloc, 0);
16817 mode = GET_MODE (varloc);
16818 if (MEM_P (varloc))
16819 {
16820 rtx addr = XEXP (varloc, 0);
16821 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16822 mode, initialized);
16823 if (descr)
16824 have_address = 1;
16825 else
16826 {
16827 rtx x = avoid_constant_pool_reference (varloc);
16828 if (x != varloc)
16829 descr = mem_loc_descriptor (x, mode, VOIDmode,
16830 initialized);
16831 }
16832 }
16833 else
16834 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16835 }
16836 else
16837 return 0;
16838 }
16839 else
16840 {
16841 if (GET_CODE (varloc) == VAR_LOCATION)
16842 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16843 else
16844 mode = DECL_MODE (loc);
16845 descr = loc_descriptor (varloc, mode, initialized);
16846 have_address = 1;
16847 }
16848
16849 if (!descr)
16850 return 0;
16851
16852 if (want_address == 2 && !have_address
16853 && (dwarf_version >= 4 || !dwarf_strict))
16854 {
16855 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16856 {
16857 expansion_failed (loc, NULL_RTX,
16858 "DWARF address size mismatch");
16859 return 0;
16860 }
16861 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16862 have_address = 1;
16863 }
16864 /* Report failure if we can't fill the request for an address. */
16865 if (want_address && !have_address)
16866 {
16867 expansion_failed (loc, NULL_RTX,
16868 "Want address and only have value");
16869 return 0;
16870 }
16871
16872 /* If we've got an address and don't want one, dereference. */
16873 if (!want_address && have_address)
16874 {
16875 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16876 enum dwarf_location_atom op;
16877
16878 if (size > DWARF2_ADDR_SIZE || size == -1)
16879 {
16880 expansion_failed (loc, NULL_RTX,
16881 "DWARF address size mismatch");
16882 return 0;
16883 }
16884 else if (size == DWARF2_ADDR_SIZE)
16885 op = DW_OP_deref;
16886 else
16887 op = DW_OP_deref_size;
16888
16889 add_loc_descr (&descr, new_loc_descr (op, size, 0));
16890 }
16891
16892 return descr;
16893 }
16894
16895 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
16896 if it is not possible. */
16897
16898 static dw_loc_descr_ref
16899 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
16900 {
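  /* DW_OP_piece takes a size in bytes, so it is only usable when the piece
     is byte-aligned and a whole number of bytes; DW_OP_bit_piece (a DWARF 3
     addition, hence the version check) takes a size in bits and a bit
     offset.  */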
16901 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
16902 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
16903 else if (dwarf_version >= 3 || !dwarf_strict)
16904 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
16905 else
16906 return NULL;
16907 }
16908
16909 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16910 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
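/* As a sketch of the output (not taken from real compiler output): a 64-bit
   variable split by SRA into two 32-bit halves where only the second half
   has a location yields "DW_OP_piece 4" with an empty expression for the
   optimized-out half, followed by the location of the live half and another
   "DW_OP_piece 4".  */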
16911
16912 static dw_loc_descr_ref
16913 dw_sra_loc_expr (tree decl, rtx loc)
16914 {
16915 rtx p;
16916 unsigned HOST_WIDE_INT padsize = 0;
16917 dw_loc_descr_ref descr, *descr_tail;
16918 unsigned HOST_WIDE_INT decl_size;
16919 rtx varloc;
16920 enum var_init_status initialized;
16921
16922 if (DECL_SIZE (decl) == NULL
16923 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
16924 return NULL;
16925
16926 decl_size = tree_to_uhwi (DECL_SIZE (decl));
16927 descr = NULL;
16928 descr_tail = &descr;
16929
16930 for (p = loc; p; p = XEXP (p, 1))
16931 {
16932 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
16933 rtx loc_note = *decl_piece_varloc_ptr (p);
16934 dw_loc_descr_ref cur_descr;
16935 dw_loc_descr_ref *tail, last = NULL;
16936 unsigned HOST_WIDE_INT opsize = 0;
16937
16938 if (loc_note == NULL_RTX
16939 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
16940 {
16941 padsize += bitsize;
16942 continue;
16943 }
16944 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
16945 varloc = NOTE_VAR_LOCATION (loc_note);
16946 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
16947 if (cur_descr == NULL)
16948 {
16949 padsize += bitsize;
16950 continue;
16951 }
16952
16953 /* Check that cur_descr either doesn't use
16954 DW_OP_*piece operations, or their sum is equal
16955 to bitsize. Otherwise we can't embed it. */
16956 for (tail = &cur_descr; *tail != NULL;
16957 tail = &(*tail)->dw_loc_next)
16958 if ((*tail)->dw_loc_opc == DW_OP_piece)
16959 {
16960 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
16961 * BITS_PER_UNIT;
16962 last = *tail;
16963 }
16964 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
16965 {
16966 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
16967 last = *tail;
16968 }
16969
16970 if (last != NULL && opsize != bitsize)
16971 {
16972 padsize += bitsize;
16973 /* Discard the current piece of the descriptor and release any
16974 addr_table entries it uses. */
16975 remove_loc_list_addr_table_entries (cur_descr);
16976 continue;
16977 }
16978
16979 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
16980 expression, which means that those bits are optimized out. */
16981 if (padsize)
16982 {
16983 if (padsize > decl_size)
16984 {
16985 remove_loc_list_addr_table_entries (cur_descr);
16986 goto discard_descr;
16987 }
16988 decl_size -= padsize;
16989 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
16990 if (*descr_tail == NULL)
16991 {
16992 remove_loc_list_addr_table_entries (cur_descr);
16993 goto discard_descr;
16994 }
16995 descr_tail = &(*descr_tail)->dw_loc_next;
16996 padsize = 0;
16997 }
16998 *descr_tail = cur_descr;
16999 descr_tail = tail;
17000 if (bitsize > decl_size)
17001 goto discard_descr;
17002 decl_size -= bitsize;
17003 if (last == NULL)
17004 {
17005 HOST_WIDE_INT offset = 0;
17006 if (GET_CODE (varloc) == VAR_LOCATION
17007 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
17008 {
17009 varloc = PAT_VAR_LOCATION_LOC (varloc);
17010 if (GET_CODE (varloc) == EXPR_LIST)
17011 varloc = XEXP (varloc, 0);
17012 }
17013 do
17014 {
17015 if (GET_CODE (varloc) == CONST
17016 || GET_CODE (varloc) == SIGN_EXTEND
17017 || GET_CODE (varloc) == ZERO_EXTEND)
17018 varloc = XEXP (varloc, 0);
17019 else if (GET_CODE (varloc) == SUBREG)
17020 varloc = SUBREG_REG (varloc);
17021 else
17022 break;
17023 }
17024 while (1);
17025 /* The DW_OP_bit_piece offset should be zero for register
17026 or implicit location descriptions and for empty location
17027 descriptions, but for memory addresses it needs big-endian
17028 adjustment. */
17029 if (MEM_P (varloc))
17030 {
17031 unsigned HOST_WIDE_INT memsize;
17032 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
17033 goto discard_descr;
17034 memsize *= BITS_PER_UNIT;
17035 if (memsize != bitsize)
17036 {
17037 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
17038 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
17039 goto discard_descr;
17040 if (memsize < bitsize)
17041 goto discard_descr;
17042 if (BITS_BIG_ENDIAN)
17043 offset = memsize - bitsize;
17044 }
17045 }
17046
17047 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17048 if (*descr_tail == NULL)
17049 goto discard_descr;
17050 descr_tail = &(*descr_tail)->dw_loc_next;
17051 }
17052 }
17053
17054 /* If there were any non-empty expressions, add padding till the end of
17055 the decl. */
17056 if (descr != NULL && decl_size != 0)
17057 {
17058 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17059 if (*descr_tail == NULL)
17060 goto discard_descr;
17061 }
17062 return descr;
17063
17064 discard_descr:
17065 /* Discard the descriptor and release any addr_table entries it uses. */
17066 remove_loc_list_addr_table_entries (descr);
17067 return NULL;
17068 }
17069
17070 /* Return the dwarf representation of the location list LOC_LIST of
17071 DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
17072 function. */
17073
17074 static dw_loc_list_ref
17075 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17076 {
17077 const char *endname, *secname;
17078 var_loc_view endview;
17079 rtx varloc;
17080 enum var_init_status initialized;
17081 struct var_loc_node *node;
17082 dw_loc_descr_ref descr;
17083 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17084 dw_loc_list_ref list = NULL;
17085 dw_loc_list_ref *listp = &list;
17086
17087 /* Now that we know what section we are using for a base,
17088 actually construct the list of locations.
17089 The first location information is what is passed to the
17090 function that creates the location list, and the remaining
17091 locations just get added on to that list.
17092 Note that we only know the start address for a location
17093 (i.e. where the location changes), so to build the range, we use
17094 the range [current location start, next location start].
17095 This means we have to special-case the last node, and generate
17096 a range of [last location start, end of function label]. */
17097
17098 if (cfun && crtl->has_bb_partition)
17099 {
17100 bool save_in_cold_section_p = in_cold_section_p;
17101 in_cold_section_p = first_function_block_is_cold;
17102 if (loc_list->last_before_switch == NULL)
17103 in_cold_section_p = !in_cold_section_p;
17104 secname = secname_for_decl (decl);
17105 in_cold_section_p = save_in_cold_section_p;
17106 }
17107 else
17108 secname = secname_for_decl (decl);
17109
17110 for (node = loc_list->first; node; node = node->next)
17111 {
17112 bool range_across_switch = false;
17113 if (GET_CODE (node->loc) == EXPR_LIST
17114 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17115 {
17116 if (GET_CODE (node->loc) == EXPR_LIST)
17117 {
17118 descr = NULL;
17119 /* This requires DW_OP_{,bit_}piece, which is not usable
17120 inside DWARF expressions. */
17121 if (want_address == 2)
17122 descr = dw_sra_loc_expr (decl, node->loc);
17123 }
17124 else
17125 {
17126 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17127 varloc = NOTE_VAR_LOCATION (node->loc);
17128 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17129 }
17130 if (descr)
17131 {
17132 /* If a section switch happens in between node->label
17133 and node->next->label (or the end of the function) and
17134 we can't emit it as a single entry list,
17135 emit two ranges, the first one ending at the end
17136 of the first partition and the second one starting at the
17137 beginning of the second partition. */
17138 if (node == loc_list->last_before_switch
17139 && (node != loc_list->first || loc_list->first->next)
17140 && current_function_decl)
17141 {
17142 endname = cfun->fde->dw_fde_end;
17143 endview = 0;
17144 range_across_switch = true;
17145 }
17146 /* The variable has a location between NODE->LABEL and
17147 NODE->NEXT->LABEL. */
17148 else if (node->next)
17149 endname = node->next->label, endview = node->next->view;
17150 /* If the variable has a location at the last label
17151 it keeps its location until the end of function. */
17152 else if (!current_function_decl)
17153 endname = text_end_label, endview = 0;
17154 else
17155 {
17156 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17157 current_function_funcdef_no);
17158 endname = ggc_strdup (label_id);
17159 endview = 0;
17160 }
17161
17162 *listp = new_loc_list (descr, node->label, node->view,
17163 endname, endview, secname);
17164 if (TREE_CODE (decl) == PARM_DECL
17165 && node == loc_list->first
17166 && NOTE_P (node->loc)
17167 && strcmp (node->label, endname) == 0)
17168 (*listp)->force = true;
17169 listp = &(*listp)->dw_loc_next;
17170 }
17171 }
17172
17173 if (cfun
17174 && crtl->has_bb_partition
17175 && node == loc_list->last_before_switch)
17176 {
17177 bool save_in_cold_section_p = in_cold_section_p;
17178 in_cold_section_p = !first_function_block_is_cold;
17179 secname = secname_for_decl (decl);
17180 in_cold_section_p = save_in_cold_section_p;
17181 }
17182
17183 if (range_across_switch)
17184 {
17185 if (GET_CODE (node->loc) == EXPR_LIST)
17186 descr = dw_sra_loc_expr (decl, node->loc);
17187 else
17188 {
17189 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17190 varloc = NOTE_VAR_LOCATION (node->loc);
17191 descr = dw_loc_list_1 (decl, varloc, want_address,
17192 initialized);
17193 }
17194 gcc_assert (descr);
17195 /* The variable has a location between NODE->LABEL and
17196 NODE->NEXT->LABEL. */
17197 if (node->next)
17198 endname = node->next->label, endview = node->next->view;
17199 else
17200 endname = cfun->fde->dw_fde_second_end, endview = 0;
17201 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17202 endname, endview, secname);
17203 listp = &(*listp)->dw_loc_next;
17204 }
17205 }
17206
17207 /* Try to avoid the overhead of a location list by emitting a location
17208 expression instead, but only if we didn't have more than one
17209 location entry in the first place. If some entries were not
17210 representable, we don't want to pretend that a single entry that was
17211 representable applies to the entire scope in which the variable is
17212 available. */
17213 if (list && loc_list->first->next)
17214 gen_llsym (list);
17215 else
17216 maybe_gen_llsym (list);
17217
17218 return list;
17219 }
17220
17221 /* Return true if the loc_list has only a single element and thus can be
17222 represented as a location description. */
17223
17224 static bool
17225 single_element_loc_list_p (dw_loc_list_ref list)
17226 {
17227 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17228 return !list->ll_symbol;
17229 }
17230
17231 /* Duplicate a single element of location list. */
17232
17233 static inline dw_loc_descr_ref
17234 copy_loc_descr (dw_loc_descr_ref ref)
17235 {
17236 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17237 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17238 return copy;
17239 }
17240
17241 /* To each location in list LIST append loc descr REF. */
17242
17243 static void
17244 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17245 {
17246 dw_loc_descr_ref copy;
17247 add_loc_descr (&list->expr, ref);
17248 list = list->dw_loc_next;
17249 while (list)
17250 {
17251 copy = copy_loc_descr (ref);
17252 add_loc_descr (&list->expr, copy);
17253 while (copy->dw_loc_next)
17254 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17255 list = list->dw_loc_next;
17256 }
17257 }
17258
17259 /* To each location in list LIST prepend loc descr REF. */
17260
17261 static void
17262 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17263 {
17264 dw_loc_descr_ref copy;
17265 dw_loc_descr_ref ref_end = list->expr;
17266 add_loc_descr (&ref, list->expr);
17267 list->expr = ref;
17268 list = list->dw_loc_next;
17269 while (list)
17270 {
17271 dw_loc_descr_ref end = list->expr;
17272 list->expr = copy = copy_loc_descr (ref);
17273 while (copy->dw_loc_next != ref_end)
17274 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17275 copy->dw_loc_next = end;
17276 list = list->dw_loc_next;
17277 }
17278 }
17279
17280 /* Given two lists RET and LIST,
17281 produce a location list that is the result of adding the expression in LIST
17282 to the expression in RET at each position in the program.
17283 Might be destructive on both RET and LIST.
17284
17285 TODO: We handle only the simple cases of RET or LIST having at most one
17286 element. The general case would involve sorting the lists in program order
17287 and merging them, which will need some additional work.
17288 Adding that would improve the quality of debug info, especially for SRA-ed
17289 structures. */
17290
17291 static void
17292 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17293 {
17294 if (!list)
17295 return;
17296 if (!*ret)
17297 {
17298 *ret = list;
17299 return;
17300 }
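  /* In both of the simple cases handled below the combined entries evaluate
     the expression from *RET followed by the expression from LIST; e.g. if
     *RET is a single entry with expression R and LIST has entries with
     expressions L1 and L2, the result has entries evaluating R L1 and R L2
     over LIST's ranges.  */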
17301 if (!list->dw_loc_next)
17302 {
17303 add_loc_descr_to_each (*ret, list->expr);
17304 return;
17305 }
17306 if (!(*ret)->dw_loc_next)
17307 {
17308 prepend_loc_descr_to_each (list, (*ret)->expr);
17309 *ret = list;
17310 return;
17311 }
17312 expansion_failed (NULL_TREE, NULL_RTX,
17313 "Don't know how to merge two non-trivial"
17314 " location lists.\n");
17315 *ret = NULL;
17316 return;
17317 }
17318
17319 /* LOC is a constant expression. Try our luck: look it up in the constant
17320 pool and return a loc_descr for its address. */
17321
17322 static dw_loc_descr_ref
17323 cst_pool_loc_descr (tree loc)
17324 {
17325 /* Get an RTL for this, if something has been emitted. */
17326 rtx rtl = lookup_constant_def (loc);
17327
17328 if (!rtl || !MEM_P (rtl))
17329 {
17330 gcc_assert (!rtl);
17331 return 0;
17332 }
17333 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17334
17335 /* TODO: We might get more coverage if we were actually delaying expansion
17336 of all expressions until the end of compilation, when constant pools are
17337 fully populated. */
17338 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17339 {
17340 expansion_failed (loc, NULL_RTX,
17341 "CST value in constant pool but not marked.");
17342 return 0;
17343 }
17344 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17345 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17346 }
17347
17348 /* Return a dw_loc_list representing the address of addr_expr LOC
17349 by looking for an inner INDIRECT_REF expression and turning
17350 it into simple arithmetic.
17351
17352 See loc_list_from_tree for the meaning of CONTEXT. */
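/* Roughly: for &ptr->field with a non-zero offset, the value of ptr is
   computed, the (variable or constant) byte offset of field is added with
   DW_OP_plus or DW_OP_plus_uconst, and DW_OP_stack_value marks the computed
   address as the value of the expression.  */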
17353
17354 static dw_loc_list_ref
17355 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17356 loc_descr_context *context)
17357 {
17358 tree obj, offset;
17359 poly_int64 bitsize, bitpos, bytepos;
17360 machine_mode mode;
17361 int unsignedp, reversep, volatilep = 0;
17362 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17363
17364 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17365 &bitsize, &bitpos, &offset, &mode,
17366 &unsignedp, &reversep, &volatilep);
17367 STRIP_NOPS (obj);
17368 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17369 {
17370 expansion_failed (loc, NULL_RTX, "bitfield access");
17371 return 0;
17372 }
17373 if (!INDIRECT_REF_P (obj))
17374 {
17375 expansion_failed (obj,
17376 NULL_RTX, "no indirect ref in inner reference");
17377 return 0;
17378 }
17379 if (!offset && known_eq (bitpos, 0))
17380 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17381 context);
17382 else if (toplev
17383 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17384 && (dwarf_version >= 4 || !dwarf_strict))
17385 {
17386 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17387 if (!list_ret)
17388 return 0;
17389 if (offset)
17390 {
17391 /* Variable offset. */
17392 list_ret1 = loc_list_from_tree (offset, 0, context);
17393 if (list_ret1 == 0)
17394 return 0;
17395 add_loc_list (&list_ret, list_ret1);
17396 if (!list_ret)
17397 return 0;
17398 add_loc_descr_to_each (list_ret,
17399 new_loc_descr (DW_OP_plus, 0, 0));
17400 }
17401 HOST_WIDE_INT value;
17402 if (bytepos.is_constant (&value) && value > 0)
17403 add_loc_descr_to_each (list_ret,
17404 new_loc_descr (DW_OP_plus_uconst, value, 0));
17405 else if (maybe_ne (bytepos, 0))
17406 loc_list_plus_const (list_ret, bytepos);
17407 add_loc_descr_to_each (list_ret,
17408 new_loc_descr (DW_OP_stack_value, 0, 0));
17409 }
17410 return list_ret;
17411 }
17412
17413 /* Set LOC to the next operation that is not a DW_OP_nop operation. If all
17414 operations from LOC are nops, move to the last one. Insert in NOPS all
17415 operations that are skipped. */
17416
17417 static void
17418 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17419 hash_set<dw_loc_descr_ref> &nops)
17420 {
17421 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17422 {
17423 nops.add (loc);
17424 loc = loc->dw_loc_next;
17425 }
17426 }
17427
17428 /* Helper for loc_descr_without_nops: free the location description operation
17429 P. */
17430
17431 bool
17432 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17433 {
17434 ggc_free (loc);
17435 return true;
17436 }
17437
17438 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17439 finishes LOC. */
17440
17441 static void
17442 loc_descr_without_nops (dw_loc_descr_ref &loc)
17443 {
17444 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17445 return;
17446
17447 /* Set of all DW_OP_nop operations we remove. */
17448 hash_set<dw_loc_descr_ref> nops;
17449
17450 /* First, strip all prefix NOP operations in order to keep the head of the
17451 operations list. */
17452 loc_descr_to_next_no_nop (loc, nops);
17453
17454 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17455 {
17456 /* For control flow operations: strip "prefix" nops in destination
17457 labels. */
17458 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17459 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17460 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17461 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17462
17463 /* Do the same for the operations that follow, then move to the next
17464 iteration. */
17465 if (cur->dw_loc_next != NULL)
17466 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17467 cur = cur->dw_loc_next;
17468 }
17469
17470 nops.traverse<void *, free_loc_descr> (NULL);
17471 }
17472
17473
17474 struct dwarf_procedure_info;
17475
17476 /* Helper structure for location descriptions generation. */
17477 struct loc_descr_context
17478 {
17479 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17480 NULL_TREE if DW_OP_push_object_address is invalid for this location
17481 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17482 tree context_type;
17483 /* The ..._DECL node that should be translated as a
17484 DW_OP_push_object_address operation. */
17485 tree base_decl;
17486 /* Information about the DWARF procedure we are currently generating. NULL if
17487 we are not generating a DWARF procedure. */
17488 struct dwarf_procedure_info *dpi;
17489 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17490 by consumer. Used for DW_TAG_generic_subrange attributes. */
17491 bool placeholder_arg;
17492 /* True if PLACEHOLDER_EXPR has been seen. */
17493 bool placeholder_seen;
17494 };
17495
17496 /* DWARF procedures generation
17497
17498 DWARF expressions (aka. location descriptions) are used to encode variable
17499 things such as sizes or offsets. Such computations can have redundant parts
17500 that can be factorized in order to reduce the size of the output debug
17501 information. This is the whole point of DWARF procedures.
17502
17503 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17504 already factorized into functions ("size functions") in order to handle very
17505 big and complex types. Such functions are quite simple: they have integral
17506 arguments, they return an integral result and their body contains only a
17507 return statement with arithmetic expressions. This is the only kind of
17508 function we are interested in translating into DWARF procedures, here.
17509
17510 DWARF expressions and DWARF procedures are executed using a stack, so we have
17511 to define some calling convention for them to interact. Let's say that:
17512
17513 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17514 all arguments in reverse order (right-to-left) so that when the DWARF
17515 procedure execution starts, the first argument is the top of the stack.
17516
17517 - Then, when returning, the DWARF procedure must have consumed all arguments
17518 on the stack, must have pushed the result and touched nothing else.
17519
17520 - Each argument and the result have an integral type that can be held in a
17521 single stack slot.
17522
17523 - We call "frame offset" the number of stack slots that are "under DWARF
17524 procedure control": it includes the argument slots, the temporaries and
17525 the result slot. Thus, it is equal to the number of arguments when the
17526 procedure execution starts and must be equal to one (the result) when it
17527 returns. */
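   /* A minimal illustrative example (hypothetical size function, not actual
      compiler output): for "s (a, b) { return a * b + 4; }", the procedure
      body could be

          DW_OP_mul                frame offset drops from 2 (the arguments) to 1
          DW_OP_lit4; DW_OP_plus   frame offset is 1 again: only the result is left

      and a caller pushes B, then A, before executing DW_OP_call* on it, so
      the stack usage recorded for such a procedure is 1 - 2 = -1 slots.  */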
17528
17529 /* Helper structure used when generating operations for a DWARF procedure. */
17530 struct dwarf_procedure_info
17531 {
17532 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17533 currently translated. */
17534 tree fndecl;
17535 /* The number of arguments FNDECL takes. */
17536 unsigned args_count;
17537 };
17538
17539 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17540 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17541 equate it to this DIE. */
17542
17543 static dw_die_ref
17544 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17545 dw_die_ref parent_die)
17546 {
17547 dw_die_ref dwarf_proc_die;
17548
17549 if ((dwarf_version < 3 && dwarf_strict)
17550 || location == NULL)
17551 return NULL;
17552
17553 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17554 if (fndecl)
17555 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17556 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17557 return dwarf_proc_die;
17558 }
17559
17560 /* Return whether TYPE is a supported type as a DWARF procedure argument
17561 type or return type (we handle only scalar types and pointer types that
17562 aren't wider than the DWARF expression evaluation stack).  */
17563
17564 static bool
17565 is_handled_procedure_type (tree type)
17566 {
17567 return ((INTEGRAL_TYPE_P (type)
17568 || TREE_CODE (type) == OFFSET_TYPE
17569 || TREE_CODE (type) == POINTER_TYPE)
17570 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17571 }
17572
17573 /* Helper for resolve_args_picking: do the same but stop when coming across
17574 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17575 offset *before* evaluating the corresponding operation. */
17576
17577 static bool
17578 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17579 struct dwarf_procedure_info *dpi,
17580 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17581 {
17582 /* The "frame_offset" identifier is already used to name a macro... */
17583 unsigned frame_offset_ = initial_frame_offset;
17584 dw_loc_descr_ref l;
17585
17586 for (l = loc; l != NULL;)
17587 {
17588 bool existed;
17589 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17590
17591 /* If we have already met this node, there is nothing to compute anymore. */
17592 if (existed)
17593 {
17594 /* Make sure that the stack size is consistent wherever the execution
17595 flow comes from. */
17596 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17597 break;
17598 }
17599 l_frame_offset = frame_offset_;
17600
17601 /* If needed, relocate the picking offset with respect to the frame
17602 offset. */
17603 if (l->frame_offset_rel)
17604 {
17605 unsigned HOST_WIDE_INT off;
17606 switch (l->dw_loc_opc)
17607 {
17608 case DW_OP_pick:
17609 off = l->dw_loc_oprnd1.v.val_unsigned;
17610 break;
17611 case DW_OP_dup:
17612 off = 0;
17613 break;
17614 case DW_OP_over:
17615 off = 1;
17616 break;
17617 default:
17618 gcc_unreachable ();
17619 }
17620 /* frame_offset_ is the size of the current stack frame, including
17621 incoming arguments. Besides, the arguments are pushed
17622 right-to-left. Thus, in order to access the Nth argument from
17623 this operation node, the picking has to skip temporaries *plus*
17624 one stack slot per argument (0 for the first one, 1 for the second
17625 one, etc.).
17626
17627 The targeted argument number (N) is already set as the operand,
17628 and the number of temporaries can be computed with:
17629 frame_offset_ - dpi->args_count */
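 /* Worked example with illustrative numbers: for args_count == 2, asking
    for argument N == 1 while frame_offset_ == 3 (two arguments plus one
    temporary) gives off = 1 + 3 - 2 = 2, i.e. DW_OP_pick 2 skips the
    temporary and the first argument to reach the second argument.  */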
17630 off += frame_offset_ - dpi->args_count;
17631
17632 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17633 if (off > 255)
17634 return false;
17635
17636 if (off == 0)
17637 {
17638 l->dw_loc_opc = DW_OP_dup;
17639 l->dw_loc_oprnd1.v.val_unsigned = 0;
17640 }
17641 else if (off == 1)
17642 {
17643 l->dw_loc_opc = DW_OP_over;
17644 l->dw_loc_oprnd1.v.val_unsigned = 0;
17645 }
17646 else
17647 {
17648 l->dw_loc_opc = DW_OP_pick;
17649 l->dw_loc_oprnd1.v.val_unsigned = off;
17650 }
17651 }
17652
17653 /* Update frame_offset according to the effect the current operation has
17654 on the stack. */
17655 switch (l->dw_loc_opc)
17656 {
17657 case DW_OP_deref:
17658 case DW_OP_swap:
17659 case DW_OP_rot:
17660 case DW_OP_abs:
17661 case DW_OP_neg:
17662 case DW_OP_not:
17663 case DW_OP_plus_uconst:
17664 case DW_OP_skip:
17665 case DW_OP_reg0:
17666 case DW_OP_reg1:
17667 case DW_OP_reg2:
17668 case DW_OP_reg3:
17669 case DW_OP_reg4:
17670 case DW_OP_reg5:
17671 case DW_OP_reg6:
17672 case DW_OP_reg7:
17673 case DW_OP_reg8:
17674 case DW_OP_reg9:
17675 case DW_OP_reg10:
17676 case DW_OP_reg11:
17677 case DW_OP_reg12:
17678 case DW_OP_reg13:
17679 case DW_OP_reg14:
17680 case DW_OP_reg15:
17681 case DW_OP_reg16:
17682 case DW_OP_reg17:
17683 case DW_OP_reg18:
17684 case DW_OP_reg19:
17685 case DW_OP_reg20:
17686 case DW_OP_reg21:
17687 case DW_OP_reg22:
17688 case DW_OP_reg23:
17689 case DW_OP_reg24:
17690 case DW_OP_reg25:
17691 case DW_OP_reg26:
17692 case DW_OP_reg27:
17693 case DW_OP_reg28:
17694 case DW_OP_reg29:
17695 case DW_OP_reg30:
17696 case DW_OP_reg31:
17697 case DW_OP_bregx:
17698 case DW_OP_piece:
17699 case DW_OP_deref_size:
17700 case DW_OP_nop:
17701 case DW_OP_bit_piece:
17702 case DW_OP_implicit_value:
17703 case DW_OP_stack_value:
17704 break;
17705
17706 case DW_OP_addr:
17707 case DW_OP_const1u:
17708 case DW_OP_const1s:
17709 case DW_OP_const2u:
17710 case DW_OP_const2s:
17711 case DW_OP_const4u:
17712 case DW_OP_const4s:
17713 case DW_OP_const8u:
17714 case DW_OP_const8s:
17715 case DW_OP_constu:
17716 case DW_OP_consts:
17717 case DW_OP_dup:
17718 case DW_OP_over:
17719 case DW_OP_pick:
17720 case DW_OP_lit0:
17721 case DW_OP_lit1:
17722 case DW_OP_lit2:
17723 case DW_OP_lit3:
17724 case DW_OP_lit4:
17725 case DW_OP_lit5:
17726 case DW_OP_lit6:
17727 case DW_OP_lit7:
17728 case DW_OP_lit8:
17729 case DW_OP_lit9:
17730 case DW_OP_lit10:
17731 case DW_OP_lit11:
17732 case DW_OP_lit12:
17733 case DW_OP_lit13:
17734 case DW_OP_lit14:
17735 case DW_OP_lit15:
17736 case DW_OP_lit16:
17737 case DW_OP_lit17:
17738 case DW_OP_lit18:
17739 case DW_OP_lit19:
17740 case DW_OP_lit20:
17741 case DW_OP_lit21:
17742 case DW_OP_lit22:
17743 case DW_OP_lit23:
17744 case DW_OP_lit24:
17745 case DW_OP_lit25:
17746 case DW_OP_lit26:
17747 case DW_OP_lit27:
17748 case DW_OP_lit28:
17749 case DW_OP_lit29:
17750 case DW_OP_lit30:
17751 case DW_OP_lit31:
17752 case DW_OP_breg0:
17753 case DW_OP_breg1:
17754 case DW_OP_breg2:
17755 case DW_OP_breg3:
17756 case DW_OP_breg4:
17757 case DW_OP_breg5:
17758 case DW_OP_breg6:
17759 case DW_OP_breg7:
17760 case DW_OP_breg8:
17761 case DW_OP_breg9:
17762 case DW_OP_breg10:
17763 case DW_OP_breg11:
17764 case DW_OP_breg12:
17765 case DW_OP_breg13:
17766 case DW_OP_breg14:
17767 case DW_OP_breg15:
17768 case DW_OP_breg16:
17769 case DW_OP_breg17:
17770 case DW_OP_breg18:
17771 case DW_OP_breg19:
17772 case DW_OP_breg20:
17773 case DW_OP_breg21:
17774 case DW_OP_breg22:
17775 case DW_OP_breg23:
17776 case DW_OP_breg24:
17777 case DW_OP_breg25:
17778 case DW_OP_breg26:
17779 case DW_OP_breg27:
17780 case DW_OP_breg28:
17781 case DW_OP_breg29:
17782 case DW_OP_breg30:
17783 case DW_OP_breg31:
17784 case DW_OP_fbreg:
17785 case DW_OP_push_object_address:
17786 case DW_OP_call_frame_cfa:
17787 case DW_OP_GNU_variable_value:
17788 ++frame_offset_;
17789 break;
17790
17791 case DW_OP_drop:
17792 case DW_OP_xderef:
17793 case DW_OP_and:
17794 case DW_OP_div:
17795 case DW_OP_minus:
17796 case DW_OP_mod:
17797 case DW_OP_mul:
17798 case DW_OP_or:
17799 case DW_OP_plus:
17800 case DW_OP_shl:
17801 case DW_OP_shr:
17802 case DW_OP_shra:
17803 case DW_OP_xor:
17804 case DW_OP_bra:
17805 case DW_OP_eq:
17806 case DW_OP_ge:
17807 case DW_OP_gt:
17808 case DW_OP_le:
17809 case DW_OP_lt:
17810 case DW_OP_ne:
17811 case DW_OP_regx:
17812 case DW_OP_xderef_size:
17813 --frame_offset_;
17814 break;
17815
17816 case DW_OP_call2:
17817 case DW_OP_call4:
17818 case DW_OP_call_ref:
17819 {
17820 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17821 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17822
17823 if (stack_usage == NULL)
17824 return false;
17825 frame_offset_ += *stack_usage;
17826 break;
17827 }
17828
17829 case DW_OP_implicit_pointer:
17830 case DW_OP_entry_value:
17831 case DW_OP_const_type:
17832 case DW_OP_regval_type:
17833 case DW_OP_deref_type:
17834 case DW_OP_convert:
17835 case DW_OP_reinterpret:
17836 case DW_OP_form_tls_address:
17837 case DW_OP_GNU_push_tls_address:
17838 case DW_OP_GNU_uninit:
17839 case DW_OP_GNU_encoded_addr:
17840 case DW_OP_GNU_implicit_pointer:
17841 case DW_OP_GNU_entry_value:
17842 case DW_OP_GNU_const_type:
17843 case DW_OP_GNU_regval_type:
17844 case DW_OP_GNU_deref_type:
17845 case DW_OP_GNU_convert:
17846 case DW_OP_GNU_reinterpret:
17847 case DW_OP_GNU_parameter_ref:
17848 /* loc_list_from_tree will probably not output these operations for
17849 size functions, so assume they will not appear here. */
17850 /* Fall through... */
17851
17852 default:
17853 gcc_unreachable ();
17854 }
17855
17856 /* Now, follow the control flow (except subroutine calls). */
17857 switch (l->dw_loc_opc)
17858 {
17859 case DW_OP_bra:
17860 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17861 frame_offsets))
17862 return false;
17863 /* Fall through. */
17864
17865 case DW_OP_skip:
17866 l = l->dw_loc_oprnd1.v.val_loc;
17867 break;
17868
17869 case DW_OP_stack_value:
17870 return true;
17871
17872 default:
17873 l = l->dw_loc_next;
17874 break;
17875 }
17876 }
17877
17878 return true;
17879 }
17880
17881 /* Make a DFS over operations reachable through LOC (i.e. follow branch
17882 operations) in order to resolve the operand of DW_OP_pick operations that
17883 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
17884 offset *before* LOC is executed. Return whether all relocations were
17885 successful. */
17886
17887 static bool
17888 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17889 struct dwarf_procedure_info *dpi)
17890 {
17891 /* Associate to all visited operations the frame offset *before* evaluating
17892 this operation. */
17893 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
17894
17895 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
17896 frame_offsets);
17897 }
17898
17899 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
17900 Return NULL if it is not possible. */
17901
17902 static dw_die_ref
17903 function_to_dwarf_procedure (tree fndecl)
17904 {
17905 struct loc_descr_context ctx;
17906 struct dwarf_procedure_info dpi;
17907 dw_die_ref dwarf_proc_die;
17908 tree tree_body = DECL_SAVED_TREE (fndecl);
17909 dw_loc_descr_ref loc_body, epilogue;
17910
17911 tree cursor;
17912 unsigned i;
17913
17914 /* Do not generate multiple DWARF procedures for the same function
17915 declaration. */
17916 dwarf_proc_die = lookup_decl_die (fndecl);
17917 if (dwarf_proc_die != NULL)
17918 return dwarf_proc_die;
17919
17920 /* DWARF procedures are available starting with the DWARFv3 standard. */
17921 if (dwarf_version < 3 && dwarf_strict)
17922 return NULL;
17923
17924 /* We handle only functions for which we still have a body, that return a
17925 supported type and that take arguments with supported types. Note that
17926 there is no point translating functions that return nothing. */
17927 if (tree_body == NULL_TREE
17928 || DECL_RESULT (fndecl) == NULL_TREE
17929 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
17930 return NULL;
17931
17932 for (cursor = DECL_ARGUMENTS (fndecl);
17933 cursor != NULL_TREE;
17934 cursor = TREE_CHAIN (cursor))
17935 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
17936 return NULL;
17937
17938 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
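 /* For illustration, a hypothetical size function of the form
    "sizetype f (sizetype n) { return n * 4 + 8; }" has a saved tree shaped as

        RETURN_EXPR
          MODIFY_EXPR (RESULT_DECL, PLUS_EXPR (MULT_EXPR (n, 4), 8))

    and "expr" below is that PLUS_EXPR; anything more complex than this
    single-return shape is rejected.  */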
17939 if (TREE_CODE (tree_body) != RETURN_EXPR)
17940 return NULL;
17941 tree_body = TREE_OPERAND (tree_body, 0);
17942 if (TREE_CODE (tree_body) != MODIFY_EXPR
17943 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
17944 return NULL;
17945 tree_body = TREE_OPERAND (tree_body, 1);
17946
17947 /* Try to translate the body expression itself. Note that this will probably
17948 cause an infinite recursion if its call graph has a cycle. This is very
17949 unlikely for size functions, however, so don't bother with such things at
17950 the moment. */
17951 ctx.context_type = NULL_TREE;
17952 ctx.base_decl = NULL_TREE;
17953 ctx.dpi = &dpi;
17954 ctx.placeholder_arg = false;
17955 ctx.placeholder_seen = false;
17956 dpi.fndecl = fndecl;
17957 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
17958 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
17959 if (!loc_body)
17960 return NULL;
17961
17962 /* After evaluating all operands in "loc_body", we should still have on the
17963 stack all arguments plus the desired function result (top of the stack).
17964 Generate code in order to keep only the result in our stack frame. */
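 /* Sketch of the epilogue built below for a two-argument procedure
    (stack listed bottom to top): starting from [arg2, arg1, result],
    each DW_OP_swap; DW_OP_drop pair removes the slot just under the
    result:  [arg2, arg1, result] -> [arg2, result] -> [result].  */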
17965 epilogue = NULL;
17966 for (i = 0; i < dpi.args_count; ++i)
17967 {
17968 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
17969 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
17970 op_couple->dw_loc_next->dw_loc_next = epilogue;
17971 epilogue = op_couple;
17972 }
17973 add_loc_descr (&loc_body, epilogue);
17974 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
17975 return NULL;
17976
17977 /* Trailing nops from loc_descriptor_from_tree (if any) cannot be removed
17978 because they are considered useful. Now that there is an epilogue, they
17979 are not useful anymore, so give it another try.
17980 loc_descr_without_nops (loc_body);
17981
17982 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
17983 a DW_TAG_dwarf_procedure, so we may have a conflict here. It's unlikely,
17984 though, given that size functions do not come from source, so they should
17985 not have a dedicated DW_TAG_subprogram DIE. */
17986 dwarf_proc_die
17987 = new_dwarf_proc_die (loc_body, fndecl,
17988 get_context_die (DECL_CONTEXT (fndecl)));
17989
17990 /* The called DWARF procedure consumes one stack slot per argument and
17991 returns one stack slot. */
17992 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
17993
17994 return dwarf_proc_die;
17995 }
17996
17997
17998 /* Generate Dwarf location list representing LOC.
17999 If WANT_ADDRESS is false, an expression computing the value of LOC is returned.
18000 If WANT_ADDRESS is 1, an expression computing the address of LOC is returned.
18001 If WANT_ADDRESS is 2, an expression computing an address usable in a location
18002 description is returned (i.e. DW_OP_reg can be used
18003 to refer to register values).
18004
18005 CONTEXT provides information to customize the location descriptions
18006 generation. Its context_type field specifies what type is implicitly
18007 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
18008 will not be generated.
18009
18010 Its DPI field determines whether we are generating a DWARF expression for a
18011 DWARF procedure, so PARM_DECL references are processed specifically.
18012
18013 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
18014 and dpi fields were null. */
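 /* Illustrative example with a made-up frame offset: for a variable V
    stored at frame offset -16,

        WANT_ADDRESS == 0  may yield  DW_OP_fbreg -16; DW_OP_deref   (the value)
        WANT_ADDRESS == 1  may yield  DW_OP_fbreg -16                (its address)
        WANT_ADDRESS == 2  may yield  DW_OP_fbreg -16                (location)

    whereas a bare register location such as DW_OP_reg<n> is only possible
    when WANT_ADDRESS == 2, as noted above.  */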
18015
18016 static dw_loc_list_ref
18017 loc_list_from_tree_1 (tree loc, int want_address,
18018 struct loc_descr_context *context)
18019 {
18020 dw_loc_descr_ref ret = NULL, ret1 = NULL;
18021 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
18022 int have_address = 0;
18023 enum dwarf_location_atom op;
18024
18025 /* ??? Most of the time we do not take proper care of sign/zero-
18026 extending the values. Hopefully this won't be a real
18027 problem... */
18028
18029 if (context != NULL
18030 && context->base_decl == loc
18031 && want_address == 0)
18032 {
18033 if (dwarf_version >= 3 || !dwarf_strict)
18034 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
18035 NULL, 0, NULL, 0, NULL);
18036 else
18037 return NULL;
18038 }
18039
18040 switch (TREE_CODE (loc))
18041 {
18042 case ERROR_MARK:
18043 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18044 return 0;
18045
18046 case PLACEHOLDER_EXPR:
18047 /* This case involves extracting fields from an object to determine the
18048 position of other fields. It is supposed to appear only as the first
18049 operand of COMPONENT_REF nodes and to reference precisely the type
18050 that the context allows. */
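 /* For instance (illustrative, Ada-like discriminated record): a size
    expression may contain COMPONENT_REF (PLACEHOLDER_EXPR <rec_type>, d),
    i.e. "field D of the object at hand"; the placeholder is then mapped to
    DW_OP_push_object_address so the consumer substitutes the actual
    object.  */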
18051 if (context != NULL
18052 && TREE_TYPE (loc) == context->context_type
18053 && want_address >= 1)
18054 {
18055 if (dwarf_version >= 3 || !dwarf_strict)
18056 {
18057 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18058 have_address = 1;
18059 break;
18060 }
18061 else
18062 return NULL;
18063 }
18064 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18065 the single argument passed by consumer. */
18066 else if (context != NULL
18067 && context->placeholder_arg
18068 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18069 && want_address == 0)
18070 {
18071 ret = new_loc_descr (DW_OP_pick, 0, 0);
18072 ret->frame_offset_rel = 1;
18073 context->placeholder_seen = true;
18074 break;
18075 }
18076 else
18077 expansion_failed (loc, NULL_RTX,
18078 "PLACEHOLDER_EXPR for an unexpected type");
18079 break;
18080
18081 case CALL_EXPR:
18082 {
18083 const int nargs = call_expr_nargs (loc);
18084 tree callee = get_callee_fndecl (loc);
18085 int i;
18086 dw_die_ref dwarf_proc;
18087
18088 if (callee == NULL_TREE)
18089 goto call_expansion_failed;
18090
18091 /* We handle only functions that return an integer. */
18092 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18093 goto call_expansion_failed;
18094
18095 dwarf_proc = function_to_dwarf_procedure (callee);
18096 if (dwarf_proc == NULL)
18097 goto call_expansion_failed;
18098
18099 /* Evaluate arguments right-to-left so that the first argument will
18100 be the top-most one on the stack. */
18101 for (i = nargs - 1; i >= 0; --i)
18102 {
18103 dw_loc_descr_ref loc_descr
18104 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18105 context);
18106
18107 if (loc_descr == NULL)
18108 goto call_expansion_failed;
18109
18110 add_loc_descr (&ret, loc_descr);
18111 }
18112
18113 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18114 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18115 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18116 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18117 add_loc_descr (&ret, ret1);
18118 break;
18119
18120 call_expansion_failed:
18121 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18122 /* There are no opcodes for these operations. */
18123 return 0;
18124 }
18125
18126 case PREINCREMENT_EXPR:
18127 case PREDECREMENT_EXPR:
18128 case POSTINCREMENT_EXPR:
18129 case POSTDECREMENT_EXPR:
18130 expansion_failed (loc, NULL_RTX, "PRE/POST INCREMENT/DECREMENT");
18131 /* There are no opcodes for these operations. */
18132 return 0;
18133
18134 case ADDR_EXPR:
18135 /* If we already want an address, see if there is an INDIRECT_REF inside,
18136 e.g. for &this->field. */
18137 if (want_address)
18138 {
18139 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18140 (loc, want_address == 2, context);
18141 if (list_ret)
18142 have_address = 1;
18143 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18144 && (ret = cst_pool_loc_descr (loc)))
18145 have_address = 1;
18146 }
18147 /* Otherwise, process the argument and look for the address. */
18148 if (!list_ret && !ret)
18149 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18150 else
18151 {
18152 if (want_address)
18153 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18154 return NULL;
18155 }
18156 break;
18157
18158 case VAR_DECL:
18159 if (DECL_THREAD_LOCAL_P (loc))
18160 {
18161 rtx rtl;
18162 enum dwarf_location_atom tls_op;
18163 enum dtprel_bool dtprel = dtprel_false;
18164
18165 if (targetm.have_tls)
18166 {
18167 /* If this is not defined, we have no way to emit the
18168 data. */
18169 if (!targetm.asm_out.output_dwarf_dtprel)
18170 return 0;
18171
18172 /* The way DW_OP_GNU_push_tls_address is specified, we
18173 can only look up addresses of objects in the current
18174 module. We used DW_OP_addr as first op, but that's
18175 wrong, because DW_OP_addr is relocated by the debug
18176 info consumer, while DW_OP_GNU_push_tls_address
18177 operand shouldn't be. */
18178 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18179 return 0;
18180 dtprel = dtprel_true;
18181 /* We check for DWARF 5 here because gdb did not implement
18182 DW_OP_form_tls_address until after 7.12. */
18183 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18184 : DW_OP_GNU_push_tls_address);
18185 }
18186 else
18187 {
18188 if (!targetm.emutls.debug_form_tls_address
18189 || !(dwarf_version >= 3 || !dwarf_strict))
18190 return 0;
18191 /* We stuffed the control variable into the DECL_VALUE_EXPR
18192 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18193 no longer appear in gimple code. We used the control
18194 variable in specific so that we could pick it up here. */
18195 loc = DECL_VALUE_EXPR (loc);
18196 tls_op = DW_OP_form_tls_address;
18197 }
18198
18199 rtl = rtl_for_decl_location (loc);
18200 if (rtl == NULL_RTX)
18201 return 0;
18202
18203 if (!MEM_P (rtl))
18204 return 0;
18205 rtl = XEXP (rtl, 0);
18206 if (! CONSTANT_P (rtl))
18207 return 0;
18208
18209 ret = new_addr_loc_descr (rtl, dtprel);
18210 ret1 = new_loc_descr (tls_op, 0, 0);
18211 add_loc_descr (&ret, ret1);
18212
18213 have_address = 1;
18214 break;
18215 }
18216 /* FALLTHRU */
18217
18218 case PARM_DECL:
18219 if (context != NULL && context->dpi != NULL
18220 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18221 {
18222 /* We are generating code for a DWARF procedure and we want to access
18223 one of its arguments: find the appropriate argument offset and let
18224 the resolve_args_picking pass compute the offset that complies
18225 with the stack frame size. */
18226 unsigned i = 0;
18227 tree cursor;
18228
18229 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18230 cursor != NULL_TREE && cursor != loc;
18231 cursor = TREE_CHAIN (cursor), ++i)
18232 ;
18233 /* If we are translating a DWARF procedure, all referenced parameters
18234 must belong to the current function. */
18235 gcc_assert (cursor != NULL_TREE);
18236
18237 ret = new_loc_descr (DW_OP_pick, i, 0);
18238 ret->frame_offset_rel = 1;
18239 break;
18240 }
18241 /* FALLTHRU */
18242
18243 case RESULT_DECL:
18244 if (DECL_HAS_VALUE_EXPR_P (loc))
18245 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18246 want_address, context);
18247 /* FALLTHRU */
18248
18249 case FUNCTION_DECL:
18250 {
18251 rtx rtl;
18252 var_loc_list *loc_list = lookup_decl_loc (loc);
18253
18254 if (loc_list && loc_list->first)
18255 {
18256 list_ret = dw_loc_list (loc_list, loc, want_address);
18257 have_address = want_address != 0;
18258 break;
18259 }
18260 rtl = rtl_for_decl_location (loc);
18261 if (rtl == NULL_RTX)
18262 {
18263 if (TREE_CODE (loc) != FUNCTION_DECL
18264 && early_dwarf
18265 && current_function_decl
18266 && want_address != 1
18267 && ! DECL_IGNORED_P (loc)
18268 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18269 || POINTER_TYPE_P (TREE_TYPE (loc)))
18270 && DECL_CONTEXT (loc) == current_function_decl
18271 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18272 <= DWARF2_ADDR_SIZE))
18273 {
18274 dw_die_ref ref = lookup_decl_die (loc);
18275 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18276 if (ref)
18277 {
18278 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18279 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18280 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18281 }
18282 else
18283 {
18284 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18285 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18286 }
18287 break;
18288 }
18289 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18290 return 0;
18291 }
18292 else if (CONST_INT_P (rtl))
18293 {
18294 HOST_WIDE_INT val = INTVAL (rtl);
18295 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18296 val &= GET_MODE_MASK (DECL_MODE (loc));
18297 ret = int_loc_descriptor (val);
18298 }
18299 else if (GET_CODE (rtl) == CONST_STRING)
18300 {
18301 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18302 return 0;
18303 }
18304 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18305 ret = new_addr_loc_descr (rtl, dtprel_false);
18306 else
18307 {
18308 machine_mode mode, mem_mode;
18309
18310 /* Certain constructs can only be represented at top-level. */
18311 if (want_address == 2)
18312 {
18313 ret = loc_descriptor (rtl, VOIDmode,
18314 VAR_INIT_STATUS_INITIALIZED);
18315 have_address = 1;
18316 }
18317 else
18318 {
18319 mode = GET_MODE (rtl);
18320 mem_mode = VOIDmode;
18321 if (MEM_P (rtl))
18322 {
18323 mem_mode = mode;
18324 mode = get_address_mode (rtl);
18325 rtl = XEXP (rtl, 0);
18326 have_address = 1;
18327 }
18328 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18329 VAR_INIT_STATUS_INITIALIZED);
18330 }
18331 if (!ret)
18332 expansion_failed (loc, rtl,
18333 "failed to produce loc descriptor for rtl");
18334 }
18335 }
18336 break;
18337
18338 case MEM_REF:
18339 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18340 {
18341 have_address = 1;
18342 goto do_plus;
18343 }
18344 /* Fallthru. */
18345 case INDIRECT_REF:
18346 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18347 have_address = 1;
18348 break;
18349
18350 case TARGET_MEM_REF:
18351 case SSA_NAME:
18352 case DEBUG_EXPR_DECL:
18353 return NULL;
18354
18355 case COMPOUND_EXPR:
18356 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18357 context);
18358
18359 CASE_CONVERT:
18360 case VIEW_CONVERT_EXPR:
18361 case SAVE_EXPR:
18362 case MODIFY_EXPR:
18363 case NON_LVALUE_EXPR:
18364 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18365 context);
18366
18367 case COMPONENT_REF:
18368 case BIT_FIELD_REF:
18369 case ARRAY_REF:
18370 case ARRAY_RANGE_REF:
18371 case REALPART_EXPR:
18372 case IMAGPART_EXPR:
18373 {
18374 tree obj, offset;
18375 poly_int64 bitsize, bitpos, bytepos;
18376 machine_mode mode;
18377 int unsignedp, reversep, volatilep = 0;
18378
18379 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18380 &unsignedp, &reversep, &volatilep);
18381
18382 gcc_assert (obj != loc);
18383
18384 list_ret = loc_list_from_tree_1 (obj,
18385 want_address == 2
18386 && known_eq (bitpos, 0)
18387 && !offset ? 2 : 1,
18388 context);
18389 /* TODO: We can extract the value of the small expression via shifting even
18390 for nonzero bitpos. */
18391 if (list_ret == 0)
18392 return 0;
18393 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18394 || !multiple_p (bitsize, BITS_PER_UNIT))
18395 {
18396 expansion_failed (loc, NULL_RTX,
18397 "bitfield access");
18398 return 0;
18399 }
18400
18401 if (offset != NULL_TREE)
18402 {
18403 /* Variable offset. */
18404 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18405 if (list_ret1 == 0)
18406 return 0;
18407 add_loc_list (&list_ret, list_ret1);
18408 if (!list_ret)
18409 return 0;
18410 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18411 }
18412
18413 HOST_WIDE_INT value;
18414 if (bytepos.is_constant (&value) && value > 0)
18415 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18416 value, 0));
18417 else if (maybe_ne (bytepos, 0))
18418 loc_list_plus_const (list_ret, bytepos);
18419
18420 have_address = 1;
18421 break;
18422 }
18423
18424 case INTEGER_CST:
18425 if ((want_address || !tree_fits_shwi_p (loc))
18426 && (ret = cst_pool_loc_descr (loc)))
18427 have_address = 1;
18428 else if (want_address == 2
18429 && tree_fits_shwi_p (loc)
18430 && (ret = address_of_int_loc_descriptor
18431 (int_size_in_bytes (TREE_TYPE (loc)),
18432 tree_to_shwi (loc))))
18433 have_address = 1;
18434 else if (tree_fits_shwi_p (loc))
18435 ret = int_loc_descriptor (tree_to_shwi (loc));
18436 else if (tree_fits_uhwi_p (loc))
18437 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18438 else
18439 {
18440 expansion_failed (loc, NULL_RTX,
18441 "Integer operand is not host integer");
18442 return 0;
18443 }
18444 break;
18445
18446 case CONSTRUCTOR:
18447 case REAL_CST:
18448 case STRING_CST:
18449 case COMPLEX_CST:
18450 if ((ret = cst_pool_loc_descr (loc)))
18451 have_address = 1;
18452 else if (TREE_CODE (loc) == CONSTRUCTOR)
18453 {
18454 tree type = TREE_TYPE (loc);
18455 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18456 unsigned HOST_WIDE_INT offset = 0;
18457 unsigned HOST_WIDE_INT cnt;
18458 constructor_elt *ce;
18459
18460 if (TREE_CODE (type) == RECORD_TYPE)
18461 {
18462 /* This is very limited, but it's enough to output
18463 pointers to member functions, as long as the
18464 referenced function is defined in the current
18465 translation unit. */
18466 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18467 {
18468 tree val = ce->value;
18469
18470 tree field = ce->index;
18471
18472 if (val)
18473 STRIP_NOPS (val);
18474
18475 if (!field || DECL_BIT_FIELD (field))
18476 {
18477 expansion_failed (loc, NULL_RTX,
18478 "bitfield in record type constructor");
18479 size = offset = (unsigned HOST_WIDE_INT)-1;
18480 ret = NULL;
18481 break;
18482 }
18483
18484 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18485 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18486 gcc_assert (pos + fieldsize <= size);
18487 if (pos < offset)
18488 {
18489 expansion_failed (loc, NULL_RTX,
18490 "out-of-order fields in record constructor");
18491 size = offset = (unsigned HOST_WIDE_INT)-1;
18492 ret = NULL;
18493 break;
18494 }
18495 if (pos > offset)
18496 {
18497 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18498 add_loc_descr (&ret, ret1);
18499 offset = pos;
18500 }
18501 if (val && fieldsize != 0)
18502 {
18503 ret1 = loc_descriptor_from_tree (val, want_address, context);
18504 if (!ret1)
18505 {
18506 expansion_failed (loc, NULL_RTX,
18507 "unsupported expression in field");
18508 size = offset = (unsigned HOST_WIDE_INT)-1;
18509 ret = NULL;
18510 break;
18511 }
18512 add_loc_descr (&ret, ret1);
18513 }
18514 if (fieldsize)
18515 {
18516 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18517 add_loc_descr (&ret, ret1);
18518 offset = pos + fieldsize;
18519 }
18520 }
18521
18522 if (offset != size)
18523 {
18524 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18525 add_loc_descr (&ret, ret1);
18526 offset = size;
18527 }
18528
18529 have_address = !!want_address;
18530 }
18531 else
18532 expansion_failed (loc, NULL_RTX,
18533 "constructor of non-record type");
18534 }
18535 else
18536 /* We can construct small constants here using int_loc_descriptor. */
18537 expansion_failed (loc, NULL_RTX,
18538 "constructor or constant not in constant pool");
18539 break;
18540
18541 case TRUTH_AND_EXPR:
18542 case TRUTH_ANDIF_EXPR:
18543 case BIT_AND_EXPR:
18544 op = DW_OP_and;
18545 goto do_binop;
18546
18547 case TRUTH_XOR_EXPR:
18548 case BIT_XOR_EXPR:
18549 op = DW_OP_xor;
18550 goto do_binop;
18551
18552 case TRUTH_OR_EXPR:
18553 case TRUTH_ORIF_EXPR:
18554 case BIT_IOR_EXPR:
18555 op = DW_OP_or;
18556 goto do_binop;
18557
18558 case FLOOR_DIV_EXPR:
18559 case CEIL_DIV_EXPR:
18560 case ROUND_DIV_EXPR:
18561 case TRUNC_DIV_EXPR:
18562 case EXACT_DIV_EXPR:
18563 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18564 return 0;
18565 op = DW_OP_div;
18566 goto do_binop;
18567
18568 case MINUS_EXPR:
18569 op = DW_OP_minus;
18570 goto do_binop;
18571
18572 case FLOOR_MOD_EXPR:
18573 case CEIL_MOD_EXPR:
18574 case ROUND_MOD_EXPR:
18575 case TRUNC_MOD_EXPR:
18576 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18577 {
18578 op = DW_OP_mod;
18579 goto do_binop;
18580 }
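 /* Signed modulus: DW_OP_mod is only emitted for the unsigned case above,
    so compute op0 - (op0 / op1) * op1 instead, keeping copies of both
    operands on the stack with the two DW_OP_over operations below.  */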
18581 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18582 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18583 if (list_ret == 0 || list_ret1 == 0)
18584 return 0;
18585
18586 add_loc_list (&list_ret, list_ret1);
18587 if (list_ret == 0)
18588 return 0;
18589 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18590 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18591 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18592 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18593 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18594 break;
18595
18596 case MULT_EXPR:
18597 op = DW_OP_mul;
18598 goto do_binop;
18599
18600 case LSHIFT_EXPR:
18601 op = DW_OP_shl;
18602 goto do_binop;
18603
18604 case RSHIFT_EXPR:
18605 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18606 goto do_binop;
18607
18608 case POINTER_PLUS_EXPR:
18609 case PLUS_EXPR:
18610 do_plus:
18611 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18612 {
18613 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18614 smarter to encode their opposite. The DW_OP_plus_uconst operation
18615 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18616 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18617 bytes, Y being the size of the operation that pushes the opposite
18618 of the addend. So let's choose the smallest representation. */
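 /* Concrete illustration: for an addend of -1 seen as an unsigned
    HOST_WIDE_INT on a 64-bit target, DW_OP_plus_uconst costs 1 + 10 bytes
    (a 10-byte ULEB128), while "DW_OP_lit1; DW_OP_minus" costs only 2.  */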
18619 const tree tree_addend = TREE_OPERAND (loc, 1);
18620 offset_int wi_addend;
18621 HOST_WIDE_INT shwi_addend;
18622 dw_loc_descr_ref loc_naddend;
18623
18624 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18625 if (list_ret == 0)
18626 return 0;
18627
18628 /* Try to get the literal to push. It is the opposite of the addend,
18629 so as we rely on wrapping during DWARF evaluation, first decode
18630 the literal as a "DWARF-sized" signed number. */
18631 wi_addend = wi::to_offset (tree_addend);
18632 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18633 shwi_addend = wi_addend.to_shwi ();
18634 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18635 ? int_loc_descriptor (-shwi_addend)
18636 : NULL;
18637
18638 if (loc_naddend != NULL
18639 && ((unsigned) size_of_uleb128 (shwi_addend)
18640 > size_of_loc_descr (loc_naddend)))
18641 {
18642 add_loc_descr_to_each (list_ret, loc_naddend);
18643 add_loc_descr_to_each (list_ret,
18644 new_loc_descr (DW_OP_minus, 0, 0));
18645 }
18646 else
18647 {
18648 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18649 {
18650 loc_naddend = loc_cur;
18651 loc_cur = loc_cur->dw_loc_next;
18652 ggc_free (loc_naddend);
18653 }
18654 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18655 }
18656 break;
18657 }
18658
18659 op = DW_OP_plus;
18660 goto do_binop;
18661
18662 case LE_EXPR:
18663 op = DW_OP_le;
18664 goto do_comp_binop;
18665
18666 case GE_EXPR:
18667 op = DW_OP_ge;
18668 goto do_comp_binop;
18669
18670 case LT_EXPR:
18671 op = DW_OP_lt;
18672 goto do_comp_binop;
18673
18674 case GT_EXPR:
18675 op = DW_OP_gt;
18676 goto do_comp_binop;
18677
18678 do_comp_binop:
18679 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18680 {
18681 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18682 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18683 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18684 TREE_CODE (loc));
18685 break;
18686 }
18687 else
18688 goto do_binop;
18689
18690 case EQ_EXPR:
18691 op = DW_OP_eq;
18692 goto do_binop;
18693
18694 case NE_EXPR:
18695 op = DW_OP_ne;
18696 goto do_binop;
18697
18698 do_binop:
18699 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18700 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18701 if (list_ret == 0 || list_ret1 == 0)
18702 return 0;
18703
18704 add_loc_list (&list_ret, list_ret1);
18705 if (list_ret == 0)
18706 return 0;
18707 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18708 break;
18709
18710 case TRUTH_NOT_EXPR:
18711 case BIT_NOT_EXPR:
18712 op = DW_OP_not;
18713 goto do_unop;
18714
18715 case ABS_EXPR:
18716 op = DW_OP_abs;
18717 goto do_unop;
18718
18719 case NEGATE_EXPR:
18720 op = DW_OP_neg;
18721 goto do_unop;
18722
18723 do_unop:
18724 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18725 if (list_ret == 0)
18726 return 0;
18727
18728 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18729 break;
18730
18731 case MIN_EXPR:
18732 case MAX_EXPR:
18733 {
18734 const enum tree_code code =
18735 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18736
18737 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18738 build2 (code, integer_type_node,
18739 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18740 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18741 }
18742
18743 /* fall through */
18744
18745 case COND_EXPR:
18746 {
18747 dw_loc_descr_ref lhs
18748 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18749 dw_loc_list_ref rhs
18750 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18751 dw_loc_descr_ref bra_node, jump_node, tmp;
18752
18753 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18754 if (list_ret == 0 || lhs == 0 || rhs == 0)
18755 return 0;
18756
18757 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18758 add_loc_descr_to_each (list_ret, bra_node);
18759
18760 add_loc_list (&list_ret, rhs);
18761 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18762 add_loc_descr_to_each (list_ret, jump_node);
18763
18764 add_loc_descr_to_each (list_ret, lhs);
18765 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18766 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18767
18768 /* ??? Need a node to point the skip at. Use a nop. */
18769 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18770 add_loc_descr_to_each (list_ret, tmp);
18771 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18772 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
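 /* The emitted operations therefore have the shape
        <condition>
        DW_OP_bra   (branches to LHS when the condition is non-zero)
        <operand 2>
        DW_OP_skip  (jumps to the final nop)
        LHS: <operand 1>
        DW_OP_nop
    so both paths join at the trailing nop.  */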
18773 }
18774 break;
18775
18776 case FIX_TRUNC_EXPR:
18777 return 0;
18778
18779 default:
18780 /* Leave front-end specific codes as simply unknown. This comes
18781 up, for instance, with the C STMT_EXPR. */
18782 if ((unsigned int) TREE_CODE (loc)
18783 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18784 {
18785 expansion_failed (loc, NULL_RTX,
18786 "language specific tree node");
18787 return 0;
18788 }
18789
18790 /* Otherwise this is a generic code; we should just list all of
18791 these explicitly. We forgot one. */
18792 if (flag_checking)
18793 gcc_unreachable ();
18794
18795 /* In a release build, we want to degrade gracefully: better to
18796 generate incomplete debugging information than to crash. */
18797 return NULL;
18798 }
18799
18800 if (!ret && !list_ret)
18801 return 0;
18802
18803 if (want_address == 2 && !have_address
18804 && (dwarf_version >= 4 || !dwarf_strict))
18805 {
18806 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18807 {
18808 expansion_failed (loc, NULL_RTX,
18809 "DWARF address size mismatch");
18810 return 0;
18811 }
18812 if (ret)
18813 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18814 else
18815 add_loc_descr_to_each (list_ret,
18816 new_loc_descr (DW_OP_stack_value, 0, 0));
18817 have_address = 1;
18818 }
18819 /* Show if we can't fill the request for an address. */
18820 if (want_address && !have_address)
18821 {
18822 expansion_failed (loc, NULL_RTX,
18823 "Want address and only have value");
18824 return 0;
18825 }
18826
18827 gcc_assert (!ret || !list_ret);
18828
18829 /* If we've got an address and don't want one, dereference. */
18830 if (!want_address && have_address)
18831 {
18832 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18833
18834 if (size > DWARF2_ADDR_SIZE || size == -1)
18835 {
18836 expansion_failed (loc, NULL_RTX,
18837 "DWARF address size mismatch");
18838 return 0;
18839 }
18840 else if (size == DWARF2_ADDR_SIZE)
18841 op = DW_OP_deref;
18842 else
18843 op = DW_OP_deref_size;
18844
18845 if (ret)
18846 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18847 else
18848 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18849 }
18850 if (ret)
18851 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18852
18853 return list_ret;
18854 }
18855
18856 /* Likewise, but strip useless DW_OP_nop operations in the resulting
18857 expressions. */
18858
18859 static dw_loc_list_ref
18860 loc_list_from_tree (tree loc, int want_address,
18861 struct loc_descr_context *context)
18862 {
18863 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
18864
18865 for (dw_loc_list_ref loc_cur = result;
18866 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
18867 loc_descr_without_nops (loc_cur->expr);
18868 return result;
18869 }
18870
18871 /* Same as above but return only single location expression. */
18872 static dw_loc_descr_ref
18873 loc_descriptor_from_tree (tree loc, int want_address,
18874 struct loc_descr_context *context)
18875 {
18876 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
18877 if (!ret)
18878 return NULL;
18879 if (ret->dw_loc_next)
18880 {
18881 expansion_failed (loc, NULL_RTX,
18882 "Location list where only loc descriptor needed");
18883 return NULL;
18884 }
18885 return ret->expr;
18886 }
18887
18888 /* Given a value, round it up to the lowest multiple of `boundary'
18889 which is not less than the value itself. */
18890
18891 static inline HOST_WIDE_INT
18892 ceiling (HOST_WIDE_INT value, unsigned int boundary)
18893 {
18894 return (((value + boundary - 1) / boundary) * boundary);
18895 }
18896
18897 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
18898 pointer to the declared type for the relevant field variable, or return
18899 `integer_type_node' if the given node turns out to be an
18900 ERROR_MARK node. */
18901
18902 static inline tree
18903 field_type (const_tree decl)
18904 {
18905 tree type;
18906
18907 if (TREE_CODE (decl) == ERROR_MARK)
18908 return integer_type_node;
18909
18910 type = DECL_BIT_FIELD_TYPE (decl);
18911 if (type == NULL_TREE)
18912 type = TREE_TYPE (decl);
18913
18914 return type;
18915 }
18916
18917 /* Given a pointer to a tree node, return the alignment in bits for
18918 it, or else return BITS_PER_WORD if the node actually turns out to
18919 be an ERROR_MARK node. */
18920
18921 static inline unsigned
18922 simple_type_align_in_bits (const_tree type)
18923 {
18924 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
18925 }
18926
18927 static inline unsigned
18928 simple_decl_align_in_bits (const_tree decl)
18929 {
18930 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
18931 }
18932
18933 /* Return the result of rounding T up to ALIGN. */
18934
18935 static inline offset_int
18936 round_up_to_align (const offset_int &t, unsigned int align)
18937 {
18938 return wi::udiv_trunc (t + align - 1, align) * align;
18939 }
18940
18941 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
18942 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
18943 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
18944 if we fail to return the size in one of these two forms. */
18945
18946 static dw_loc_descr_ref
18947 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
18948 {
18949 tree tree_size;
18950 struct loc_descr_context ctx;
18951
18952 /* Prefer returning a constant integer, if possible. */
18953 *cst_size = int_size_in_bytes (type);
18954 if (*cst_size != -1)
18955 return NULL;
18956
18957 ctx.context_type = const_cast<tree> (type);
18958 ctx.base_decl = NULL_TREE;
18959 ctx.dpi = NULL;
18960 ctx.placeholder_arg = false;
18961 ctx.placeholder_seen = false;
18962
18963 type = TYPE_MAIN_VARIANT (type);
18964 tree_size = TYPE_SIZE_UNIT (type);
18965 return ((tree_size != NULL_TREE)
18966 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
18967 : NULL);
18968 }
18969
18970 /* Helper structure for RECORD_TYPE processing. */
18971 struct vlr_context
18972 {
18973 /* Root RECORD_TYPE. It is needed to generate data member location
18974 descriptions in variable-length records (VLR), but also to cope with
18975 variants, which are composed of nested structures multiplexed with
18976 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
18977 function processing a FIELD_DECL, it is required to be non null. */
18978 tree struct_type;
18979 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
18980 QUAL_UNION_TYPE), this holds an expression that computes the offset for
18981 this variant part as part of the root record (in storage units). For
18982 regular records, it must be NULL_TREE. */
18983 tree variant_part_offset;
18984 };
18985
18986 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
18987 addressed byte of the "containing object" for the given FIELD_DECL. If
18988 possible, return a native constant through CST_OFFSET (in which case NULL is
18989 returned); otherwise return a DWARF expression that computes the offset.
18990
18991 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
18992 that offset is, either because the argument turns out to be a pointer to an
18993 ERROR_MARK node, or because the offset expression is too complex for us.
18994
18995 CTX is required: see the comment for VLR_CONTEXT. */
18996
18997 static dw_loc_descr_ref
18998 field_byte_offset (const_tree decl, struct vlr_context *ctx,
18999 HOST_WIDE_INT *cst_offset)
19000 {
19001 tree tree_result;
19002 dw_loc_list_ref loc_result;
19003
19004 *cst_offset = 0;
19005
19006 if (TREE_CODE (decl) == ERROR_MARK)
19007 return NULL;
19008 else
19009 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
19010
19011 /* We cannot handle variable bit offsets at the moment, so abort if that is
19012 the case. */
19013 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
19014 return NULL;
19015
19016 #ifdef PCC_BITFIELD_TYPE_MATTERS
19017 /* We used to handle only constant offsets in all cases. Now, we properly
19018 handle dynamic byte offsets only when the PCC bitfield type doesn't
19019 matter. */
19020 if (PCC_BITFIELD_TYPE_MATTERS
19021 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
19022 {
19023 offset_int object_offset_in_bits;
19024 offset_int object_offset_in_bytes;
19025 offset_int bitpos_int;
19026 tree type;
19027 tree field_size_tree;
19028 offset_int deepest_bitpos;
19029 offset_int field_size_in_bits;
19030 unsigned int type_align_in_bits;
19031 unsigned int decl_align_in_bits;
19032 offset_int type_size_in_bits;
19033
19034 bitpos_int = wi::to_offset (bit_position (decl));
19035 type = field_type (decl);
19036 type_size_in_bits = offset_int_type_size_in_bits (type);
19037 type_align_in_bits = simple_type_align_in_bits (type);
19038
19039 field_size_tree = DECL_SIZE (decl);
19040
19041 /* The size could be unspecified if there was an error, or for
19042 a flexible array member. */
19043 if (!field_size_tree)
19044 field_size_tree = bitsize_zero_node;
19045
19046 /* If the size of the field is not constant, use the type size. */
19047 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19048 field_size_in_bits = wi::to_offset (field_size_tree);
19049 else
19050 field_size_in_bits = type_size_in_bits;
19051
19052 decl_align_in_bits = simple_decl_align_in_bits (decl);
19053
19054 /* The GCC front-end doesn't make any attempt to keep track of the
19055 starting bit offset (relative to the start of the containing
19056 structure type) of the hypothetical "containing object" for a
19057 bit-field. Thus, when computing the byte offset value for the
19058 start of the "containing object" of a bit-field, we must deduce
19059 this information on our own. This can be rather tricky to do in
19060 some cases. For example, handling the following structure type
19061 definition when compiling for an i386/i486 target (which only
19062 aligns long long's to 32-bit boundaries) can be very tricky:
19063
19064 struct S { int field1; long long field2:31; };
19065
19066 Fortunately, there is a simple rule-of-thumb which can be used
19067 in such cases. When compiling for an i386/i486, GCC will
19068 allocate 8 bytes for the structure shown above. It decides to
19069 do this based upon one simple rule for bit-field allocation.
19070 GCC allocates each "containing object" for each bit-field at
19071 the first (i.e. lowest addressed) legitimate alignment boundary
19072 (based upon the required minimum alignment for the declared
19073 type of the field) which it can possibly use, subject to the
19074 condition that there is still enough available space remaining
19075 in the containing object (when allocated at the selected point)
19076 to fully accommodate all of the bits of the bit-field itself.
19077
19078 This simple rule makes it obvious why GCC allocates 8 bytes for
19079 each object of the structure type shown above. When looking
19080 for a place to allocate the "containing object" for `field2',
19081 the compiler simply tries to allocate a 64-bit "containing
19082 object" at each successive 32-bit boundary (starting at zero)
19083 until it finds a place to allocate that 64-bit field such that
19084 at least 31 contiguous (and previously unallocated) bits remain
19085 within that selected 64-bit field. (As it turns out, for the
19086 example above, the compiler finds it is OK to allocate the
19087 "containing object" 64-bit field at bit-offset zero within the
19088 structure type.)
19089
19090 Here we attempt to work backwards from the limited set of facts
19091 we're given, and we try to deduce from those facts, where GCC
19092 must have believed that the containing object started (within
19093 the structure type). The value we deduce is then used (by the
19094 callers of this routine) to generate DW_AT_location and
19095 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19096 the case of DW_AT_location, regular fields as well). */
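 /* Worked instance of the rule, using the i386 example above: for "field2",
    bitpos_int is 32, field_size_in_bits is 31 and type_size_in_bits is 64,
    so deepest_bitpos is 63 and the first guess 63 - 64 = -1 is rounded up
    to the 32-bit type alignment, giving bit offset 0: the containing
    64-bit object indeed starts at the beginning of the structure.  */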
19097
19098 /* Figure out the bit-distance from the start of the structure to
19099 the "deepest" bit of the bit-field. */
19100 deepest_bitpos = bitpos_int + field_size_in_bits;
19101
19102 /* This is the tricky part. Use some fancy footwork to deduce
19103 where the lowest addressed bit of the containing object must
19104 be. */
19105 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19106
19107 /* Round up to type_align by default. This works best for
19108 bitfields. */
19109 object_offset_in_bits
19110 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19111
19112 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19113 {
19114 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19115
19116 /* Round up to decl_align instead. */
19117 object_offset_in_bits
19118 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19119 }
19120
19121 object_offset_in_bytes
19122 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19123 if (ctx->variant_part_offset == NULL_TREE)
19124 {
19125 *cst_offset = object_offset_in_bytes.to_shwi ();
19126 return NULL;
19127 }
19128 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19129 }
19130 else
19131 #endif /* PCC_BITFIELD_TYPE_MATTERS */
19132 tree_result = byte_position (decl);
19133
19134 if (ctx->variant_part_offset != NULL_TREE)
19135 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19136 ctx->variant_part_offset, tree_result);
19137
19138 /* If the byte offset is a constant, it's simpler to handle a native
19139 constant rather than a DWARF expression. */
19140 if (TREE_CODE (tree_result) == INTEGER_CST)
19141 {
19142 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19143 return NULL;
19144 }
19145 struct loc_descr_context loc_ctx = {
19146 ctx->struct_type, /* context_type */
19147 NULL_TREE, /* base_decl */
19148 NULL, /* dpi */
19149 false, /* placeholder_arg */
19150 false /* placeholder_seen */
19151 };
19152 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19153
19154 /* We want a DWARF expression: abort if we only have a location list with
19155 multiple elements. */
19156 if (!loc_result || !single_element_loc_list_p (loc_result))
19157 return NULL;
19158 else
19159 return loc_result->expr;
19160 }
19161 \f
19162 /* The following routines define various Dwarf attributes and any data
19163 associated with them. */
19164
19165 /* Add a location description attribute value to a DIE.
19166
19167 This emits location attributes suitable for whole variables and
19168 whole parameters. Note that the location attributes for struct fields are
19169 generated by the routine `data_member_location_attribute' below. */
19170
19171 static inline void
19172 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19173 dw_loc_list_ref descr)
19174 {
19175 bool check_no_locviews = true;
19176 if (descr == 0)
19177 return;
19178 if (single_element_loc_list_p (descr))
19179 add_AT_loc (die, attr_kind, descr->expr);
19180 else
19181 {
19182 add_AT_loc_list (die, attr_kind, descr);
19183 gcc_assert (descr->ll_symbol);
19184 if (attr_kind == DW_AT_location && descr->vl_symbol
19185 && dwarf2out_locviews_in_attribute ())
19186 {
19187 add_AT_view_list (die, DW_AT_GNU_locviews);
19188 check_no_locviews = false;
19189 }
19190 }
19191
19192 if (check_no_locviews)
19193 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19194 }
19195
19196 /* Add DW_AT_accessibility attribute to DIE if needed. */
19197
19198 static void
19199 add_accessibility_attribute (dw_die_ref die, tree decl)
19200 {
19201 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19202 children, otherwise the default is DW_ACCESS_public. In DWARF2
19203 the default has always been DW_ACCESS_public. */
19204 if (TREE_PROTECTED (decl))
19205 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19206 else if (TREE_PRIVATE (decl))
19207 {
19208 if (dwarf_version == 2
19209 || die->die_parent == NULL
19210 || die->die_parent->die_tag != DW_TAG_class_type)
19211 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19212 }
19213 else if (dwarf_version > 2
19214 && die->die_parent
19215 && die->die_parent->die_tag == DW_TAG_class_type)
19216 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19217 }
19218
19219 /* Attach the specialized form of location attribute used for data members of
19220 struct and union types. In the special case of a FIELD_DECL node which
19221 represents a bit-field, the "offset" part of this special location
19222 descriptor must indicate the distance in bytes from the lowest-addressed
19223 byte of the containing struct or union type to the lowest-addressed byte of
19224 the "containing object" for the bit-field. (See the `field_byte_offset'
19225 function above).
19226
19227 For any given bit-field, the "containing object" is a hypothetical object
19228 (of some integral or enum type) within which the given bit-field lives. The
19229 type of this hypothetical "containing object" is always the same as the
19230 declared type of the individual bit-field itself (for GCC anyway... the
19231 DWARF spec doesn't actually mandate this). Note that it is the size (in
19232 bytes) of the hypothetical "containing object" which will be given in the
19233 DW_AT_byte_size attribute for this bit-field. (See the
19234 `byte_size_attribute' function below.) It is also used when calculating the
19235 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19236 function below.)
19237
19238 CTX is required: see the comment for VLR_CONTEXT. */
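/* As a hedged illustration of the notions above (assuming a target where
   int is 4 bytes): for

       struct S { unsigned int u : 3; };

   the "containing object" of U is a hypothetical unsigned int at byte
   offset 0, so the bit-field DIE would get DW_AT_byte_size 4 and a
   DW_AT_data_member_location of 0.  */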
19239
19240 static void
19241 add_data_member_location_attribute (dw_die_ref die,
19242 tree decl,
19243 struct vlr_context *ctx)
19244 {
19245 HOST_WIDE_INT offset;
19246 dw_loc_descr_ref loc_descr = 0;
19247
19248 if (TREE_CODE (decl) == TREE_BINFO)
19249 {
19250 /* We're working on the TAG_inheritance for a base class. */
19251 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19252 {
19253 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19254 aren't at a fixed offset from all (sub)objects of the same
19255 type. We need to extract the appropriate offset from our
19256 vtable. The following dwarf expression means
19257
19258 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19259
19260 This is specific to the V3 ABI, of course. */
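	  /* In other words, the sequence built just below is, roughly:
	       DW_OP_dup; DW_OP_deref; <push -Offset>; DW_OP_minus;
	       DW_OP_deref; DW_OP_plus
	     where int_loc_descriptor chooses a compact encoding for the
	     pushed constant, so the exact opcode used for it may vary.  */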
19261
19262 dw_loc_descr_ref tmp;
19263
19264 /* Make a copy of the object address. */
19265 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19266 add_loc_descr (&loc_descr, tmp);
19267
19268 /* Extract the vtable address. */
19269 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19270 add_loc_descr (&loc_descr, tmp);
19271
19272 /* Calculate the address of the offset. */
19273 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19274 gcc_assert (offset < 0);
19275
19276 tmp = int_loc_descriptor (-offset);
19277 add_loc_descr (&loc_descr, tmp);
19278 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19279 add_loc_descr (&loc_descr, tmp);
19280
19281 /* Extract the offset. */
19282 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19283 add_loc_descr (&loc_descr, tmp);
19284
19285 /* Add it to the object address. */
19286 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19287 add_loc_descr (&loc_descr, tmp);
19288 }
19289 else
19290 offset = tree_to_shwi (BINFO_OFFSET (decl));
19291 }
19292 else
19293 {
19294 loc_descr = field_byte_offset (decl, ctx, &offset);
19295
19296 /* If loc_descr is available then we know the field offset is dynamic.
19297 However, GDB does not handle dynamic field offsets very well at the
19298 moment. */
19299 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19300 {
19301 loc_descr = NULL;
19302 offset = 0;
19303 }
19304
19305 /* Data member location evaluation starts with the base address on the
19306 stack. Compute the field offset and add it to this base address. */
19307 else if (loc_descr != NULL)
19308 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19309 }
19310
19311 if (! loc_descr)
19312 {
19313 /* While DW_AT_data_bit_offset was already added in DWARF 4, consumers
19314 such as GDB only added support for it in November 2016. For DWARF 5
19315 we need newer debug info consumers anyway. We might change this
19316 to dwarf_version >= 4 once most consumers have caught up. */
19317 if (dwarf_version >= 5
19318 && TREE_CODE (decl) == FIELD_DECL
19319 && DECL_BIT_FIELD_TYPE (decl))
19320 {
19321 tree off = bit_position (decl);
19322 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19323 {
19324 remove_AT (die, DW_AT_byte_size);
19325 remove_AT (die, DW_AT_bit_offset);
19326 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19327 return;
19328 }
19329 }
19330 if (dwarf_version > 2)
19331 {
19332 /* Don't need to output a location expression, just the constant. */
19333 if (offset < 0)
19334 add_AT_int (die, DW_AT_data_member_location, offset);
19335 else
19336 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19337 return;
19338 }
19339 else
19340 {
19341 enum dwarf_location_atom op;
19342
19343 /* The DWARF2 standard says that we should assume that the structure
19344 address is already on the stack, so we can specify a structure
19345 field address by using DW_OP_plus_uconst. */
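	  /* For instance, a member at byte offset 8 would be described by
	     the single operation DW_OP_plus_uconst 8, applied to the
	     structure base address the consumer has already pushed.  */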
19346 op = DW_OP_plus_uconst;
19347 loc_descr = new_loc_descr (op, offset, 0);
19348 }
19349 }
19350
19351 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19352 }
19353
19354 /* Writes integer values to dw_vec_const array. */
19355
19356 static void
19357 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19358 {
19359 while (size != 0)
19360 {
19361 *dest++ = val & 0xff;
19362 val >>= 8;
19363 --size;
19364 }
19365 }
19366
19367 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19368
19369 static HOST_WIDE_INT
19370 extract_int (const unsigned char *src, unsigned int size)
19371 {
19372 HOST_WIDE_INT val = 0;
19373
19374 src += size;
19375 while (size != 0)
19376 {
19377 val <<= 8;
19378 val |= *--src & 0xff;
19379 --size;
19380 }
19381 return val;
19382 }
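/* For example (BUF names an arbitrary 2-byte buffer; the layout is
   host-independent because these helpers always store the least
   significant byte first): insert_int (0x1234, 2, buf) yields
   buf[0] == 0x34 and buf[1] == 0x12, and extract_int (buf, 2)
   recovers 0x1234.  */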
19383
19384 /* Writes wide_int values to dw_vec_const array. */
19385
19386 static void
19387 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19388 {
19389 int i;
19390
19391 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19392 {
19393 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19394 return;
19395 }
19396
19397 /* We'd have to extend this code to support odd sizes. */
19398 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19399
19400 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19401
19402 if (WORDS_BIG_ENDIAN)
19403 for (i = n - 1; i >= 0; i--)
19404 {
19405 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19406 dest += sizeof (HOST_WIDE_INT);
19407 }
19408 else
19409 for (i = 0; i < n; i++)
19410 {
19411 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19412 dest += sizeof (HOST_WIDE_INT);
19413 }
19414 }
19415
19416 /* Writes floating point values to dw_vec_const array. */
19417
19418 static void
19419 insert_float (const_rtx rtl, unsigned char *array)
19420 {
19421 long val[4];
19422 int i;
19423 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19424
19425 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19426
19427 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19428 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19429 {
19430 insert_int (val[i], 4, array);
19431 array += 4;
19432 }
19433 }
19434
19435 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19436 does not have a "location" either in memory or in a register. These
19437 things can arise in GNU C when a constant is passed as an actual parameter
19438 to an inlined function. They can also arise in C++ where declared
19439 constants do not necessarily get memory "homes". */
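/* A sketch of the typical situation (the names are illustrative only):

       static inline int scale (int x) { return x * 3; }
       ... scale (42) ...

   after inlining, the formal parameter X has no memory or register
   location, so its DIE in the inlined instance may simply carry
   DW_AT_const_value 42.  */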
19440
19441 static bool
19442 add_const_value_attribute (dw_die_ref die, rtx rtl)
19443 {
19444 switch (GET_CODE (rtl))
19445 {
19446 case CONST_INT:
19447 {
19448 HOST_WIDE_INT val = INTVAL (rtl);
19449
19450 if (val < 0)
19451 add_AT_int (die, DW_AT_const_value, val);
19452 else
19453 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19454 }
19455 return true;
19456
19457 case CONST_WIDE_INT:
19458 {
19459 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19460 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19461 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19462 wide_int w = wi::zext (w1, prec);
19463 add_AT_wide (die, DW_AT_const_value, w);
19464 }
19465 return true;
19466
19467 case CONST_DOUBLE:
19468 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19469 floating-point constant. A CONST_DOUBLE is used whenever the
19470 constant requires more than one word in order to be adequately
19471 represented. */
19472 if (TARGET_SUPPORTS_WIDE_INT == 0
19473 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19474 add_AT_double (die, DW_AT_const_value,
19475 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19476 else
19477 {
19478 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19479 unsigned int length = GET_MODE_SIZE (mode);
19480 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19481
19482 insert_float (rtl, array);
19483 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19484 }
19485 return true;
19486
19487 case CONST_VECTOR:
19488 {
19489 unsigned int length;
19490 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19491 return false;
19492
19493 machine_mode mode = GET_MODE (rtl);
19494 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19495 unsigned char *array
19496 = ggc_vec_alloc<unsigned char> (length * elt_size);
19497 unsigned int i;
19498 unsigned char *p;
19499 machine_mode imode = GET_MODE_INNER (mode);
19500
19501 switch (GET_MODE_CLASS (mode))
19502 {
19503 case MODE_VECTOR_INT:
19504 for (i = 0, p = array; i < length; i++, p += elt_size)
19505 {
19506 rtx elt = CONST_VECTOR_ELT (rtl, i);
19507 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19508 }
19509 break;
19510
19511 case MODE_VECTOR_FLOAT:
19512 for (i = 0, p = array; i < length; i++, p += elt_size)
19513 {
19514 rtx elt = CONST_VECTOR_ELT (rtl, i);
19515 insert_float (elt, p);
19516 }
19517 break;
19518
19519 default:
19520 gcc_unreachable ();
19521 }
19522
19523 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19524 }
19525 return true;
19526
19527 case CONST_STRING:
19528 if (dwarf_version >= 4 || !dwarf_strict)
19529 {
19530 dw_loc_descr_ref loc_result;
19531 resolve_one_addr (&rtl);
19532 rtl_addr:
19533 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19534 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19535 add_AT_loc (die, DW_AT_location, loc_result);
19536 vec_safe_push (used_rtx_array, rtl);
19537 return true;
19538 }
19539 return false;
19540
19541 case CONST:
19542 if (CONSTANT_P (XEXP (rtl, 0)))
19543 return add_const_value_attribute (die, XEXP (rtl, 0));
19544 /* FALLTHROUGH */
19545 case SYMBOL_REF:
19546 if (!const_ok_for_output (rtl))
19547 return false;
19548 /* FALLTHROUGH */
19549 case LABEL_REF:
19550 if (dwarf_version >= 4 || !dwarf_strict)
19551 goto rtl_addr;
19552 return false;
19553
19554 case PLUS:
19555 /* In cases where an inlined instance of an inline function is passed
19556 the address of an `auto' variable (which is local to the caller) we
19557 can get a situation where the DECL_RTL of the artificial local
19558 variable (for the inlining) which acts as a stand-in for the
19559 corresponding formal parameter (of the inline function) will look
19560 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19561 exactly a compile-time constant expression, but it isn't the address
19562 of the (artificial) local variable either. Rather, it represents the
19563 *value* which the artificial local variable always has during its
19564 lifetime. We currently have no way to represent such quasi-constant
19565 values in Dwarf, so for now we just punt and generate nothing. */
19566 return false;
19567
19568 case HIGH:
19569 case CONST_FIXED:
19570 return false;
19571
19572 case MEM:
19573 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19574 && MEM_READONLY_P (rtl)
19575 && GET_MODE (rtl) == BLKmode)
19576 {
19577 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19578 return true;
19579 }
19580 return false;
19581
19582 default:
19583 /* No other kinds of rtx should be possible here. */
19584 gcc_unreachable ();
19585 }
19586 return false;
19587 }
19588
19589 /* Determine whether the evaluation of EXPR references any variables
19590 or functions which aren't otherwise used (and therefore may not be
19591 output). */
19592 static tree
19593 reference_to_unused (tree * tp, int * walk_subtrees,
19594 void * data ATTRIBUTE_UNUSED)
19595 {
19596 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19597 *walk_subtrees = 0;
19598
19599 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19600 && ! TREE_ASM_WRITTEN (*tp))
19601 return *tp;
19602 /* ??? The C++ FE emits debug information for using decls, so
19603 putting gcc_unreachable here falls over. See PR31899. For now
19604 be conservative. */
19605 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19606 return *tp;
19607 else if (VAR_P (*tp))
19608 {
19609 varpool_node *node = varpool_node::get (*tp);
19610 if (!node || !node->definition)
19611 return *tp;
19612 }
19613 else if (TREE_CODE (*tp) == FUNCTION_DECL
19614 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19615 {
19616 /* The call graph machinery must have finished analyzing,
19617 optimizing and gimplifying the CU by now.
19618 So if *TP has no call graph node associated
19619 to it, it means *TP will not be emitted. */
19620 if (!cgraph_node::get (*tp))
19621 return *tp;
19622 }
19623 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19624 return *tp;
19625
19626 return NULL_TREE;
19627 }
19628
19629 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19630 for use in a later add_const_value_attribute call. */
19631
19632 static rtx
19633 rtl_for_decl_init (tree init, tree type)
19634 {
19635 rtx rtl = NULL_RTX;
19636
19637 STRIP_NOPS (init);
19638
19639 /* If a variable is initialized with a string constant without embedded
19640 zeros, build CONST_STRING. */
19641 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19642 {
19643 tree enttype = TREE_TYPE (type);
19644 tree domain = TYPE_DOMAIN (type);
19645 scalar_int_mode mode;
19646
19647 if (is_int_mode (TYPE_MODE (enttype), &mode)
19648 && GET_MODE_SIZE (mode) == 1
19649 && domain
19650 && TYPE_MAX_VALUE (domain)
19651 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19652 && integer_zerop (TYPE_MIN_VALUE (domain))
19653 && compare_tree_int (TYPE_MAX_VALUE (domain),
19654 TREE_STRING_LENGTH (init) - 1) == 0
19655 && ((size_t) TREE_STRING_LENGTH (init)
19656 == strlen (TREE_STRING_POINTER (init)) + 1))
19657 {
19658 rtl = gen_rtx_CONST_STRING (VOIDmode,
19659 ggc_strdup (TREE_STRING_POINTER (init)));
19660 rtl = gen_rtx_MEM (BLKmode, rtl);
19661 MEM_READONLY_P (rtl) = 1;
19662 }
19663 }
19664 /* Other aggregates, and complex values, could be represented using
19665 CONCAT: FIXME! */
19666 else if (AGGREGATE_TYPE_P (type)
19667 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19668 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19669 || TREE_CODE (type) == COMPLEX_TYPE)
19670 ;
19671 /* Vectors only work if their mode is supported by the target.
19672 FIXME: generic vectors ought to work too. */
19673 else if (TREE_CODE (type) == VECTOR_TYPE
19674 && !VECTOR_MODE_P (TYPE_MODE (type)))
19675 ;
19676 /* If the initializer is something that we know will expand into an
19677 immediate RTL constant, expand it now. We must be careful not to
19678 reference variables which won't be output. */
19679 else if (initializer_constant_valid_p (init, type)
19680 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19681 {
19682 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19683 possible. */
19684 if (TREE_CODE (type) == VECTOR_TYPE)
19685 switch (TREE_CODE (init))
19686 {
19687 case VECTOR_CST:
19688 break;
19689 case CONSTRUCTOR:
19690 if (TREE_CONSTANT (init))
19691 {
19692 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19693 bool constant_p = true;
19694 tree value;
19695 unsigned HOST_WIDE_INT ix;
19696
19697 /* Even when ctor is constant, it might contain non-*_CST
19698 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19699 belong in VECTOR_CST nodes. */
19700 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19701 if (!CONSTANT_CLASS_P (value))
19702 {
19703 constant_p = false;
19704 break;
19705 }
19706
19707 if (constant_p)
19708 {
19709 init = build_vector_from_ctor (type, elts);
19710 break;
19711 }
19712 }
19713 /* FALLTHRU */
19714
19715 default:
19716 return NULL;
19717 }
19718
19719 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19720
19721 /* If expand_expr returns a MEM, it wasn't immediate. */
19722 gcc_assert (!rtl || !MEM_P (rtl));
19723 }
19724
19725 return rtl;
19726 }
19727
19728 /* Generate RTL for the variable DECL to represent its location. */
19729
19730 static rtx
19731 rtl_for_decl_location (tree decl)
19732 {
19733 rtx rtl;
19734
19735 /* Here we have to decide where we are going to say the parameter "lives"
19736 (as far as the debugger is concerned). We only have a couple of
19737 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19738
19739 DECL_RTL normally indicates where the parameter lives during most of the
19740 activation of the function. If optimization is enabled however, this
19741 could be either NULL or else a pseudo-reg. Both of those cases indicate
19742 that the parameter doesn't really live anywhere (as far as the code
19743 generation parts of GCC are concerned) during most of the function's
19744 activation. That will happen (for example) if the parameter is never
19745 referenced within the function.
19746
19747 We could just generate a location descriptor here for all non-NULL
19748 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19749 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19750 where DECL_RTL is NULL or is a pseudo-reg.
19751
19752 Note however that we can only get away with using DECL_INCOMING_RTL as
19753 a backup substitute for DECL_RTL in certain limited cases. In cases
19754 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19755 we can be sure that the parameter was passed using the same type as it is
19756 declared to have within the function, and that its DECL_INCOMING_RTL
19757 points us to a place where a value of that type is passed.
19758
19759 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19760 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19761 because in these cases DECL_INCOMING_RTL points us to a value of some
19762 type which is *different* from the type of the parameter itself. Thus,
19763 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19764 such cases, the debugger would end up (for example) trying to fetch a
19765 `float' from a place which actually contains the first part of a
19766 `double'. That would lead to really incorrect and confusing
19767 output at debug-time.
19768
19769 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19770 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19771 are a couple of exceptions however. On little-endian machines we can
19772 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19773 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19774 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19775 when (on a little-endian machine) a non-prototyped function has a
19776 parameter declared to be of type `short' or `char'. In such cases,
19777 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19778 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19779 passed `int' value. If the debugger then uses that address to fetch
19780 a `short' or a `char' (on a little-endian machine) the result will be
19781 the correct data, so we allow for such exceptional cases below.
19782
19783 Note that our goal here is to describe the place where the given formal
19784 parameter lives during most of the function's activation (i.e. between the
19785 end of the prologue and the start of the epilogue). We'll do that as best
19786 as we can. Note however that if the given formal parameter is modified
19787 sometime during the execution of the function, then a stack backtrace (at
19788 debug-time) will show the function as having been called with the *new*
19789 value rather than the value which was originally passed in. This happens
19790 rarely enough that it is not a major problem, but it *is* a problem, and
19791 I'd like to fix it.
19792
19793 A future version of dwarf2out.c may generate two additional attributes for
19794 any given DW_TAG_formal_parameter DIE which will describe the "passed
19795 type" and the "passed location" for the given formal parameter in addition
19796 to the attributes we now generate to indicate the "declared type" and the
19797 "active location" for each parameter. This additional set of attributes
19798 could be used by debuggers for stack backtraces. Separately, note that
19799 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19800 This happens (for example) for inlined-instances of inline function formal
19801 parameters which are never referenced. This really shouldn't be
19802 happening. All PARM_DECL nodes should get valid non-NULL
19803 DECL_INCOMING_RTL values. FIXME. */
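   /* A concrete (hedged) illustration of the little-endian exception
      described above:

	  int f (s)
	       short s;
	  { return s; }

      Here TREE_TYPE (decl) is `short' while DECL_ARG_TYPE (decl) is `int'
      because of default argument promotion, and on a little-endian target
      the lowest-addressed byte of the passed `int' slot holds the bytes of
      the `short', so DECL_INCOMING_RTL remains usable.  */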
19804
19805 /* Use DECL_RTL as the "location" unless we find something better. */
19806 rtl = DECL_RTL_IF_SET (decl);
19807
19808 /* When generating abstract instances, ignore everything except
19809 constants, symbols living in memory, and symbols living in
19810 fixed registers. */
19811 if (! reload_completed)
19812 {
19813 if (rtl
19814 && (CONSTANT_P (rtl)
19815 || (MEM_P (rtl)
19816 && CONSTANT_P (XEXP (rtl, 0)))
19817 || (REG_P (rtl)
19818 && VAR_P (decl)
19819 && TREE_STATIC (decl))))
19820 {
19821 rtl = targetm.delegitimize_address (rtl);
19822 return rtl;
19823 }
19824 rtl = NULL_RTX;
19825 }
19826 else if (TREE_CODE (decl) == PARM_DECL)
19827 {
19828 if (rtl == NULL_RTX
19829 || is_pseudo_reg (rtl)
19830 || (MEM_P (rtl)
19831 && is_pseudo_reg (XEXP (rtl, 0))
19832 && DECL_INCOMING_RTL (decl)
19833 && MEM_P (DECL_INCOMING_RTL (decl))
19834 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19835 {
19836 tree declared_type = TREE_TYPE (decl);
19837 tree passed_type = DECL_ARG_TYPE (decl);
19838 machine_mode dmode = TYPE_MODE (declared_type);
19839 machine_mode pmode = TYPE_MODE (passed_type);
19840
19841 /* This decl represents a formal parameter which was optimized out.
19842 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19843 all cases where (rtl == NULL_RTX) just below. */
19844 if (dmode == pmode)
19845 rtl = DECL_INCOMING_RTL (decl);
19846 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19847 && SCALAR_INT_MODE_P (dmode)
19848 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19849 && DECL_INCOMING_RTL (decl))
19850 {
19851 rtx inc = DECL_INCOMING_RTL (decl);
19852 if (REG_P (inc))
19853 rtl = inc;
19854 else if (MEM_P (inc))
19855 {
19856 if (BYTES_BIG_ENDIAN)
19857 rtl = adjust_address_nv (inc, dmode,
19858 GET_MODE_SIZE (pmode)
19859 - GET_MODE_SIZE (dmode));
19860 else
19861 rtl = inc;
19862 }
19863 }
19864 }
19865
19866 /* If the parm was passed in registers, but lives on the stack, then
19867 make a big endian correction if the mode of the type of the
19868 parameter is not the same as the mode of the rtl. */
19869 /* ??? This is the same series of checks that are made in dbxout.c before
19870 we reach the big endian correction code there. It isn't clear if all
19871 of these checks are necessary here, but keeping them all is the safe
19872 thing to do. */
19873 else if (MEM_P (rtl)
19874 && XEXP (rtl, 0) != const0_rtx
19875 && ! CONSTANT_P (XEXP (rtl, 0))
19876 /* Not passed in memory. */
19877 && !MEM_P (DECL_INCOMING_RTL (decl))
19878 /* Not passed by invisible reference. */
19879 && (!REG_P (XEXP (rtl, 0))
19880 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
19881 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
19882 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
19883 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
19884 #endif
19885 )
19886 /* Big endian correction check. */
19887 && BYTES_BIG_ENDIAN
19888 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
19889 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
19890 UNITS_PER_WORD))
19891 {
19892 machine_mode addr_mode = get_address_mode (rtl);
19893 poly_int64 offset = (UNITS_PER_WORD
19894 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
19895
19896 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19897 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19898 }
19899 }
19900 else if (VAR_P (decl)
19901 && rtl
19902 && MEM_P (rtl)
19903 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
19904 {
19905 machine_mode addr_mode = get_address_mode (rtl);
19906 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
19907 GET_MODE (rtl));
19908
19909 /* If a variable is declared "register" yet is smaller than
19910 a register, then if we store the variable to memory, it
19911 looks like we're storing a register-sized value, when in
19912 fact we are not. We need to adjust the offset of the
19913 storage location to reflect the actual value's bytes,
19914 else gdb will not be able to display it. */
19915 if (maybe_ne (offset, 0))
19916 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
19917 plus_constant (addr_mode, XEXP (rtl, 0), offset));
19918 }
19919
19920 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
19921 and will have been substituted directly into all expressions that use it.
19922 C does not have such a concept, but C++ and other languages do. */
19923 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
19924 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
19925
19926 if (rtl)
19927 rtl = targetm.delegitimize_address (rtl);
19928
19929 /* If we don't look past the constant pool, we risk emitting a
19930 reference to a constant pool entry that isn't referenced from
19931 code, and thus is not emitted. */
19932 if (rtl)
19933 rtl = avoid_constant_pool_reference (rtl);
19934
19935 /* Try harder to get a rtl. If this symbol ends up not being emitted
19936 in the current CU, resolve_addr will remove the expression referencing
19937 it. */
19938 if (rtl == NULL_RTX
19939 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
19940 && VAR_P (decl)
19941 && !DECL_EXTERNAL (decl)
19942 && TREE_STATIC (decl)
19943 && DECL_NAME (decl)
19944 && !DECL_HARD_REGISTER (decl)
19945 && DECL_MODE (decl) != VOIDmode)
19946 {
19947 rtl = make_decl_rtl_for_debug (decl);
19948 if (!MEM_P (rtl)
19949 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
19950 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
19951 rtl = NULL_RTX;
19952 }
19953
19954 return rtl;
19955 }
19956
19957 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
19958 returned. If so, the decl for the COMMON block is returned, and *VALUE
19959 is set to the offset of the symbol into the common block. */
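/* A hedged example: for Fortran source along the lines of

       COMMON /BLK/ A, B
       REAL A, B

   the variable B has a DECL_VALUE_EXPR that is a COMPONENT_REF into the
   artificial variable created for the BLK common block, so this function
   would return that block's decl and set *VALUE to B's byte offset within
   it (4 on a target with 4-byte REAL).  */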
19960
19961 static tree
19962 fortran_common (tree decl, HOST_WIDE_INT *value)
19963 {
19964 tree val_expr, cvar;
19965 machine_mode mode;
19966 poly_int64 bitsize, bitpos;
19967 tree offset;
19968 HOST_WIDE_INT cbitpos;
19969 int unsignedp, reversep, volatilep = 0;
19970
19971 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
19972 it does not have a value (the offset into the common area), or if it
19973 is thread local (as opposed to global) then it isn't common, and shouldn't
19974 be handled as such. */
19975 if (!VAR_P (decl)
19976 || !TREE_STATIC (decl)
19977 || !DECL_HAS_VALUE_EXPR_P (decl)
19978 || !is_fortran ())
19979 return NULL_TREE;
19980
19981 val_expr = DECL_VALUE_EXPR (decl);
19982 if (TREE_CODE (val_expr) != COMPONENT_REF)
19983 return NULL_TREE;
19984
19985 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
19986 &unsignedp, &reversep, &volatilep);
19987
19988 if (cvar == NULL_TREE
19989 || !VAR_P (cvar)
19990 || DECL_ARTIFICIAL (cvar)
19991 || !TREE_PUBLIC (cvar)
19992 /* We don't expect to have to cope with variable offsets,
19993 since at present all static data must have a constant size. */
19994 || !bitpos.is_constant (&cbitpos))
19995 return NULL_TREE;
19996
19997 *value = 0;
19998 if (offset != NULL)
19999 {
20000 if (!tree_fits_shwi_p (offset))
20001 return NULL_TREE;
20002 *value = tree_to_shwi (offset);
20003 }
20004 if (cbitpos != 0)
20005 *value += cbitpos / BITS_PER_UNIT;
20006
20007 return cvar;
20008 }
20009
20010 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
20011 data attribute for a variable or a parameter. We generate the
20012 DW_AT_const_value attribute only in those cases where the given variable
20013 or parameter does not have a true "location" either in memory or in a
20014 register. This can happen (for example) when a constant is passed as an
20015 actual argument in a call to an inline function. (It's possible that
20016 these things can crop up in other ways also.) Note that one type of
20017 constant value which can be passed into an inlined function is a constant
20018 pointer. This can happen for example if an actual argument in an inlined
20019 function call evaluates to a compile-time constant address.
20020
20021 CACHE_P is true if it is worth caching the location list for DECL,
20022 so that future calls can reuse it rather than regenerate it from scratch.
20023 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
20024 since we will need to refer to them each time the function is inlined. */
20025
20026 static bool
20027 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
20028 {
20029 rtx rtl;
20030 dw_loc_list_ref list;
20031 var_loc_list *loc_list;
20032 cached_dw_loc_list *cache;
20033
20034 if (early_dwarf)
20035 return false;
20036
20037 if (TREE_CODE (decl) == ERROR_MARK)
20038 return false;
20039
20040 if (get_AT (die, DW_AT_location)
20041 || get_AT (die, DW_AT_const_value))
20042 return true;
20043
20044 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20045 || TREE_CODE (decl) == RESULT_DECL);
20046
20047 /* Try to get some constant RTL for this decl, and use that as the value of
20048 the location. */
20049
20050 rtl = rtl_for_decl_location (decl);
20051 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20052 && add_const_value_attribute (die, rtl))
20053 return true;
20054
20055 /* See if we have a single-element location list that is equivalent to
20056 a constant value; in that case it is better to use add_const_value_attribute
20057 than to expand the constant value equivalent. */
20058 loc_list = lookup_decl_loc (decl);
20059 if (loc_list
20060 && loc_list->first
20061 && loc_list->first->next == NULL
20062 && NOTE_P (loc_list->first->loc)
20063 && NOTE_VAR_LOCATION (loc_list->first->loc)
20064 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20065 {
20066 struct var_loc_node *node;
20067
20068 node = loc_list->first;
20069 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20070 if (GET_CODE (rtl) == EXPR_LIST)
20071 rtl = XEXP (rtl, 0);
20072 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20073 && add_const_value_attribute (die, rtl))
20074 return true;
20075 }
20076 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20077 list several times. See if we've already cached the contents. */
20078 list = NULL;
20079 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20080 cache_p = false;
20081 if (cache_p)
20082 {
20083 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20084 if (cache)
20085 list = cache->loc_list;
20086 }
20087 if (list == NULL)
20088 {
20089 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20090 NULL);
20091 /* It is usually worth caching this result if the decl is from
20092 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20093 if (cache_p && list && list->dw_loc_next)
20094 {
20095 cached_dw_loc_list **slot
20096 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20097 DECL_UID (decl),
20098 INSERT);
20099 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20100 cache->decl_id = DECL_UID (decl);
20101 cache->loc_list = list;
20102 *slot = cache;
20103 }
20104 }
20105 if (list)
20106 {
20107 add_AT_location_description (die, DW_AT_location, list);
20108 return true;
20109 }
20110 /* None of that worked, so it must not really have a location;
20111 try adding a constant value attribute from the DECL_INITIAL. */
20112 return tree_add_const_value_attribute_for_decl (die, decl);
20113 }
20114
20115 /* Helper function for tree_add_const_value_attribute. Natively encode
20116 initializer INIT into an array. Return true if successful. */
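/* A minimal sketch of the output, assuming a little-endian target with
   4-byte int and 8-bit units: for

       int a[3] = { 1, 2, 3 };

   the 12-byte array would be filled with
   01 00 00 00 02 00 00 00 03 00 00 00 (reversed within each element on a
   big-endian target).  */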
20117
20118 static bool
20119 native_encode_initializer (tree init, unsigned char *array, int size)
20120 {
20121 tree type;
20122
20123 if (init == NULL_TREE)
20124 return false;
20125
20126 STRIP_NOPS (init);
20127 switch (TREE_CODE (init))
20128 {
20129 case STRING_CST:
20130 type = TREE_TYPE (init);
20131 if (TREE_CODE (type) == ARRAY_TYPE)
20132 {
20133 tree enttype = TREE_TYPE (type);
20134 scalar_int_mode mode;
20135
20136 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20137 || GET_MODE_SIZE (mode) != 1)
20138 return false;
20139 if (int_size_in_bytes (type) != size)
20140 return false;
20141 if (size > TREE_STRING_LENGTH (init))
20142 {
20143 memcpy (array, TREE_STRING_POINTER (init),
20144 TREE_STRING_LENGTH (init));
20145 memset (array + TREE_STRING_LENGTH (init),
20146 '\0', size - TREE_STRING_LENGTH (init));
20147 }
20148 else
20149 memcpy (array, TREE_STRING_POINTER (init), size);
20150 return true;
20151 }
20152 return false;
20153 case CONSTRUCTOR:
20154 type = TREE_TYPE (init);
20155 if (int_size_in_bytes (type) != size)
20156 return false;
20157 if (TREE_CODE (type) == ARRAY_TYPE)
20158 {
20159 HOST_WIDE_INT min_index;
20160 unsigned HOST_WIDE_INT cnt;
20161 int curpos = 0, fieldsize;
20162 constructor_elt *ce;
20163
20164 if (TYPE_DOMAIN (type) == NULL_TREE
20165 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20166 return false;
20167
20168 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20169 if (fieldsize <= 0)
20170 return false;
20171
20172 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20173 memset (array, '\0', size);
20174 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20175 {
20176 tree val = ce->value;
20177 tree index = ce->index;
20178 int pos = curpos;
20179 if (index && TREE_CODE (index) == RANGE_EXPR)
20180 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20181 * fieldsize;
20182 else if (index)
20183 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20184
20185 if (val)
20186 {
20187 STRIP_NOPS (val);
20188 if (!native_encode_initializer (val, array + pos, fieldsize))
20189 return false;
20190 }
20191 curpos = pos + fieldsize;
20192 if (index && TREE_CODE (index) == RANGE_EXPR)
20193 {
20194 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20195 - tree_to_shwi (TREE_OPERAND (index, 0));
20196 while (count-- > 0)
20197 {
20198 if (val)
20199 memcpy (array + curpos, array + pos, fieldsize);
20200 curpos += fieldsize;
20201 }
20202 }
20203 gcc_assert (curpos <= size);
20204 }
20205 return true;
20206 }
20207 else if (TREE_CODE (type) == RECORD_TYPE
20208 || TREE_CODE (type) == UNION_TYPE)
20209 {
20210 tree field = NULL_TREE;
20211 unsigned HOST_WIDE_INT cnt;
20212 constructor_elt *ce;
20213
20214 if (int_size_in_bytes (type) != size)
20215 return false;
20216
20217 if (TREE_CODE (type) == RECORD_TYPE)
20218 field = TYPE_FIELDS (type);
20219
20220 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20221 {
20222 tree val = ce->value;
20223 int pos, fieldsize;
20224
20225 if (ce->index != 0)
20226 field = ce->index;
20227
20228 if (val)
20229 STRIP_NOPS (val);
20230
20231 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20232 return false;
20233
20234 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20235 && TYPE_DOMAIN (TREE_TYPE (field))
20236 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20237 return false;
20238 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20239 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20240 return false;
20241 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20242 pos = int_byte_position (field);
20243 gcc_assert (pos + fieldsize <= size);
20244 if (val && fieldsize != 0
20245 && !native_encode_initializer (val, array + pos, fieldsize))
20246 return false;
20247 }
20248 return true;
20249 }
20250 return false;
20251 case VIEW_CONVERT_EXPR:
20252 case NON_LVALUE_EXPR:
20253 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20254 default:
20255 return native_encode_expr (init, array, size) == size;
20256 }
20257 }
20258
20259 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20260 attribute is the const value T. */
20261
20262 static bool
20263 tree_add_const_value_attribute (dw_die_ref die, tree t)
20264 {
20265 tree init;
20266 tree type = TREE_TYPE (t);
20267 rtx rtl;
20268
20269 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20270 return false;
20271
20272 init = t;
20273 gcc_assert (!DECL_P (init));
20274
20275 if (TREE_CODE (init) == INTEGER_CST)
20276 {
20277 if (tree_fits_uhwi_p (init))
20278 {
20279 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20280 return true;
20281 }
20282 if (tree_fits_shwi_p (init))
20283 {
20284 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20285 return true;
20286 }
20287 }
20288 if (! early_dwarf)
20289 {
20290 rtl = rtl_for_decl_init (init, type);
20291 if (rtl)
20292 return add_const_value_attribute (die, rtl);
20293 }
20294 /* If the host and target are sane, try harder. */
20295 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20296 && initializer_constant_valid_p (init, type))
20297 {
20298 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20299 if (size > 0 && (int) size == size)
20300 {
20301 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20302
20303 if (native_encode_initializer (init, array, size))
20304 {
20305 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20306 return true;
20307 }
20308 ggc_free (array);
20309 }
20310 }
20311 return false;
20312 }
20313
20314 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20315 attribute is the const value of T, where T is an integral constant
20316 variable with static storage duration
20317 (so it can't be a PARM_DECL or a RESULT_DECL). */
20318
20319 static bool
20320 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20321 {
20322
20323 if (!decl
20324 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20325 || (VAR_P (decl) && !TREE_STATIC (decl)))
20326 return false;
20327
20328 if (TREE_READONLY (decl)
20329 && ! TREE_THIS_VOLATILE (decl)
20330 && DECL_INITIAL (decl))
20331 /* OK */;
20332 else
20333 return false;
20334
20335 /* Don't add DW_AT_const_value if abstract origin already has one. */
20336 if (get_AT (var_die, DW_AT_const_value))
20337 return false;
20338
20339 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20340 }
20341
20342 /* Convert the CFI instructions for the current function into a
20343 location list. This is used for DW_AT_frame_base when we are targeting
20344 a dwarf2 consumer that does not support the dwarf3
20345 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20346 expressions. */
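/* A rough sketch of the result: if the CFA is computed as sp + 16 from the
   start of the function up to some label L, and as fp + 16 from L to the
   end, the returned list would have two nodes, covering [dw_fde_begin, L)
   and [L, dw_fde_end) respectively, each holding the expression that
   build_cfa_loc constructs for that CFA (with OFFSET added).  */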
20347
20348 static dw_loc_list_ref
20349 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20350 {
20351 int ix;
20352 dw_fde_ref fde;
20353 dw_loc_list_ref list, *list_tail;
20354 dw_cfi_ref cfi;
20355 dw_cfa_location last_cfa, next_cfa;
20356 const char *start_label, *last_label, *section;
20357 dw_cfa_location remember;
20358
20359 fde = cfun->fde;
20360 gcc_assert (fde != NULL);
20361
20362 section = secname_for_decl (current_function_decl);
20363 list_tail = &list;
20364 list = NULL;
20365
20366 memset (&next_cfa, 0, sizeof (next_cfa));
20367 next_cfa.reg = INVALID_REGNUM;
20368 remember = next_cfa;
20369
20370 start_label = fde->dw_fde_begin;
20371
20372 /* ??? Bald assumption that the CIE opcode list does not contain
20373 advance opcodes. */
20374 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20375 lookup_cfa_1 (cfi, &next_cfa, &remember);
20376
20377 last_cfa = next_cfa;
20378 last_label = start_label;
20379
20380 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20381 {
20382 /* If the first partition contained no CFI adjustments, the
20383 CIE opcodes apply to the whole first partition. */
20384 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20385 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20386 list_tail = &(*list_tail)->dw_loc_next;
20387 start_label = last_label = fde->dw_fde_second_begin;
20388 }
20389
20390 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20391 {
20392 switch (cfi->dw_cfi_opc)
20393 {
20394 case DW_CFA_set_loc:
20395 case DW_CFA_advance_loc1:
20396 case DW_CFA_advance_loc2:
20397 case DW_CFA_advance_loc4:
20398 if (!cfa_equal_p (&last_cfa, &next_cfa))
20399 {
20400 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20401 start_label, 0, last_label, 0, section);
20402
20403 list_tail = &(*list_tail)->dw_loc_next;
20404 last_cfa = next_cfa;
20405 start_label = last_label;
20406 }
20407 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20408 break;
20409
20410 case DW_CFA_advance_loc:
20411 /* The encoding is complex enough that we should never emit this. */
20412 gcc_unreachable ();
20413
20414 default:
20415 lookup_cfa_1 (cfi, &next_cfa, &remember);
20416 break;
20417 }
20418 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20419 {
20420 if (!cfa_equal_p (&last_cfa, &next_cfa))
20421 {
20422 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20423 start_label, 0, last_label, 0, section);
20424
20425 list_tail = &(*list_tail)->dw_loc_next;
20426 last_cfa = next_cfa;
20427 start_label = last_label;
20428 }
20429 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20430 start_label, 0, fde->dw_fde_end, 0, section);
20431 list_tail = &(*list_tail)->dw_loc_next;
20432 start_label = last_label = fde->dw_fde_second_begin;
20433 }
20434 }
20435
20436 if (!cfa_equal_p (&last_cfa, &next_cfa))
20437 {
20438 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20439 start_label, 0, last_label, 0, section);
20440 list_tail = &(*list_tail)->dw_loc_next;
20441 start_label = last_label;
20442 }
20443
20444 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20445 start_label, 0,
20446 fde->dw_fde_second_begin
20447 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20448 section);
20449
20450 maybe_gen_llsym (list);
20451
20452 return list;
20453 }
20454
20455 /* Compute a displacement from the "steady-state frame pointer" to the
20456 frame base (often the same as the CFA), and store it in
20457 frame_pointer_fb_offset. OFFSET is added to the displacement
20458 before the latter is negated. */
20459
20460 static void
20461 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20462 {
20463 rtx reg, elim;
20464
20465 #ifdef FRAME_POINTER_CFA_OFFSET
20466 reg = frame_pointer_rtx;
20467 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20468 #else
20469 reg = arg_pointer_rtx;
20470 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20471 #endif
20472
20473 elim = (ira_use_lra_p
20474 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20475 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20476 elim = strip_offset_and_add (elim, &offset);
20477
20478 frame_pointer_fb_offset = -offset;
20479
20480 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20481 in which to eliminate. This is because its stack pointer isn't
20482 directly accessible as a register within the ISA. To work around
20483 this, assume that while we cannot provide a proper value for
20484 frame_pointer_fb_offset, we won't need one either. */
20485 frame_pointer_fb_offset_valid
20486 = ((SUPPORTS_STACK_ALIGNMENT
20487 && (elim == hard_frame_pointer_rtx
20488 || elim == stack_pointer_rtx))
20489 || elim == (frame_pointer_needed
20490 ? hard_frame_pointer_rtx
20491 : stack_pointer_rtx));
20492 }
20493
20494 /* Generate a DW_AT_name attribute given some string value to be included as
20495 the value of the attribute. */
20496
20497 static void
20498 add_name_attribute (dw_die_ref die, const char *name_string)
20499 {
20500 if (name_string != NULL && *name_string != 0)
20501 {
20502 if (demangle_name_func)
20503 name_string = (*demangle_name_func) (name_string);
20504
20505 add_AT_string (die, DW_AT_name, name_string);
20506 }
20507 }
20508
20509 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20510 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20511 of TYPE accordingly.
20512
20513 ??? This is a temporary measure until after we're able to generate
20514 regular DWARF for the complex Ada type system. */
20515
20516 static void
20517 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20518 dw_die_ref context_die)
20519 {
20520 tree dtype;
20521 dw_die_ref dtype_die;
20522
20523 if (!lang_hooks.types.descriptive_type)
20524 return;
20525
20526 dtype = lang_hooks.types.descriptive_type (type);
20527 if (!dtype)
20528 return;
20529
20530 dtype_die = lookup_type_die (dtype);
20531 if (!dtype_die)
20532 {
20533 gen_type_die (dtype, context_die);
20534 dtype_die = lookup_type_die (dtype);
20535 gcc_assert (dtype_die);
20536 }
20537
20538 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20539 }
20540
20541 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20542
20543 static const char *
20544 comp_dir_string (void)
20545 {
20546 const char *wd;
20547 char *wd1;
20548 static const char *cached_wd = NULL;
20549
20550 if (cached_wd != NULL)
20551 return cached_wd;
20552
20553 wd = get_src_pwd ();
20554 if (wd == NULL)
20555 return NULL;
20556
20557 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20558 {
20559 int wdlen;
20560
20561 wdlen = strlen (wd);
20562 wd1 = ggc_vec_alloc<char> (wdlen + 2);
20563 strcpy (wd1, wd);
20564 wd1 [wdlen] = DIR_SEPARATOR;
20565 wd1 [wdlen + 1] = 0;
20566 wd = wd1;
20567 }
20568
20569 cached_wd = remap_debug_filename (wd);
20570 return cached_wd;
20571 }
20572
20573 /* Generate a DW_AT_comp_dir attribute for DIE. */
20574
20575 static void
20576 add_comp_dir_attribute (dw_die_ref die)
20577 {
20578 const char * wd = comp_dir_string ();
20579 if (wd != NULL)
20580 add_AT_string (die, DW_AT_comp_dir, wd);
20581 }
20582
20583 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20584 pointer computation, ...), output a representation for that value according
20585 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20586 loc_list_from_tree for the meaning of CONTEXT. */
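/* A hedged example of the dw_scalar_form_reference case handled below: in
   Ada, an array whose upper bound is a record discriminant reaches this
   point as a COMPONENT_REF of a PLACEHOLDER_EXPR; if the discriminant's
   FIELD_DECL already has a DIE, the bound attribute is emitted simply as
   a reference to that DIE.  */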
20587
20588 static void
20589 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20590 int forms, struct loc_descr_context *context)
20591 {
20592 dw_die_ref context_die, decl_die;
20593 dw_loc_list_ref list;
20594 bool strip_conversions = true;
20595 bool placeholder_seen = false;
20596
20597 while (strip_conversions)
20598 switch (TREE_CODE (value))
20599 {
20600 case ERROR_MARK:
20601 case SAVE_EXPR:
20602 return;
20603
20604 CASE_CONVERT:
20605 case VIEW_CONVERT_EXPR:
20606 value = TREE_OPERAND (value, 0);
20607 break;
20608
20609 default:
20610 strip_conversions = false;
20611 break;
20612 }
20613
20614 /* If possible and permitted, output the attribute as a constant. */
20615 if ((forms & dw_scalar_form_constant) != 0
20616 && TREE_CODE (value) == INTEGER_CST)
20617 {
20618 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20619
20620 /* If HOST_WIDE_INT is big enough then represent the bound as
20621 a constant value. We need to choose a form based on
20622 whether the type is signed or unsigned. We cannot just
20623 call add_AT_unsigned if the value itself is positive
20624 (add_AT_unsigned might add the unsigned value encoded as
20625 DW_FORM_data[1248]). Some DWARF consumers will look up the
20626 bounds type and then sign extend any unsigned values found
20627 for signed types. This is needed only for
20628 DW_AT_{lower,upper}_bound, since for most other attributes,
20629 consumers will treat DW_FORM_data[1248] as unsigned values,
20630 regardless of the underlying type. */
20631 if (prec <= HOST_BITS_PER_WIDE_INT
20632 || tree_fits_uhwi_p (value))
20633 {
20634 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20635 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20636 else
20637 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20638 }
20639 else
20640 /* Otherwise represent the bound as an unsigned value with
20641 the precision of its type. The precision and signedness
20642 of the type will be necessary to re-interpret it
20643 unambiguously. */
20644 add_AT_wide (die, attr, wi::to_wide (value));
20645 return;
20646 }
20647
20648 /* Otherwise, if it's possible and permitted too, output a reference to
20649 another DIE. */
20650 if ((forms & dw_scalar_form_reference) != 0)
20651 {
20652 tree decl = NULL_TREE;
20653
20654 /* Some type attributes reference an outer type. For instance, the upper
20655 bound of an array may reference an embedding record (this happens in
20656 Ada). */
20657 if (TREE_CODE (value) == COMPONENT_REF
20658 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20659 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20660 decl = TREE_OPERAND (value, 1);
20661
20662 else if (VAR_P (value)
20663 || TREE_CODE (value) == PARM_DECL
20664 || TREE_CODE (value) == RESULT_DECL)
20665 decl = value;
20666
20667 if (decl != NULL_TREE)
20668 {
20669 dw_die_ref decl_die = lookup_decl_die (decl);
20670
20671 /* ??? Can this happen, or should the variable have been bound
20672 first? Probably it can, since I imagine that we try to create
20673 the types of parameters in the order in which they exist in
20674 the list, and won't have created a forward reference to a
20675 later parameter. */
20676 if (decl_die != NULL)
20677 {
20678 add_AT_die_ref (die, attr, decl_die);
20679 return;
20680 }
20681 }
20682 }
20683
20684 /* Last chance: try to create a stack operation procedure to evaluate the
20685 value. Do nothing if even that is not possible or permitted. */
20686 if ((forms & dw_scalar_form_exprloc) == 0)
20687 return;
20688
20689 list = loc_list_from_tree (value, 2, context);
20690 if (context && context->placeholder_arg)
20691 {
20692 placeholder_seen = context->placeholder_seen;
20693 context->placeholder_seen = false;
20694 }
20695 if (list == NULL || single_element_loc_list_p (list))
20696 {
20697 /* If this attribute is neither a reference nor a constant, it is
20698 a DWARF expression rather than a location description. For that
20699 loc_list_from_tree (value, 0, &context) is needed. */
20700 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20701 if (list2 && single_element_loc_list_p (list2))
20702 {
20703 if (placeholder_seen)
20704 {
20705 struct dwarf_procedure_info dpi;
20706 dpi.fndecl = NULL_TREE;
20707 dpi.args_count = 1;
20708 if (!resolve_args_picking (list2->expr, 1, &dpi))
20709 return;
20710 }
20711 add_AT_loc (die, attr, list2->expr);
20712 return;
20713 }
20714 }
20715
20716 /* If that failed to give a single element location list, fall back to
20717 outputting this as a reference... still if permitted. */
20718 if (list == NULL
20719 || (forms & dw_scalar_form_reference) == 0
20720 || placeholder_seen)
20721 return;
20722
20723 if (current_function_decl == 0)
20724 context_die = comp_unit_die ();
20725 else
20726 context_die = lookup_decl_die (current_function_decl);
20727
20728 decl_die = new_die (DW_TAG_variable, context_die, value);
20729 add_AT_flag (decl_die, DW_AT_artificial, 1);
20730 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20731 context_die);
20732 add_AT_location_description (decl_die, DW_AT_location, list);
20733 add_AT_die_ref (die, attr, decl_die);
20734 }
20735
20736 /* Return the default for DW_AT_lower_bound, or -1 if there is no
20737 default. */
20738
20739 static int
20740 lower_bound_default (void)
20741 {
20742 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20743 {
20744 case DW_LANG_C:
20745 case DW_LANG_C89:
20746 case DW_LANG_C99:
20747 case DW_LANG_C11:
20748 case DW_LANG_C_plus_plus:
20749 case DW_LANG_C_plus_plus_11:
20750 case DW_LANG_C_plus_plus_14:
20751 case DW_LANG_ObjC:
20752 case DW_LANG_ObjC_plus_plus:
20753 return 0;
20754 case DW_LANG_Fortran77:
20755 case DW_LANG_Fortran90:
20756 case DW_LANG_Fortran95:
20757 case DW_LANG_Fortran03:
20758 case DW_LANG_Fortran08:
20759 return 1;
20760 case DW_LANG_UPC:
20761 case DW_LANG_D:
20762 case DW_LANG_Python:
20763 return dwarf_version >= 4 ? 0 : -1;
20764 case DW_LANG_Ada95:
20765 case DW_LANG_Ada83:
20766 case DW_LANG_Cobol74:
20767 case DW_LANG_Cobol85:
20768 case DW_LANG_Modula2:
20769 case DW_LANG_PLI:
20770 return dwarf_version >= 4 ? 1 : -1;
20771 default:
20772 return -1;
20773 }
20774 }
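/* For example, in a DW_LANG_C99 compilation unit the default returned
   above is 0, so add_bound_info below can omit DW_AT_lower_bound entirely
   for an ordinary C array such as `int a[10]'.  */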
20775
20776 /* Given a tree node describing an array bound (either lower or upper) output
20777 a representation for that bound. */
20778
20779 static void
20780 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20781 tree bound, struct loc_descr_context *context)
20782 {
20783 int dflt;
20784
20785 while (1)
20786 switch (TREE_CODE (bound))
20787 {
20788 /* Strip all conversions. */
20789 CASE_CONVERT:
20790 case VIEW_CONVERT_EXPR:
20791 bound = TREE_OPERAND (bound, 0);
20792 break;
20793
20794 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20795 are even omitted when they are the default. */
20796 case INTEGER_CST:
20797 /* If the value for this bound is the default one, we can even omit the
20798 attribute. */
20799 if (bound_attr == DW_AT_lower_bound
20800 && tree_fits_shwi_p (bound)
20801 && (dflt = lower_bound_default ()) != -1
20802 && tree_to_shwi (bound) == dflt)
20803 return;
20804
20805 /* FALLTHRU */
20806
20807 default:
20808 /* Because of the complex interactions there can be with other GNAT
20809 encodings, GDB isn't ready yet to handle a proper DWARF description
20810 for self-referential subrange bounds: let GNAT encodings do the
20811 magic in such a case. */
20812 if (is_ada ()
20813 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20814 && contains_placeholder_p (bound))
20815 return;
20816
20817 add_scalar_info (subrange_die, bound_attr, bound,
20818 dw_scalar_form_constant
20819 | dw_scalar_form_exprloc
20820 | dw_scalar_form_reference,
20821 context);
20822 return;
20823 }
20824 }
20825
20826 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20827 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20828 Note that the block of subscript information for an array type also
20829 includes information about the element type of the given array type.
20830
20831 This function reuses previously set type and bound information if
20832 available. */
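/* For instance (a sketch, assuming the usual C array domains): for
   `int a[3][5]' with COLLAPSE_P true, the loop below runs twice and
   TYPE_DIE ends up with two DW_TAG_subrange_type children whose
   DW_AT_upper_bound values are 2 and 4.  */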
20833
20834 static void
20835 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20836 {
20837 unsigned dimension_number;
20838 tree lower, upper;
20839 dw_die_ref child = type_die->die_child;
20840
20841 for (dimension_number = 0;
20842 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20843 type = TREE_TYPE (type), dimension_number++)
20844 {
20845 tree domain = TYPE_DOMAIN (type);
20846
20847 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20848 break;
20849
20850 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20851 and (in GNU C only) variable bounds. Handle all three forms
20852 here. */
20853
20854 /* Find and reuse a previously generated DW_TAG_subrange_type if
20855 available.
20856
20857 For multi-dimensional arrays, as we iterate through the
20858 various dimensions in the enclosing for loop above, we also
20859 iterate through the DIE children and pick up each
20860 DW_TAG_subrange_type previously generated (if available).
20861 Each child DW_TAG_subrange_type DIE describes the range of
20862 the current dimension. At this point we should have as many
20863 DW_TAG_subrange_type DIEs as we have dimensions in the
20864 array. */
20865 dw_die_ref subrange_die = NULL;
20866 if (child)
20867 while (1)
20868 {
20869 child = child->die_sib;
20870 if (child->die_tag == DW_TAG_subrange_type)
20871 subrange_die = child;
20872 if (child == type_die->die_child)
20873 {
20874 /* If we wrapped around, stop looking next time. */
20875 child = NULL;
20876 break;
20877 }
20878 if (child->die_tag == DW_TAG_subrange_type)
20879 break;
20880 }
20881 if (!subrange_die)
20882 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20883
20884 if (domain)
20885 {
20886 /* We have an array type with specified bounds. */
20887 lower = TYPE_MIN_VALUE (domain);
20888 upper = TYPE_MAX_VALUE (domain);
20889
20890 /* Define the index type. */
20891 if (TREE_TYPE (domain)
20892 && !get_AT (subrange_die, DW_AT_type))
20893 {
20894 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20895 TREE_TYPE field. We can't emit debug info for this
20896 because it is an unnamed integral type. */
20897 if (TREE_CODE (domain) == INTEGER_TYPE
20898 && TYPE_NAME (domain) == NULL_TREE
20899 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20900 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20901 ;
20902 else
20903 add_type_attribute (subrange_die, TREE_TYPE (domain),
20904 TYPE_UNQUALIFIED, false, type_die);
20905 }
20906
20907 /* ??? If upper is NULL, the array has unspecified length,
20908 but it does have a lower bound. This happens with Fortran
20909 dimension arr(N:*).
20910 Since the debugger is definitely going to need to know N
20911 to produce useful results, go ahead and output the lower
20912 bound solo, and hope the debugger can cope. */
20913
20914 if (!get_AT (subrange_die, DW_AT_lower_bound))
20915 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20916 if (upper && !get_AT (subrange_die, DW_AT_upper_bound))
20917 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20918 }
20919
20920 /* Otherwise we have an array type with an unspecified length. The
20921 DWARF-2 spec does not say how to handle this; let's just leave out the
20922 bounds. */
20923 }
20924 }
20925
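/* To illustrate the collapsing behavior above: a C declaration such as

     int m[2][3];

   is represented by the front end as an array of arrays, yet (with
   COLLAPSE_P true) a single DW_TAG_array_type DIE is produced with two
   DW_TAG_subrange_type children whose upper bounds are 1 and 2; the
   default lower bound of 0 is omitted.  For Ada the nested representation
   is kept instead.  Illustrative sketch only.  */
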
20926 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
20927
20928 static void
20929 add_byte_size_attribute (dw_die_ref die, tree tree_node)
20930 {
20931 dw_die_ref decl_die;
20932 HOST_WIDE_INT size;
20933 dw_loc_descr_ref size_expr = NULL;
20934
20935 switch (TREE_CODE (tree_node))
20936 {
20937 case ERROR_MARK:
20938 size = 0;
20939 break;
20940 case ENUMERAL_TYPE:
20941 case RECORD_TYPE:
20942 case UNION_TYPE:
20943 case QUAL_UNION_TYPE:
20944 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
20945 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
20946 {
20947 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
20948 return;
20949 }
20950 size_expr = type_byte_size (tree_node, &size);
20951 break;
20952 case FIELD_DECL:
20953 /* For a data member of a struct or union, the DW_AT_byte_size is
20954 generally given as the number of bytes normally allocated for an
20955 object of the *declared* type of the member itself. This is true
20956 even for bit-fields. */
20957 size = int_size_in_bytes (field_type (tree_node));
20958 break;
20959 default:
20960 gcc_unreachable ();
20961 }
20962
20963 /* Support for dynamically-sized objects was introduced by DWARFv3.
20964 At the moment, GDB does not handle variable byte sizes very well,
20965 though. */
20966 if ((dwarf_version >= 3 || !dwarf_strict)
20967 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
20968 && size_expr != NULL)
20969 add_AT_loc (die, DW_AT_byte_size, size_expr);
20970
20971 /* Note that `size' might be -1 when we get to this point. If it is, that
20972 indicates that the byte size of the entity in question is variable and
20973 that we could not generate a DWARF expression that computes it. */
20974 if (size >= 0)
20975 add_AT_unsigned (die, DW_AT_byte_size, size);
20976 }
20977
20978 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
20979 alignment. */
20980
20981 static void
20982 add_alignment_attribute (dw_die_ref die, tree tree_node)
20983 {
20984 if (dwarf_version < 5 && dwarf_strict)
20985 return;
20986
20987 unsigned align;
20988
20989 if (DECL_P (tree_node))
20990 {
20991 if (!DECL_USER_ALIGN (tree_node))
20992 return;
20993
20994 align = DECL_ALIGN_UNIT (tree_node);
20995 }
20996 else if (TYPE_P (tree_node))
20997 {
20998 if (!TYPE_USER_ALIGN (tree_node))
20999 return;
21000
21001 align = TYPE_ALIGN_UNIT (tree_node);
21002 }
21003 else
21004 gcc_unreachable ();
21005
21006 add_AT_unsigned (die, DW_AT_alignment, align);
21007 }
21008
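/* For example, assuming a target that honors the requested alignment, a
   declaration such as

     _Alignas (16) int x;

   has DECL_USER_ALIGN set, so the DIE for `x' receives DW_AT_alignment 16,
   whereas a plain `int x;' with only its default ABI alignment gets no
   DW_AT_alignment attribute at all.  Illustrative only.  */
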
21009 /* For a FIELD_DECL node which represents a bit-field, output an attribute
21010 which specifies the distance in bits from the highest order bit of the
21011 "containing object" for the bit-field to the highest order bit of the
21012 bit-field itself.
21013
21014 For any given bit-field, the "containing object" is a hypothetical object
21015 (of some integral or enum type) within which the given bit-field lives. The
21016 type of this hypothetical "containing object" is always the same as the
21017 declared type of the individual bit-field itself. The determination of the
21018 exact location of the "containing object" for a bit-field is rather
21019 complicated. It's handled by the `field_byte_offset' function (above).
21020
21021 CTX is required: see the comment for VLR_CONTEXT.
21022
21023 Note that it is the size (in bytes) of the hypothetical "containing object"
21024 which will be given in the DW_AT_byte_size attribute for this bit-field.
21025 (See `add_byte_size_attribute' above.) */
21026
21027 static inline void
21028 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
21029 {
21030 HOST_WIDE_INT object_offset_in_bytes;
21031 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21032 HOST_WIDE_INT bitpos_int;
21033 HOST_WIDE_INT highest_order_object_bit_offset;
21034 HOST_WIDE_INT highest_order_field_bit_offset;
21035 HOST_WIDE_INT bit_offset;
21036
21037 field_byte_offset (decl, ctx, &object_offset_in_bytes);
21038
21039 /* Must be a field and a bit field. */
21040 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21041
21042 /* We can't yet handle bit-fields whose offsets are variable, so if we
21043 encounter such things, just return without generating any attribute
21044 whatsoever. Likewise for variable or too large size. */
21045 if (! tree_fits_shwi_p (bit_position (decl))
21046 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21047 return;
21048
21049 bitpos_int = int_bit_position (decl);
21050
21051 /* Note that the bit offset is always the distance (in bits) from the
21052 highest-order bit of the "containing object" to the highest-order bit of
21053 the bit-field itself. Since the "high-order end" of any object or field
21054 is different on big-endian and little-endian machines, the computation
21055 below must take account of these differences. */
21056 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21057 highest_order_field_bit_offset = bitpos_int;
21058
21059 if (! BYTES_BIG_ENDIAN)
21060 {
21061 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21062 highest_order_object_bit_offset +=
21063 simple_type_size_in_bits (original_type);
21064 }
21065
21066 bit_offset
21067 = (! BYTES_BIG_ENDIAN
21068 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21069 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21070
21071 if (bit_offset < 0)
21072 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21073 else
21074 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21075 }
21076
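/* A worked example, assuming a little-endian target with a 32-bit int:
   for

     struct s { unsigned int f : 3; };

   the "containing object" for `f' is a 32-bit unsigned int at byte
   offset 0, so highest_order_object_bit_offset becomes 32,
   highest_order_field_bit_offset becomes 3, and DW_AT_bit_offset is
   emitted as 29.  On a big-endian target the same field would get
   DW_AT_bit_offset 0.  The numbers are illustrative only.  */
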
21077 /* For a FIELD_DECL node which represents a bit field, output an attribute
21078 which specifies the length in bits of the given field. */
21079
21080 static inline void
21081 add_bit_size_attribute (dw_die_ref die, tree decl)
21082 {
21083 /* Must be a field and a bit field. */
21084 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21085 && DECL_BIT_FIELD_TYPE (decl));
21086
21087 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21088 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21089 }
21090
21091 /* If the compiled language is ANSI C, then add a 'prototyped'
21092 attribute if argument types are given for the parameters of a function. */
21093
21094 static inline void
21095 add_prototyped_attribute (dw_die_ref die, tree func_type)
21096 {
21097 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21098 {
21099 case DW_LANG_C:
21100 case DW_LANG_C89:
21101 case DW_LANG_C99:
21102 case DW_LANG_C11:
21103 case DW_LANG_ObjC:
21104 if (prototype_p (func_type))
21105 add_AT_flag (die, DW_AT_prototyped, 1);
21106 break;
21107 default:
21108 break;
21109 }
21110 }
21111
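/* For example, in C a declaration with a prototype, such as

     int f (void);

   gets DW_AT_prototyped, whereas an old-style (K&R) declaration like

     int g ();

   does not, since prototype_p () is false for it.  Illustrative only.  */
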
21112 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21113 by looking in the type declaration, the object declaration equate table or
21114 the block mapping. */
21115
21116 static inline dw_die_ref
21117 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21118 {
21119 dw_die_ref origin_die = NULL;
21120
21121 if (DECL_P (origin))
21122 {
21123 dw_die_ref c;
21124 origin_die = lookup_decl_die (origin);
21125 /* "Unwrap" the decls DIE which we put in the imported unit context.
21126 We are looking for the abstract copy here. */
21127 if (in_lto_p
21128 && origin_die
21129 && (c = get_AT_ref (origin_die, DW_AT_abstract_origin))
21130 /* ??? Identify this better. */
21131 && c->with_offset)
21132 origin_die = c;
21133 }
21134 else if (TYPE_P (origin))
21135 origin_die = lookup_type_die (origin);
21136 else if (TREE_CODE (origin) == BLOCK)
21137 origin_die = BLOCK_DIE (origin);
21138
21139 /* XXX: Functions that are never lowered don't always have correct block
21140 trees (in the case of Java they simply have no block tree; the same can
21141 be true in some other languages). For these functions, there is nothing
21142 we can really do to output correct debug info for inlined functions in
21143 all cases. Rather than die, we'll just produce deficient debug info now,
21144 in that we will have variables without a proper abstract origin. In the
21145 future, when all functions are lowered, we should re-add a
21146 gcc_assert (origin_die) here. */
21147
21148 if (origin_die)
21149 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21150 return origin_die;
21151 }
21152
21153 /* We do not currently support the pure_virtual attribute. */
21154
21155 static inline void
21156 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21157 {
21158 if (DECL_VINDEX (func_decl))
21159 {
21160 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21161
21162 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21163 add_AT_loc (die, DW_AT_vtable_elem_location,
21164 new_loc_descr (DW_OP_constu,
21165 tree_to_shwi (DECL_VINDEX (func_decl)),
21166 0));
21167
21168 /* GNU extension: Record what type this method came from originally. */
21169 if (debug_info_level > DINFO_LEVEL_TERSE
21170 && DECL_CONTEXT (func_decl))
21171 add_AT_die_ref (die, DW_AT_containing_type,
21172 lookup_type_die (DECL_CONTEXT (func_decl)));
21173 }
21174 }
21175 \f
21176 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21177 given decl. This used to be a vendor extension until DWARF 4
21178 standardized it. */
21179
21180 static void
21181 add_linkage_attr (dw_die_ref die, tree decl)
21182 {
21183 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21184
21185 /* Mimic what assemble_name_raw does with a leading '*'. */
21186 if (name[0] == '*')
21187 name = &name[1];
21188
21189 if (dwarf_version >= 4)
21190 add_AT_string (die, DW_AT_linkage_name, name);
21191 else
21192 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21193 }
21194
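/* As an illustration, for a C++ function such as

     namespace ns { int f (int); }

   the assembler name is the mangled symbol (typically "_ZN2ns1fEi" under
   the Itanium C++ ABI) and that string, rather than the source name "f",
   becomes the value of the linkage name attribute.  The mangled form shown
   is only an example and depends on the target ABI.  */
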
21195 /* Add source coordinate attributes for the given decl. */
21196
21197 static void
21198 add_src_coords_attributes (dw_die_ref die, tree decl)
21199 {
21200 expanded_location s;
21201
21202 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21203 return;
21204 s = expand_location (DECL_SOURCE_LOCATION (decl));
21205 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21206 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21207 if (debug_column_info && s.column)
21208 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21209 }
21210
21211 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21212
21213 static void
21214 add_linkage_name_raw (dw_die_ref die, tree decl)
21215 {
21216 /* Defer until we have an assembler name set. */
21217 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21218 {
21219 limbo_die_node *asm_name;
21220
21221 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21222 asm_name->die = die;
21223 asm_name->created_for = decl;
21224 asm_name->next = deferred_asm_name;
21225 deferred_asm_name = asm_name;
21226 }
21227 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21228 add_linkage_attr (die, decl);
21229 }
21230
21231 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21232
21233 static void
21234 add_linkage_name (dw_die_ref die, tree decl)
21235 {
21236 if (debug_info_level > DINFO_LEVEL_NONE
21237 && VAR_OR_FUNCTION_DECL_P (decl)
21238 && TREE_PUBLIC (decl)
21239 && !(VAR_P (decl) && DECL_REGISTER (decl))
21240 && die->die_tag != DW_TAG_member)
21241 add_linkage_name_raw (die, decl);
21242 }
21243
21244 /* Add a DW_AT_name attribute and source coordinate attribute for the
21245 given decl, but only if it actually has a name. */
21246
21247 static void
21248 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21249 bool no_linkage_name)
21250 {
21251 tree decl_name;
21252
21253 decl_name = DECL_NAME (decl);
21254 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21255 {
21256 const char *name = dwarf2_name (decl, 0);
21257 if (name)
21258 add_name_attribute (die, name);
21259 if (! DECL_ARTIFICIAL (decl))
21260 add_src_coords_attributes (die, decl);
21261
21262 if (!no_linkage_name)
21263 add_linkage_name (die, decl);
21264 }
21265
21266 #ifdef VMS_DEBUGGING_INFO
21267 /* Get the function's name, as described by its RTL. This may be different
21268 from the DECL_NAME name used in the source file. */
21269 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21270 {
21271 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21272 XEXP (DECL_RTL (decl), 0), false);
21273 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21274 }
21275 #endif /* VMS_DEBUGGING_INFO */
21276 }
21277
21278 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21279
21280 static void
21281 add_discr_value (dw_die_ref die, dw_discr_value *value)
21282 {
21283 dw_attr_node attr;
21284
21285 attr.dw_attr = DW_AT_discr_value;
21286 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21287 attr.dw_attr_val.val_entry = NULL;
21288 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21289 if (value->pos)
21290 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21291 else
21292 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21293 add_dwarf_attr (die, &attr);
21294 }
21295
21296 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21297
21298 static void
21299 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21300 {
21301 dw_attr_node attr;
21302
21303 attr.dw_attr = DW_AT_discr_list;
21304 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21305 attr.dw_attr_val.val_entry = NULL;
21306 attr.dw_attr_val.v.val_discr_list = discr_list;
21307 add_dwarf_attr (die, &attr);
21308 }
21309
21310 static inline dw_discr_list_ref
21311 AT_discr_list (dw_attr_node *attr)
21312 {
21313 return attr->dw_attr_val.v.val_discr_list;
21314 }
21315
21316 #ifdef VMS_DEBUGGING_INFO
21317 /* Output the debug main pointer DIE for VMS. */
21318
21319 void
21320 dwarf2out_vms_debug_main_pointer (void)
21321 {
21322 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21323 dw_die_ref die;
21324
21325 /* Allocate the VMS debug main subprogram die. */
21326 die = new_die_raw (DW_TAG_subprogram);
21327 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21328 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21329 current_function_funcdef_no);
21330 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21331
21332 /* Make it the first child of comp_unit_die (). */
21333 die->die_parent = comp_unit_die ();
21334 if (comp_unit_die ()->die_child)
21335 {
21336 die->die_sib = comp_unit_die ()->die_child->die_sib;
21337 comp_unit_die ()->die_child->die_sib = die;
21338 }
21339 else
21340 {
21341 die->die_sib = die;
21342 comp_unit_die ()->die_child = die;
21343 }
21344 }
21345 #endif /* VMS_DEBUGGING_INFO */
21346
21347 /* Push a new declaration scope. */
21348
21349 static void
21350 push_decl_scope (tree scope)
21351 {
21352 vec_safe_push (decl_scope_table, scope);
21353 }
21354
21355 /* Pop a declaration scope. */
21356
21357 static inline void
21358 pop_decl_scope (void)
21359 {
21360 decl_scope_table->pop ();
21361 }
21362
21363 /* walk_tree helper function for uses_local_type, below. */
21364
21365 static tree
21366 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21367 {
21368 if (!TYPE_P (*tp))
21369 *walk_subtrees = 0;
21370 else
21371 {
21372 tree name = TYPE_NAME (*tp);
21373 if (name && DECL_P (name) && decl_function_context (name))
21374 return *tp;
21375 }
21376 return NULL_TREE;
21377 }
21378
21379 /* If TYPE involves a function-local type (including a local typedef to a
21380 non-local type), returns that type; otherwise returns NULL_TREE. */
21381
21382 static tree
21383 uses_local_type (tree type)
21384 {
21385 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21386 return used;
21387 }
21388
21389 /* Return the DIE for the scope that immediately contains this type.
21390 Non-named types that do not involve a function-local type get global
21391 scope. Named types nested in namespaces or other types get their
21392 containing scope. All other types (i.e. function-local named types) get
21393 the current active scope. */
21394
21395 static dw_die_ref
21396 scope_die_for (tree t, dw_die_ref context_die)
21397 {
21398 dw_die_ref scope_die = NULL;
21399 tree containing_scope;
21400
21401 /* Non-types always go in the current scope. */
21402 gcc_assert (TYPE_P (t));
21403
21404 /* Use the scope of the typedef, rather than the scope of the type
21405 it refers to. */
21406 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21407 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21408 else
21409 containing_scope = TYPE_CONTEXT (t);
21410
21411 /* Use the containing namespace if there is one. */
21412 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21413 {
21414 if (context_die == lookup_decl_die (containing_scope))
21415 /* OK */;
21416 else if (debug_info_level > DINFO_LEVEL_TERSE)
21417 context_die = get_context_die (containing_scope);
21418 else
21419 containing_scope = NULL_TREE;
21420 }
21421
21422 /* Ignore function type "scopes" from the C frontend. They mean that
21423 a tagged type is local to a parmlist of a function declarator, but
21424 that isn't useful to DWARF. */
21425 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21426 containing_scope = NULL_TREE;
21427
21428 if (SCOPE_FILE_SCOPE_P (containing_scope))
21429 {
21430 /* If T uses a local type keep it local as well, to avoid references
21431 to function-local DIEs from outside the function. */
21432 if (current_function_decl && uses_local_type (t))
21433 scope_die = context_die;
21434 else
21435 scope_die = comp_unit_die ();
21436 }
21437 else if (TYPE_P (containing_scope))
21438 {
21439 /* For types, we can just look up the appropriate DIE. */
21440 if (debug_info_level > DINFO_LEVEL_TERSE)
21441 scope_die = get_context_die (containing_scope);
21442 else
21443 {
21444 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21445 if (scope_die == NULL)
21446 scope_die = comp_unit_die ();
21447 }
21448 }
21449 else
21450 scope_die = context_die;
21451
21452 return scope_die;
21453 }
21454
21455 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21456
21457 static inline int
21458 local_scope_p (dw_die_ref context_die)
21459 {
21460 for (; context_die; context_die = context_die->die_parent)
21461 if (context_die->die_tag == DW_TAG_inlined_subroutine
21462 || context_die->die_tag == DW_TAG_subprogram)
21463 return 1;
21464
21465 return 0;
21466 }
21467
21468 /* Returns nonzero if CONTEXT_DIE is a class. */
21469
21470 static inline int
21471 class_scope_p (dw_die_ref context_die)
21472 {
21473 return (context_die
21474 && (context_die->die_tag == DW_TAG_structure_type
21475 || context_die->die_tag == DW_TAG_class_type
21476 || context_die->die_tag == DW_TAG_interface_type
21477 || context_die->die_tag == DW_TAG_union_type));
21478 }
21479
21480 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21481 whether or not to treat a DIE in this context as a declaration. */
21482
21483 static inline int
21484 class_or_namespace_scope_p (dw_die_ref context_die)
21485 {
21486 return (class_scope_p (context_die)
21487 || (context_die && context_die->die_tag == DW_TAG_namespace));
21488 }
21489
21490 /* Many forms of DIEs require a "type description" attribute. This
21491 routine locates the proper "type descriptor" die for the type given
21492 by 'type' plus any additional qualifiers given by 'cv_quals', and
21493 adds a DW_AT_type attribute below the given die. */
21494
21495 static void
21496 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21497 bool reverse, dw_die_ref context_die)
21498 {
21499 enum tree_code code = TREE_CODE (type);
21500 dw_die_ref type_die = NULL;
21501
21502 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21503 or fixed-point type, use the inner type. This is because we have no
21504 support for unnamed types in base_type_die. This can happen if this is
21505 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
21506 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21507 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21508 type = TREE_TYPE (type), code = TREE_CODE (type);
21509
21510 if (code == ERROR_MARK
21511 /* Handle a special case. For functions whose return type is void, we
21512 generate *no* type attribute. (Note that no object may have type
21513 `void', so this only applies to function return types). */
21514 || code == VOID_TYPE)
21515 return;
21516
21517 type_die = modified_type_die (type,
21518 cv_quals | TYPE_QUALS (type),
21519 reverse,
21520 context_die);
21521
21522 if (type_die != NULL)
21523 add_AT_die_ref (object_die, DW_AT_type, type_die);
21524 }
21525
21526 /* Given an object die, add the calling convention attribute for the
21527 function call type. */
21528 static void
21529 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21530 {
21531 enum dwarf_calling_convention value = DW_CC_normal;
21532
21533 value = ((enum dwarf_calling_convention)
21534 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21535
21536 if (is_fortran ()
21537 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21538 {
21539 /* DWARF 2 doesn't provide a way to identify a program's source-level
21540 entry point. DW_AT_calling_convention attributes are only meant
21541 to describe functions' calling conventions. However, lacking a
21542 better way to signal the Fortran main program, we used this for
21543 a long time, following existing custom. Now, DWARF 4 has
21544 DW_AT_main_subprogram, which we add below, but some tools still
21545 rely on the old way, which we thus keep. */
21546 value = DW_CC_program;
21547
21548 if (dwarf_version >= 4 || !dwarf_strict)
21549 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21550 }
21551
21552 /* Only add the attribute if the backend requests it, and the value
21553 is not DW_CC_normal. */
21554 if (value && (value != DW_CC_normal))
21555 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21556 }
21557
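/* For instance, a Fortran main program

     program p
     end program p

   is emitted by the Fortran front end under the assembler name MAIN__, so
   its DW_TAG_subprogram DIE gets DW_AT_calling_convention DW_CC_program
   and, for DWARF 4 or non-strict DWARF, the DW_AT_main_subprogram flag as
   well.  Illustrative example only.  */
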
21558 /* Given a tree pointer to a struct, class, union, or enum type node, return
21559 a pointer to the (string) tag name for the given type, or zero if the type
21560 was declared without a tag. */
21561
21562 static const char *
21563 type_tag (const_tree type)
21564 {
21565 const char *name = 0;
21566
21567 if (TYPE_NAME (type) != 0)
21568 {
21569 tree t = 0;
21570
21571 /* Find the IDENTIFIER_NODE for the type name. */
21572 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21573 && !TYPE_NAMELESS (type))
21574 t = TYPE_NAME (type);
21575
21576 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21577 a TYPE_DECL node, regardless of whether or not a `typedef' was
21578 involved. */
21579 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21580 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21581 {
21582 /* We want to be extra verbose. Don't call dwarf_name if
21583 DECL_NAME isn't set. The default hook for decl_printable_name
21584 doesn't like that, and in this context it's correct to return
21585 0, instead of "<anonymous>" or the like. */
21586 if (DECL_NAME (TYPE_NAME (type))
21587 && !DECL_NAMELESS (TYPE_NAME (type)))
21588 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21589 }
21590
21591 /* Now get the name as a string, or invent one. */
21592 if (!name && t != 0)
21593 name = IDENTIFIER_POINTER (t);
21594 }
21595
21596 return (name == 0 || *name == '\0') ? 0 : name;
21597 }
21598
21599 /* Return the type associated with a data member, making a special check
21600 for bit-field types. */
21601
21602 static inline tree
21603 member_declared_type (const_tree member)
21604 {
21605 return (DECL_BIT_FIELD_TYPE (member)
21606 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21607 }
21608
21609 /* Get the decl's label, as described by its RTL. This may be different
21610 from the DECL_NAME name used in the source file. */
21611
21612 #if 0
21613 static const char *
21614 decl_start_label (tree decl)
21615 {
21616 rtx x;
21617 const char *fnname;
21618
21619 x = DECL_RTL (decl);
21620 gcc_assert (MEM_P (x));
21621
21622 x = XEXP (x, 0);
21623 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21624
21625 fnname = XSTR (x, 0);
21626 return fnname;
21627 }
21628 #endif
21629 \f
21630 /* For variable-length arrays that have been previously generated, but
21631 may be incomplete due to missing subscript info, fill the subscript
21632 info. Return TRUE if this is one of those cases. */
21633 static bool
21634 fill_variable_array_bounds (tree type)
21635 {
21636 if (TREE_ASM_WRITTEN (type)
21637 && TREE_CODE (type) == ARRAY_TYPE
21638 && variably_modified_type_p (type, NULL))
21639 {
21640 dw_die_ref array_die = lookup_type_die (type);
21641 if (!array_die)
21642 return false;
21643 add_subscript_info (array_die, type, !is_ada ());
21644 return true;
21645 }
21646 return false;
21647 }
21648
21649 /* These routines generate the internal representation of the DIE's for
21650 the compilation unit. Debugging information is collected by walking
21651 the declaration trees passed in from dwarf2out_decl(). */
21652
21653 static void
21654 gen_array_type_die (tree type, dw_die_ref context_die)
21655 {
21656 dw_die_ref array_die;
21657
21658 /* GNU compilers represent multidimensional array types as sequences of one
21659 dimensional array types whose element types are themselves array types.
21660 We sometimes squish that down to a single array_type DIE with multiple
21661 subscripts in the Dwarf debugging info. The draft Dwarf specification
21662 says that we are allowed to do this kind of compression in C, because
21663 there is no difference between an array of arrays and a multidimensional
21664 array. We don't do this for Ada, to remain as close as possible to the
21665 actual representation, which is especially important given the language's
21666 flexibility with respect to arrays of variable size. */
21667
21668 bool collapse_nested_arrays = !is_ada ();
21669
21670 if (fill_variable_array_bounds (type))
21671 return;
21672
21673 dw_die_ref scope_die = scope_die_for (type, context_die);
21674 tree element_type;
21675
21676 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21677 DW_TAG_string_type doesn't have a DW_AT_type attribute). */
21678 if (TYPE_STRING_FLAG (type)
21679 && TREE_CODE (type) == ARRAY_TYPE
21680 && is_fortran ()
21681 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21682 {
21683 HOST_WIDE_INT size;
21684
21685 array_die = new_die (DW_TAG_string_type, scope_die, type);
21686 add_name_attribute (array_die, type_tag (type));
21687 equate_type_number_to_die (type, array_die);
21688 size = int_size_in_bytes (type);
21689 if (size >= 0)
21690 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21691 /* ??? We can't annotate types late, but for LTO we may not
21692 generate a location early either (gfortran.dg/save_6.f90). */
21693 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21694 && TYPE_DOMAIN (type) != NULL_TREE
21695 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21696 {
21697 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21698 tree rszdecl = szdecl;
21699
21700 size = int_size_in_bytes (TREE_TYPE (szdecl));
21701 if (!DECL_P (szdecl))
21702 {
21703 if (TREE_CODE (szdecl) == INDIRECT_REF
21704 && DECL_P (TREE_OPERAND (szdecl, 0)))
21705 {
21706 rszdecl = TREE_OPERAND (szdecl, 0);
21707 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21708 != DWARF2_ADDR_SIZE)
21709 size = 0;
21710 }
21711 else
21712 size = 0;
21713 }
21714 if (size > 0)
21715 {
21716 dw_loc_list_ref loc
21717 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21718 NULL);
21719 if (loc)
21720 {
21721 add_AT_location_description (array_die, DW_AT_string_length,
21722 loc);
21723 if (size != DWARF2_ADDR_SIZE)
21724 add_AT_unsigned (array_die, dwarf_version >= 5
21725 ? DW_AT_string_length_byte_size
21726 : DW_AT_byte_size, size);
21727 }
21728 }
21729 }
21730 return;
21731 }
21732
21733 array_die = new_die (DW_TAG_array_type, scope_die, type);
21734 add_name_attribute (array_die, type_tag (type));
21735 equate_type_number_to_die (type, array_die);
21736
21737 if (TREE_CODE (type) == VECTOR_TYPE)
21738 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21739
21740 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21741 if (is_fortran ()
21742 && TREE_CODE (type) == ARRAY_TYPE
21743 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21744 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21745 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21746
21747 #if 0
21748 /* We default the array ordering. Debuggers will probably do the right
21749 things even if DW_AT_ordering is not present. It's not even an issue
21750 until we start to get into multidimensional arrays anyway. If a debugger
21751 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21752 then we'll have to put the DW_AT_ordering attribute back in. (But if
21753 and when we find out that we need to put these in, we will only do so
21754 for multidimensional arrays.) */
21755 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21756 #endif
21757
21758 if (TREE_CODE (type) == VECTOR_TYPE)
21759 {
21760 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21761 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21762 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21763 add_bound_info (subrange_die, DW_AT_upper_bound,
21764 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21765 }
21766 else
21767 add_subscript_info (array_die, type, collapse_nested_arrays);
21768
21769 /* Add representation of the type of the elements of this array type and
21770 emit the corresponding DIE if we haven't done it already. */
21771 element_type = TREE_TYPE (type);
21772 if (collapse_nested_arrays)
21773 while (TREE_CODE (element_type) == ARRAY_TYPE)
21774 {
21775 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21776 break;
21777 element_type = TREE_TYPE (element_type);
21778 }
21779
21780 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21781 TREE_CODE (type) == ARRAY_TYPE
21782 && TYPE_REVERSE_STORAGE_ORDER (type),
21783 context_die);
21784
21785 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21786 if (TYPE_ARTIFICIAL (type))
21787 add_AT_flag (array_die, DW_AT_artificial, 1);
21788
21789 if (get_AT (array_die, DW_AT_name))
21790 add_pubtype (type, array_die);
21791
21792 add_alignment_attribute (array_die, type);
21793 }
21794
21795 /* This routine generates a DIE for an array with a hidden descriptor;
21796 details are filled into *info by a langhook. */
21797
21798 static void
21799 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21800 dw_die_ref context_die)
21801 {
21802 const dw_die_ref scope_die = scope_die_for (type, context_die);
21803 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21804 struct loc_descr_context context = { type, info->base_decl, NULL,
21805 false, false };
21806 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21807 int dim;
21808
21809 add_name_attribute (array_die, type_tag (type));
21810 equate_type_number_to_die (type, array_die);
21811
21812 if (info->ndimensions > 1)
21813 switch (info->ordering)
21814 {
21815 case array_descr_ordering_row_major:
21816 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21817 break;
21818 case array_descr_ordering_column_major:
21819 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21820 break;
21821 default:
21822 break;
21823 }
21824
21825 if (dwarf_version >= 3 || !dwarf_strict)
21826 {
21827 if (info->data_location)
21828 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21829 dw_scalar_form_exprloc, &context);
21830 if (info->associated)
21831 add_scalar_info (array_die, DW_AT_associated, info->associated,
21832 dw_scalar_form_constant
21833 | dw_scalar_form_exprloc
21834 | dw_scalar_form_reference, &context);
21835 if (info->allocated)
21836 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21837 dw_scalar_form_constant
21838 | dw_scalar_form_exprloc
21839 | dw_scalar_form_reference, &context);
21840 if (info->stride)
21841 {
21842 const enum dwarf_attribute attr
21843 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21844 const int forms
21845 = (info->stride_in_bits)
21846 ? dw_scalar_form_constant
21847 : (dw_scalar_form_constant
21848 | dw_scalar_form_exprloc
21849 | dw_scalar_form_reference);
21850
21851 add_scalar_info (array_die, attr, info->stride, forms, &context);
21852 }
21853 }
21854 if (dwarf_version >= 5)
21855 {
21856 if (info->rank)
21857 {
21858 add_scalar_info (array_die, DW_AT_rank, info->rank,
21859 dw_scalar_form_constant
21860 | dw_scalar_form_exprloc, &context);
21861 subrange_tag = DW_TAG_generic_subrange;
21862 context.placeholder_arg = true;
21863 }
21864 }
21865
21866 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21867
21868 for (dim = 0; dim < info->ndimensions; dim++)
21869 {
21870 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21871
21872 if (info->dimen[dim].bounds_type)
21873 add_type_attribute (subrange_die,
21874 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21875 false, context_die);
21876 if (info->dimen[dim].lower_bound)
21877 add_bound_info (subrange_die, DW_AT_lower_bound,
21878 info->dimen[dim].lower_bound, &context);
21879 if (info->dimen[dim].upper_bound)
21880 add_bound_info (subrange_die, DW_AT_upper_bound,
21881 info->dimen[dim].upper_bound, &context);
21882 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21883 add_scalar_info (subrange_die, DW_AT_byte_stride,
21884 info->dimen[dim].stride,
21885 dw_scalar_form_constant
21886 | dw_scalar_form_exprloc
21887 | dw_scalar_form_reference,
21888 &context);
21889 }
21890
21891 gen_type_die (info->element_type, context_die);
21892 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21893 TREE_CODE (type) == ARRAY_TYPE
21894 && TYPE_REVERSE_STORAGE_ORDER (type),
21895 context_die);
21896
21897 if (get_AT (array_die, DW_AT_name))
21898 add_pubtype (type, array_die);
21899
21900 add_alignment_attribute (array_die, type);
21901 }
21902
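/* As an illustrative example, a Fortran allocatable array such as

     real, allocatable :: a(:, :)

   reaches this routine with a descriptor-based *info, so the resulting
   DW_TAG_array_type typically carries DW_AT_data_location, DW_AT_allocated
   and per-dimension bound/stride expressions taken from the descriptor
   fields rather than constant bounds.  Exactly which attributes are filled
   in depends on the language hook.  */
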
21903 #if 0
21904 static void
21905 gen_entry_point_die (tree decl, dw_die_ref context_die)
21906 {
21907 tree origin = decl_ultimate_origin (decl);
21908 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21909
21910 if (origin != NULL)
21911 add_abstract_origin_attribute (decl_die, origin);
21912 else
21913 {
21914 add_name_and_src_coords_attributes (decl_die, decl);
21915 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21916 TYPE_UNQUALIFIED, false, context_die);
21917 }
21918
21919 if (DECL_ABSTRACT_P (decl))
21920 equate_decl_number_to_die (decl, decl_die);
21921 else
21922 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21923 }
21924 #endif
21925
21926 /* Walk through the list of incomplete types again, trying once more to
21927 emit full debugging info for them. */
21928
21929 static void
21930 retry_incomplete_types (void)
21931 {
21932 set_early_dwarf s;
21933 int i;
21934
21935 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
21936 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
21937 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
21938 vec_safe_truncate (incomplete_types, 0);
21939 }
21940
21941 /* Determine what tag to use for a record type. */
21942
21943 static enum dwarf_tag
21944 record_type_tag (tree type)
21945 {
21946 if (! lang_hooks.types.classify_record)
21947 return DW_TAG_structure_type;
21948
21949 switch (lang_hooks.types.classify_record (type))
21950 {
21951 case RECORD_IS_STRUCT:
21952 return DW_TAG_structure_type;
21953
21954 case RECORD_IS_CLASS:
21955 return DW_TAG_class_type;
21956
21957 case RECORD_IS_INTERFACE:
21958 if (dwarf_version >= 3 || !dwarf_strict)
21959 return DW_TAG_interface_type;
21960 return DW_TAG_structure_type;
21961
21962 default:
21963 gcc_unreachable ();
21964 }
21965 }
21966
21967 /* Generate a DIE to represent an enumeration type. Note that these DIEs
21968 include all of the information about the enumeration values also. Each
21969 enumerated type name/value is listed as a child of the enumerated type
21970 DIE. */
21971
21972 static dw_die_ref
21973 gen_enumeration_type_die (tree type, dw_die_ref context_die)
21974 {
21975 dw_die_ref type_die = lookup_type_die (type);
21976 dw_die_ref orig_type_die = type_die;
21977
21978 if (type_die == NULL)
21979 {
21980 type_die = new_die (DW_TAG_enumeration_type,
21981 scope_die_for (type, context_die), type);
21982 equate_type_number_to_die (type, type_die);
21983 add_name_attribute (type_die, type_tag (type));
21984 if ((dwarf_version >= 4 || !dwarf_strict)
21985 && ENUM_IS_SCOPED (type))
21986 add_AT_flag (type_die, DW_AT_enum_class, 1);
21987 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
21988 add_AT_flag (type_die, DW_AT_declaration, 1);
21989 if (!dwarf_strict)
21990 add_AT_unsigned (type_die, DW_AT_encoding,
21991 TYPE_UNSIGNED (type)
21992 ? DW_ATE_unsigned
21993 : DW_ATE_signed);
21994 }
21995 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
21996 return type_die;
21997 else
21998 remove_AT (type_die, DW_AT_declaration);
21999
22000 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
22001 given enum type is incomplete, do not generate the DW_AT_byte_size
22002 attribute or the DW_AT_element_list attribute. */
22003 if (TYPE_SIZE (type))
22004 {
22005 tree link;
22006
22007 if (!ENUM_IS_OPAQUE (type))
22008 TREE_ASM_WRITTEN (type) = 1;
22009 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
22010 add_byte_size_attribute (type_die, type);
22011 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
22012 add_alignment_attribute (type_die, type);
22013 if ((dwarf_version >= 3 || !dwarf_strict)
22014 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
22015 {
22016 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
22017 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
22018 context_die);
22019 }
22020 if (TYPE_STUB_DECL (type) != NULL_TREE)
22021 {
22022 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22023 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22024 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22025 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22026 }
22027
22028 /* If the first reference to this type was as the return type of an
22029 inline function, then it may not have a parent. Fix this now. */
22030 if (type_die->die_parent == NULL)
22031 add_child_die (scope_die_for (type, context_die), type_die);
22032
22033 for (link = TYPE_VALUES (type);
22034 link != NULL; link = TREE_CHAIN (link))
22035 {
22036 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22037 tree value = TREE_VALUE (link);
22038
22039 gcc_assert (!ENUM_IS_OPAQUE (type));
22040 add_name_attribute (enum_die,
22041 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22042
22043 if (TREE_CODE (value) == CONST_DECL)
22044 value = DECL_INITIAL (value);
22045
22046 if (simple_type_size_in_bits (TREE_TYPE (value))
22047 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22048 {
22049 /* For constant forms created by add_AT_unsigned, DWARF
22050 consumers (GDB, elfutils, etc.) always zero-extend
22051 the value. Only when the actual value is negative
22052 do we need to use add_AT_int to generate a constant
22053 form that can represent negative values. */
22054 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22055 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22056 add_AT_unsigned (enum_die, DW_AT_const_value,
22057 (unsigned HOST_WIDE_INT) val);
22058 else
22059 add_AT_int (enum_die, DW_AT_const_value, val);
22060 }
22061 else
22062 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22063 that here. TODO: This should be re-worked to use correct
22064 signed/unsigned double tags for all cases. */
22065 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22066 }
22067
22068 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22069 if (TYPE_ARTIFICIAL (type)
22070 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22071 add_AT_flag (type_die, DW_AT_artificial, 1);
22072 }
22073 else
22074 add_AT_flag (type_die, DW_AT_declaration, 1);
22075
22076 add_pubtype (type, type_die);
22077
22078 return type_die;
22079 }
22080
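/* For example, a C enumeration like

     enum e { A = -1, B = 5 };

   produces a DW_TAG_enumeration_type DIE with one DW_TAG_enumerator child
   per value; B is emitted via add_AT_unsigned, while A, being negative in
   a signed underlying type, goes through add_AT_int so that the constant
   form can represent it.  Illustrative sketch only.  */
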
22081 /* Generate a DIE to represent either a real live formal parameter decl or to
22082 represent just the type of some formal parameter position in some function
22083 type.
22084
22085 Note that this routine is a bit unusual because its argument may be a
22086 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22087 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22088 node. If it's the former then this function is being called to output a
22089 DIE to represent a formal parameter object (or some inlining thereof). If
22090 it's the latter, then this function is only being called to output a
22091 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22092 argument type of some subprogram type.
22093 If EMIT_NAME_P is true, name and source coordinate attributes
22094 are emitted. */
22095
22096 static dw_die_ref
22097 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22098 dw_die_ref context_die)
22099 {
22100 tree node_or_origin = node ? node : origin;
22101 tree ultimate_origin;
22102 dw_die_ref parm_die = NULL;
22103
22104 if (DECL_P (node_or_origin))
22105 {
22106 parm_die = lookup_decl_die (node);
22107
22108 /* If the contexts differ, we may not be talking about the same
22109 thing.
22110 ??? When in LTO, the DIE parent is the "abstract" copy and the
22111 context_die is the specification "copy". But this whole block
22112 should eventually no longer be needed. */
22113 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
22114 {
22115 if (!DECL_ABSTRACT_P (node))
22116 {
22117 /* This can happen when creating an inlined instance, in
22118 which case we need to create a new DIE that will get
22119 annotated with DW_AT_abstract_origin. */
22120 parm_die = NULL;
22121 }
22122 else
22123 gcc_unreachable ();
22124 }
22125
22126 if (parm_die && parm_die->die_parent == NULL)
22127 {
22128 /* Check that parm_die already has the right attributes that
22129 we would have added below. If any attributes are
22130 missing, fall through to add them. */
22131 if (! DECL_ABSTRACT_P (node_or_origin)
22132 && !get_AT (parm_die, DW_AT_location)
22133 && !get_AT (parm_die, DW_AT_const_value))
22134 /* We are missing location info, and are about to add it. */
22135 ;
22136 else
22137 {
22138 add_child_die (context_die, parm_die);
22139 return parm_die;
22140 }
22141 }
22142 }
22143
22144 /* If we have a previously generated DIE, use it, unless this is a
22145 concrete instance (origin != NULL), in which case we need a new
22146 DIE with a corresponding DW_AT_abstract_origin. */
22147 bool reusing_die;
22148 if (parm_die && origin == NULL)
22149 reusing_die = true;
22150 else
22151 {
22152 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22153 reusing_die = false;
22154 }
22155
22156 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22157 {
22158 case tcc_declaration:
22159 ultimate_origin = decl_ultimate_origin (node_or_origin);
22160 if (node || ultimate_origin)
22161 origin = ultimate_origin;
22162
22163 if (reusing_die)
22164 goto add_location;
22165
22166 if (origin != NULL)
22167 add_abstract_origin_attribute (parm_die, origin);
22168 else if (emit_name_p)
22169 add_name_and_src_coords_attributes (parm_die, node);
22170 if (origin == NULL
22171 || (! DECL_ABSTRACT_P (node_or_origin)
22172 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22173 decl_function_context
22174 (node_or_origin))))
22175 {
22176 tree type = TREE_TYPE (node_or_origin);
22177 if (decl_by_reference_p (node_or_origin))
22178 add_type_attribute (parm_die, TREE_TYPE (type),
22179 TYPE_UNQUALIFIED,
22180 false, context_die);
22181 else
22182 add_type_attribute (parm_die, type,
22183 decl_quals (node_or_origin),
22184 false, context_die);
22185 }
22186 if (origin == NULL && DECL_ARTIFICIAL (node))
22187 add_AT_flag (parm_die, DW_AT_artificial, 1);
22188 add_location:
22189 if (node && node != origin)
22190 equate_decl_number_to_die (node, parm_die);
22191 if (! DECL_ABSTRACT_P (node_or_origin))
22192 add_location_or_const_value_attribute (parm_die, node_or_origin,
22193 node == NULL);
22194
22195 break;
22196
22197 case tcc_type:
22198 /* We were called with some kind of a ..._TYPE node. */
22199 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22200 context_die);
22201 break;
22202
22203 default:
22204 gcc_unreachable ();
22205 }
22206
22207 return parm_die;
22208 }
22209
22210 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22211 child DW_TAG_formal_parameter DIEs representing the arguments of the
22212 parameter pack.
22213
22214 PARM_PACK must be a function parameter pack.
22215 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22216 must point to the subsequent arguments of the function PACK_ARG belongs to.
22217 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22218 If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
22219 following the last one for which a DIE was generated. */
22220
22221 static dw_die_ref
22222 gen_formal_parameter_pack_die (tree parm_pack,
22223 tree pack_arg,
22224 dw_die_ref subr_die,
22225 tree *next_arg)
22226 {
22227 tree arg;
22228 dw_die_ref parm_pack_die;
22229
22230 gcc_assert (parm_pack
22231 && lang_hooks.function_parameter_pack_p (parm_pack)
22232 && subr_die);
22233
22234 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22235 add_src_coords_attributes (parm_pack_die, parm_pack);
22236
22237 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22238 {
22239 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22240 parm_pack))
22241 break;
22242 gen_formal_parameter_die (arg, NULL,
22243 false /* Don't emit name attribute. */,
22244 parm_pack_die);
22245 }
22246 if (next_arg)
22247 *next_arg = arg;
22248 return parm_pack_die;
22249 }
22250
22251 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22252 at the end of an (ANSI prototyped) formal parameter list. */
22253
22254 static void
22255 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22256 {
22257 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22258 }
22259
22260 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22261 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22262 parameters as specified in some function type specification (except for
22263 those which appear as part of a function *definition*). */
22264
22265 static void
22266 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22267 {
22268 tree link;
22269 tree formal_type = NULL;
22270 tree first_parm_type;
22271 tree arg;
22272
22273 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22274 {
22275 arg = DECL_ARGUMENTS (function_or_method_type);
22276 function_or_method_type = TREE_TYPE (function_or_method_type);
22277 }
22278 else
22279 arg = NULL_TREE;
22280
22281 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22282
22283 /* Make our first pass over the list of formal parameter types and output a
22284 DW_TAG_formal_parameter DIE for each one. */
22285 for (link = first_parm_type; link; )
22286 {
22287 dw_die_ref parm_die;
22288
22289 formal_type = TREE_VALUE (link);
22290 if (formal_type == void_type_node)
22291 break;
22292
22293 /* Output a (nameless) DIE to represent the formal parameter itself. */
22294 if (!POINTER_BOUNDS_TYPE_P (formal_type))
22295 {
22296 parm_die = gen_formal_parameter_die (formal_type, NULL,
22297 true /* Emit name attribute. */,
22298 context_die);
22299 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22300 && link == first_parm_type)
22301 {
22302 add_AT_flag (parm_die, DW_AT_artificial, 1);
22303 if (dwarf_version >= 3 || !dwarf_strict)
22304 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22305 }
22306 else if (arg && DECL_ARTIFICIAL (arg))
22307 add_AT_flag (parm_die, DW_AT_artificial, 1);
22308 }
22309
22310 link = TREE_CHAIN (link);
22311 if (arg)
22312 arg = DECL_CHAIN (arg);
22313 }
22314
22315 /* If this function type has an ellipsis, add a
22316 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22317 if (formal_type != void_type_node)
22318 gen_unspecified_parameters_die (function_or_method_type, context_die);
22319
22320 /* Make our second (and final) pass over the list of formal parameter types
22321 and output DIEs to represent those types (as necessary). */
22322 for (link = TYPE_ARG_TYPES (function_or_method_type);
22323 link && TREE_VALUE (link);
22324 link = TREE_CHAIN (link))
22325 gen_type_die (TREE_VALUE (link), context_die);
22326 }
22327
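/* To illustrate: for a function type such as

     int f (int, ...);

   the first pass above emits one nameless DW_TAG_formal_parameter DIE for
   the `int' and then a DW_TAG_unspecified_parameters DIE for the ellipsis.
   For a C++ METHOD_TYPE, the leading `this' parameter is additionally
   marked DW_AT_artificial and referenced via DW_AT_object_pointer on the
   context DIE.  Illustrative only.  */
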
22328 /* We want to generate the DIE for TYPE so that we can generate the
22329 DIE for MEMBER, which has been defined; we will need to refer back
22330 to the member declaration nested within TYPE. If we're trying to
22331 generate minimal debug info for TYPE, processing TYPE won't do the
22332 trick; we need to attach the member declaration by hand. */
22333
22334 static void
22335 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22336 {
22337 gen_type_die (type, context_die);
22338
22339 /* If we're trying to avoid duplicate debug info, we may not have
22340 emitted the member decl for this function. Emit it now. */
22341 if (TYPE_STUB_DECL (type)
22342 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22343 && ! lookup_decl_die (member))
22344 {
22345 dw_die_ref type_die;
22346 gcc_assert (!decl_ultimate_origin (member));
22347
22348 push_decl_scope (type);
22349 type_die = lookup_type_die_strip_naming_typedef (type);
22350 if (TREE_CODE (member) == FUNCTION_DECL)
22351 gen_subprogram_die (member, type_die);
22352 else if (TREE_CODE (member) == FIELD_DECL)
22353 {
22354 /* Ignore the nameless fields that are used to skip bits, but handle
22355 C++ anonymous unions and structs. */
22356 if (DECL_NAME (member) != NULL_TREE
22357 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22358 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22359 {
22360 struct vlr_context vlr_ctx = {
22361 DECL_CONTEXT (member), /* struct_type */
22362 NULL_TREE /* variant_part_offset */
22363 };
22364 gen_type_die (member_declared_type (member), type_die);
22365 gen_field_die (member, &vlr_ctx, type_die);
22366 }
22367 }
22368 else
22369 gen_variable_die (member, NULL_TREE, type_die);
22370
22371 pop_decl_scope ();
22372 }
22373 }
22374 \f
22375 /* Forward declare these functions, because they are mutually recursive
22376 with their set_block_* pairing functions. */
22377 static void set_decl_origin_self (tree);
22378
22379 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22380 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22381 that it points to the node itself, thus indicating that the node is its
22382 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22383 the given node is NULL, recursively descend the decl/block tree which
22384 it is the root of, and for each other ..._DECL or BLOCK node contained
22385 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22386 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22387 values to point to themselves. */
22388
22389 static void
22390 set_block_origin_self (tree stmt)
22391 {
22392 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22393 {
22394 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22395
22396 {
22397 tree local_decl;
22398
22399 for (local_decl = BLOCK_VARS (stmt);
22400 local_decl != NULL_TREE;
22401 local_decl = DECL_CHAIN (local_decl))
22402 /* Do not recurse on nested functions since the inlining status
22403 of parent and child can be different as per the DWARF spec. */
22404 if (TREE_CODE (local_decl) != FUNCTION_DECL
22405 && !DECL_EXTERNAL (local_decl))
22406 set_decl_origin_self (local_decl);
22407 }
22408
22409 {
22410 tree subblock;
22411
22412 for (subblock = BLOCK_SUBBLOCKS (stmt);
22413 subblock != NULL_TREE;
22414 subblock = BLOCK_CHAIN (subblock))
22415 set_block_origin_self (subblock); /* Recurse. */
22416 }
22417 }
22418 }
22419
22420 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22421 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22422 node so that it points to the node itself, thus indicating that the
22423 node represents its own (abstract) origin. Additionally, if the
22424 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22425 the decl/block tree of which the given node is the root, and for
22426 each other ..._DECL or BLOCK node contained therein whose
22427 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22428 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22429 point to themselves. */
22430
22431 static void
22432 set_decl_origin_self (tree decl)
22433 {
22434 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22435 {
22436 DECL_ABSTRACT_ORIGIN (decl) = decl;
22437 if (TREE_CODE (decl) == FUNCTION_DECL)
22438 {
22439 tree arg;
22440
22441 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22442 DECL_ABSTRACT_ORIGIN (arg) = arg;
22443 if (DECL_INITIAL (decl) != NULL_TREE
22444 && DECL_INITIAL (decl) != error_mark_node)
22445 set_block_origin_self (DECL_INITIAL (decl));
22446 }
22447 }
22448 }
22449 \f
22450 /* Mark the early DIE for DECL as the abstract instance. */
22451
22452 static void
22453 dwarf2out_abstract_function (tree decl)
22454 {
22455 dw_die_ref old_die;
22456
22457 /* Make sure we have the actual abstract inline, not a clone. */
22458 decl = DECL_ORIGIN (decl);
22459
22460 if (DECL_IGNORED_P (decl))
22461 return;
22462
22463 old_die = lookup_decl_die (decl);
22464 /* With early debug we always have an old DIE unless we are in LTO
22465 and the user did not compile with debug info but only linked with it. */
22466 if (in_lto_p && ! old_die)
22467 return;
22468 gcc_assert (old_die != NULL);
22469 if (get_AT (old_die, DW_AT_inline)
22470 || get_AT (old_die, DW_AT_abstract_origin))
22471 /* We've already generated the abstract instance. */
22472 return;
22473
22474 /* Go ahead and put DW_AT_inline on the DIE. */
22475 if (DECL_DECLARED_INLINE_P (decl))
22476 {
22477 if (cgraph_function_possibly_inlined_p (decl))
22478 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22479 else
22480 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22481 }
22482 else
22483 {
22484 if (cgraph_function_possibly_inlined_p (decl))
22485 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22486 else
22487 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22488 }
22489
22490 if (DECL_DECLARED_INLINE_P (decl)
22491 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22492 add_AT_flag (old_die, DW_AT_artificial, 1);
22493
22494 set_decl_origin_self (decl);
22495 }
22496
22497 /* Helper function of premark_used_types() which gets called through
22498 htab_traverse.
22499
22500 Marks the DIE of the given TYPE as perennial, so it never gets
22501 marked as unused by prune_unused_types. */
22502
22503 bool
22504 premark_used_types_helper (tree const &type, void *)
22505 {
22506 dw_die_ref die;
22507
22508 die = lookup_type_die (type);
22509 if (die != NULL)
22510 die->die_perennial_p = 1;
22511 return true;
22512 }
22513
22514 /* Helper function of premark_types_used_by_global_vars which gets called
22515 through htab_traverse.
22516
22517 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22518 marked as unused by prune_unused_types. The DIE of the type is marked
22519 only if the global variable using the type will actually be emitted. */
22520
22521 int
22522 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22523 void *)
22524 {
22525 struct types_used_by_vars_entry *entry;
22526 dw_die_ref die;
22527
22528 entry = (struct types_used_by_vars_entry *) *slot;
22529 gcc_assert (entry->type != NULL
22530 && entry->var_decl != NULL);
22531 die = lookup_type_die (entry->type);
22532 if (die)
22533 {
22534 /* Ask cgraph if the global variable really is to be emitted.
22535 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22536 varpool_node *node = varpool_node::get (entry->var_decl);
22537 if (node && node->definition)
22538 {
22539 die->die_perennial_p = 1;
22540 /* Keep the parent DIEs as well. */
22541 while ((die = die->die_parent) && die->die_perennial_p == 0)
22542 die->die_perennial_p = 1;
22543 }
22544 }
22545 return 1;
22546 }
22547
22548 /* Mark all members of used_types_hash as perennial. */
22549
22550 static void
22551 premark_used_types (struct function *fun)
22552 {
22553 if (fun && fun->used_types_hash)
22554 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22555 }
22556
22557 /* Mark all members of types_used_by_vars_entry as perennial. */
22558
22559 static void
22560 premark_types_used_by_global_vars (void)
22561 {
22562 if (types_used_by_vars_hash)
22563 types_used_by_vars_hash
22564 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22565 }
22566
22567 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22568 for CA_LOC call arg loc node. */
22569
22570 static dw_die_ref
22571 gen_call_site_die (tree decl, dw_die_ref subr_die,
22572 struct call_arg_loc_node *ca_loc)
22573 {
22574 dw_die_ref stmt_die = NULL, die;
22575 tree block = ca_loc->block;
22576
22577 while (block
22578 && block != DECL_INITIAL (decl)
22579 && TREE_CODE (block) == BLOCK)
22580 {
22581 stmt_die = BLOCK_DIE (block);
22582 if (stmt_die)
22583 break;
22584 block = BLOCK_SUPERCONTEXT (block);
22585 }
22586 if (stmt_die == NULL)
22587 stmt_die = subr_die;
22588 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22589 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22590 if (ca_loc->tail_call_p)
22591 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22592 if (ca_loc->symbol_ref)
22593 {
22594 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22595 if (tdie)
22596 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22597 else
22598 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22599 false);
22600 }
22601 return die;
22602 }
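/* Rough shape of the DIE built above (illustrative only; before DWARF 5
   the dwarf_TAG/dwarf_AT wrappers map these to the corresponding
   DW_TAG_GNU_call_site/DW_AT_GNU_* forms, and DW_AT_call_origin is only
   present when the call target is statically known):

       DW_TAG_call_site            (child of the enclosing block's DIE,
                                    or of SUBR_DIE when no block DIE exists)
         DW_AT_call_return_pc      <label recorded in CA_LOC>
         DW_AT_call_tail_call      <flag, tail calls only>
         DW_AT_call_origin         <reference to the callee's DIE, or the
                                    callee's address when it has no DIE>  */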
22603
22604 /* Generate a DIE to represent a declared function (either file-scope or
22605 block-local). */
22606
22607 static void
22608 gen_subprogram_die (tree decl, dw_die_ref context_die)
22609 {
22610 tree origin = decl_ultimate_origin (decl);
22611 dw_die_ref subr_die;
22612 dw_die_ref old_die = lookup_decl_die (decl);
22613
22614 /* This function gets called multiple times for different stages of
22615 the debug process. For example, for func() in this code:
22616
22617 namespace S
22618 {
22619 void func() { ... }
22620 }
22621
22622 ...we get called 4 times. Twice in early debug and twice in
22623 late debug:
22624
22625 Early debug
22626 -----------
22627
22628 1. Once while generating func() within the namespace. This is
22629 the declaration. The declaration bit below is set, as the
22630 context is the namespace.
22631
22632 A new DIE will be generated with DW_AT_declaration set.
22633
22634 2. Once for func() itself. This is the specification. The
22635 declaration bit below is clear as the context is the CU.
22636
22637 We will use the cached DIE from (1) to create a new DIE with
22638 DW_AT_specification pointing to the declaration in (1).
22639
22640 Late debug via rest_of_handle_final()
22641 -------------------------------------
22642
22643 3. Once generating func() within the namespace. This is also the
22644 declaration, as in (1), but this time we will early exit below
22645 as we have a cached DIE and a declaration needs no additional
22646 annotations (no locations), as the source declaration line
22647 info is enough.
22648
22649 4. Once for func() itself. As in (2), this is the specification,
22650 but this time we will re-use the cached DIE, and just annotate
22651 it with the location information that should now be available.
22652
22653 For something without namespaces, but with abstract instances, we
22654 are also called multiple times:
22655
22656 class Base
22657 {
22658 public:
22659 Base (); // constructor declaration (1)
22660 };
22661
22662 Base::Base () { } // constructor specification (2)
22663
22664 Early debug
22665 -----------
22666
22667 1. Once for the Base() constructor by virtue of it being a
22668 member of the Base class. This is done via
22669 rest_of_type_compilation.
22670
22671 This is a declaration, so a new DIE will be created with
22672 DW_AT_declaration.
22673
22674 2. Once for the Base() constructor definition, but this time
22675 while generating the abstract instance of the base
22676 constructor (__base_ctor) which is being generated via early
22677 debug of reachable functions.
22678
22679 Even though we have a cached version of the declaration (1),
22680 we will create a DW_AT_specification of the declaration DIE
22681 in (1).
22682
22683 3. Once for the __base_ctor itself, but this time, we generate
22684 a DW_AT_abstract_origin version of the DW_AT_specification in
22685 (2).
22686
22687 Late debug via rest_of_handle_final
22688 -----------------------------------
22689
22690 4. One final time for the __base_ctor (which will have a cached
22691 DIE with DW_AT_abstract_origin created in (3)). This time,
22692 we will just annotate the location information now
22693 available.
22694 */
22695 int declaration = (current_function_decl != decl
22696 || class_or_namespace_scope_p (context_die));
22697
22698 /* A declaration that has been previously dumped needs no
22699 additional information. */
22700 if (old_die && declaration)
22701 return;
22702
22703 /* Now that the C++ front end lazily declares artificial member fns, we
22704 might need to retrofit the declaration into its class. */
22705 if (!declaration && !origin && !old_die
22706 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22707 && !class_or_namespace_scope_p (context_die)
22708 && debug_info_level > DINFO_LEVEL_TERSE)
22709 old_die = force_decl_die (decl);
22710
22711 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22712 if (origin != NULL)
22713 {
22714 gcc_assert (!declaration || local_scope_p (context_die));
22715
22716 /* Fixup die_parent for the abstract instance of a nested
22717 inline function. */
22718 if (old_die && old_die->die_parent == NULL)
22719 add_child_die (context_die, old_die);
22720
22721 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22722 {
22723 /* If we have a DW_AT_abstract_origin we have a working
22724 cached version. */
22725 subr_die = old_die;
22726 }
22727 else
22728 {
22729 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22730 add_abstract_origin_attribute (subr_die, origin);
22731 /* This is where the actual code for a cloned function is.
22732 Let's emit the linkage name attribute for it. This helps
22733 debuggers to, e.g., set breakpoints in
22734 constructors/destructors when the user asks "break
22735 K::K". */
22736 add_linkage_name (subr_die, decl);
22737 }
22738 }
22739 /* A cached copy, possibly from early dwarf generation. Reuse as
22740 much as possible. */
22741 else if (old_die)
22742 {
22743 if (!get_AT_flag (old_die, DW_AT_declaration)
22744 /* We can have a normal definition following an inline one in the
22745 case of redefinition of GNU C extern inlines.
22746 It seems reasonable to use AT_specification in this case. */
22747 && !get_AT (old_die, DW_AT_inline))
22748 {
22749 /* Detect and ignore this case, where we are trying to output
22750 something we have already output. */
22751 if (get_AT (old_die, DW_AT_low_pc)
22752 || get_AT (old_die, DW_AT_ranges))
22753 return;
22754
22755 /* If we have no location information, this must be a
22756 partially generated DIE from early dwarf generation.
22757 Fall through and generate it. */
22758 }
22759
22760 /* If the definition comes from the same place as the declaration,
22761 maybe use the old DIE. We always want the DIE for this function
22762 that has the *_pc attributes to be under comp_unit_die so the
22763 debugger can find it. We also need to do this for abstract
22764 instances of inlines, since the spec requires the out-of-line copy
22765 to have the same parent. For local class methods, this doesn't
22766 apply; we just use the old DIE. */
22767 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22768 struct dwarf_file_data * file_index = lookup_filename (s.file);
22769 if ((is_cu_die (old_die->die_parent)
22770 /* This condition fixes the inconsistency/ICE with the
22771 following Fortran test (or some derivative thereof) while
22772 building libgfortran:
22773
22774 module some_m
22775 contains
22776 logical function funky (FLAG)
22777 funky = .true.
22778 end function
22779 end module
22780 */
22781 || (old_die->die_parent
22782 && old_die->die_parent->die_tag == DW_TAG_module)
22783 || context_die == NULL)
22784 && (DECL_ARTIFICIAL (decl)
22785 /* The location attributes may be in the abstract origin
22786 which in the case of LTO might be not available to
22787 look at. */
22788 || get_AT (old_die, DW_AT_abstract_origin)
22789 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22790 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22791 == (unsigned) s.line)
22792 && (!debug_column_info
22793 || s.column == 0
22794 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22795 == (unsigned) s.column)))))
22796 {
22797 subr_die = old_die;
22798
22799 /* Clear out the declaration attribute, but leave the
22800 parameters so they can be augmented with location
22801 information later. Unless this was a declaration, in
22802 which case, wipe out the nameless parameters and recreate
22803 them further down. */
22804 if (remove_AT (subr_die, DW_AT_declaration))
22805 {
22806
22807 remove_AT (subr_die, DW_AT_object_pointer);
22808 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22809 }
22810 }
22811 /* Make a specification pointing to the previously built
22812 declaration. */
22813 else
22814 {
22815 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22816 add_AT_specification (subr_die, old_die);
22817 add_pubname (decl, subr_die);
22818 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22819 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22820 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22821 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22822 if (debug_column_info
22823 && s.column
22824 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22825 != (unsigned) s.column))
22826 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22827
22828 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22829 emit the real type on the definition die. */
22830 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22831 {
22832 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22833 if (die == auto_die || die == decltype_auto_die)
22834 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22835 TYPE_UNQUALIFIED, false, context_die);
22836 }
22837
22838 /* When we process the method declaration, we haven't seen
22839 the out-of-class defaulted definition yet, so we have to
22840 recheck now. */
22841 if ((dwarf_version >= 5 || ! dwarf_strict)
22842 && !get_AT (subr_die, DW_AT_defaulted))
22843 {
22844 int defaulted
22845 = lang_hooks.decls.decl_dwarf_attribute (decl,
22846 DW_AT_defaulted);
22847 if (defaulted != -1)
22848 {
22849 /* Other values must have been handled before. */
22850 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22851 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22852 }
22853 }
22854 }
22855 }
22856 /* Create a fresh DIE for anything else. */
22857 else
22858 {
22859 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22860
22861 if (TREE_PUBLIC (decl))
22862 add_AT_flag (subr_die, DW_AT_external, 1);
22863
22864 add_name_and_src_coords_attributes (subr_die, decl);
22865 add_pubname (decl, subr_die);
22866 if (debug_info_level > DINFO_LEVEL_TERSE)
22867 {
22868 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22869 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22870 TYPE_UNQUALIFIED, false, context_die);
22871 }
22872
22873 add_pure_or_virtual_attribute (subr_die, decl);
22874 if (DECL_ARTIFICIAL (decl))
22875 add_AT_flag (subr_die, DW_AT_artificial, 1);
22876
22877 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22878 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22879
22880 add_alignment_attribute (subr_die, decl);
22881
22882 add_accessibility_attribute (subr_die, decl);
22883 }
22884
22885 /* Unless we have an existing non-declaration DIE, equate the new
22886 DIE. */
22887 if (!old_die || is_declaration_die (old_die))
22888 equate_decl_number_to_die (decl, subr_die);
22889
22890 if (declaration)
22891 {
22892 if (!old_die || !get_AT (old_die, DW_AT_inline))
22893 {
22894 add_AT_flag (subr_die, DW_AT_declaration, 1);
22895
22896 /* If this is an explicit function declaration then generate
22897 a DW_AT_explicit attribute. */
22898 if ((dwarf_version >= 3 || !dwarf_strict)
22899 && lang_hooks.decls.decl_dwarf_attribute (decl,
22900 DW_AT_explicit) == 1)
22901 add_AT_flag (subr_die, DW_AT_explicit, 1);
22902
22903 /* If this is a C++11 deleted special function member then generate
22904 a DW_AT_deleted attribute. */
22905 if ((dwarf_version >= 5 || !dwarf_strict)
22906 && lang_hooks.decls.decl_dwarf_attribute (decl,
22907 DW_AT_deleted) == 1)
22908 add_AT_flag (subr_die, DW_AT_deleted, 1);
22909
22910 /* If this is a C++11 defaulted special function member then
22911 generate a DW_AT_defaulted attribute. */
22912 if (dwarf_version >= 5 || !dwarf_strict)
22913 {
22914 int defaulted
22915 = lang_hooks.decls.decl_dwarf_attribute (decl,
22916 DW_AT_defaulted);
22917 if (defaulted != -1)
22918 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22919 }
22920
22921 /* If this is a C++11 non-static member function with & ref-qualifier
22922 then generate a DW_AT_reference attribute. */
22923 if ((dwarf_version >= 5 || !dwarf_strict)
22924 && lang_hooks.decls.decl_dwarf_attribute (decl,
22925 DW_AT_reference) == 1)
22926 add_AT_flag (subr_die, DW_AT_reference, 1);
22927
22928 /* If this is a C++11 non-static member function with &&
22929 ref-qualifier then generate a DW_AT_reference attribute. */
22930 if ((dwarf_version >= 5 || !dwarf_strict)
22931 && lang_hooks.decls.decl_dwarf_attribute (decl,
22932 DW_AT_rvalue_reference)
22933 == 1)
22934 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
22935 }
22936 }
22937 /* For non DECL_EXTERNALs, if range information is available, fill
22938 the DIE with it. */
22939 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
22940 {
22941 HOST_WIDE_INT cfa_fb_offset;
22942
22943 struct function *fun = DECL_STRUCT_FUNCTION (decl);
22944
22945 if (!crtl->has_bb_partition)
22946 {
22947 dw_fde_ref fde = fun->fde;
22948 if (fde->dw_fde_begin)
22949 {
22950 /* We have already generated the labels. */
22951 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
22952 fde->dw_fde_end, false);
22953 }
22954 else
22955 {
22956 /* Create start/end labels and add the range. */
22957 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
22958 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
22959 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
22960 current_function_funcdef_no);
22961 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
22962 current_function_funcdef_no);
22963 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
22964 false);
22965 }
22966
22967 #if VMS_DEBUGGING_INFO
22968 /* HP OpenVMS Industry Standard 64: DWARF Extensions
22969 Section 2.3 Prologue and Epilogue Attributes:
22970 When a breakpoint is set on entry to a function, it is generally
22971 desirable for execution to be suspended, not on the very first
22972 instruction of the function, but rather at a point after the
22973 function's frame has been set up, after any language defined local
22974 declaration processing has been completed, and before execution of
22975 the first statement of the function begins. Debuggers generally
22976 cannot properly determine where this point is. Similarly for a
22977 breakpoint set on exit from a function. The prologue and epilogue
22978 attributes allow a compiler to communicate the location(s) to use. */
22979
22980 {
22981 if (fde->dw_fde_vms_end_prologue)
22982 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
22983 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
22984
22985 if (fde->dw_fde_vms_begin_epilogue)
22986 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
22987 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
22988 }
22989 #endif
22990
22991 }
22992 else
22993 {
22994 /* Generate pubnames entries for the split function code ranges. */
22995 dw_fde_ref fde = fun->fde;
22996
22997 if (fde->dw_fde_second_begin)
22998 {
22999 if (dwarf_version >= 3 || !dwarf_strict)
23000 {
23001 /* We should use ranges for non-contiguous code section
23002 addresses. Use the actual code range for the initial
23003 section, since the HOT/COLD labels might precede an
23004 alignment offset. */
23005 bool range_list_added = false;
23006 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
23007 fde->dw_fde_end, &range_list_added,
23008 false);
23009 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
23010 fde->dw_fde_second_end,
23011 &range_list_added, false);
23012 if (range_list_added)
23013 add_ranges (NULL);
23014 }
23015 else
23016 {
23017 /* There is no real support in DWARF2 for this, so we make
23018 a workaround. First, emit the pub name for the segment
23019 containing the function label. Then make and emit a
23020 simplified subprogram DIE for the second segment with the
23021 name prefixed by __second_sect_of_. We use the same
23022 linkage name for the second die so that gdb will find both
23023 sections when given "b foo". */
23024 const char *name = NULL;
23025 tree decl_name = DECL_NAME (decl);
23026 dw_die_ref seg_die;
23027
23028 /* Do the 'primary' section. */
23029 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23030 fde->dw_fde_end, false);
23031
23032 /* Build a minimal DIE for the secondary section. */
23033 seg_die = new_die (DW_TAG_subprogram,
23034 subr_die->die_parent, decl);
23035
23036 if (TREE_PUBLIC (decl))
23037 add_AT_flag (seg_die, DW_AT_external, 1);
23038
23039 if (decl_name != NULL
23040 && IDENTIFIER_POINTER (decl_name) != NULL)
23041 {
23042 name = dwarf2_name (decl, 1);
23043 if (! DECL_ARTIFICIAL (decl))
23044 add_src_coords_attributes (seg_die, decl);
23045
23046 add_linkage_name (seg_die, decl);
23047 }
23048 gcc_assert (name != NULL);
23049 add_pure_or_virtual_attribute (seg_die, decl);
23050 if (DECL_ARTIFICIAL (decl))
23051 add_AT_flag (seg_die, DW_AT_artificial, 1);
23052
23053 name = concat ("__second_sect_of_", name, NULL);
23054 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23055 fde->dw_fde_second_end, false);
23056 add_name_attribute (seg_die, name);
23057 if (want_pubnames ())
23058 add_pubname_string (name, seg_die);
23059 }
23060 }
23061 else
23062 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23063 false);
23064 }
23065
23066 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23067
23068 /* We define the "frame base" as the function's CFA. This is more
23069 convenient for several reasons: (1) It's stable across the prologue
23070 and epilogue, which makes it better than just a frame pointer,
23071 (2) With dwarf3, there exists a one-byte encoding that allows us
23072 to reference the .debug_frame data by proxy, but failing that,
23073 (3) We can at least reuse the code inspection and interpretation
23074 code that determines the CFA position at various points in the
23075 function. */
23076 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23077 {
23078 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23079 add_AT_loc (subr_die, DW_AT_frame_base, op);
23080 }
23081 else
23082 {
23083 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23084 if (list->dw_loc_next)
23085 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23086 else
23087 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23088 }
23089
23090 /* Compute a displacement from the "steady-state frame pointer" to
23091 the CFA. The former is what all stack slots and argument slots
23092 will reference in the rtl; the latter is what we've told the
23093 debugger about. We'll need to adjust all frame_base references
23094 by this displacement. */
23095 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23096
23097 if (fun->static_chain_decl)
23098 {
23099 /* DWARF requires here a location expression that computes the
23100 address of the enclosing subprogram's frame base. The machinery
23101 in tree-nested.c is supposed to store this specific address in the
23102 last field of the FRAME record. */
23103 const tree frame_type
23104 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23105 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23106
23107 tree fb_expr
23108 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23109 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23110 fb_expr, fb_decl, NULL_TREE);
23111
23112 add_AT_location_description (subr_die, DW_AT_static_link,
23113 loc_list_from_tree (fb_expr, 0, NULL));
23114 }
23115
23116 resolve_variable_values ();
23117 }
23118
23119 /* Generate child DIEs for template parameters. */
23120 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23121 gen_generic_params_dies (decl);
23122
23123 /* Now output descriptions of the arguments for this function. This gets
23124 (unnecessarily?) complex because the DECL_ARGUMENTS list
23125 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23126 `...' at the end of the formal parameter list. In order to find out if
23127 there was a trailing ellipsis or not, we must instead look at the type
23128 associated with the FUNCTION_DECL. This will be a node of type
23129 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23130 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23131 an ellipsis at the end. */
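   /* For example (an illustration of the rule above, not a quote from the
      DWARF spec): for "int f (int a, ...)" the chain of parameter types
      hanging off the FUNCTION_TYPE does not end in void_type_node, so
      stdarg_p is true and a DW_TAG_unspecified_parameters child is emitted
      further below; for "int g (int a)" the chain ends in void_type_node
      and no such child is created.  */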
23132
23133 /* In the case where we are describing a mere function declaration, all we
23134 need to do here (and all we *can* do here) is to describe the *types* of
23135 its formal parameters. */
23136 if (debug_info_level <= DINFO_LEVEL_TERSE)
23137 ;
23138 else if (declaration)
23139 gen_formal_types_die (decl, subr_die);
23140 else
23141 {
23142 /* Generate DIEs to represent all known formal parameters. */
23143 tree parm = DECL_ARGUMENTS (decl);
23144 tree generic_decl = early_dwarf
23145 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23146 tree generic_decl_parm = generic_decl
23147 ? DECL_ARGUMENTS (generic_decl)
23148 : NULL;
23149
23150 /* Now we want to walk the list of parameters of the function and
23151 emit their relevant DIEs.
23152
23153 We consider the case of DECL being an instance of a generic function
23154 as well as it being a normal function.
23155
23156 If DECL is an instance of a generic function we walk the
23157 parameters of the generic function declaration _and_ the parameters of
23158 DECL itself. This is useful because we want to emit specific DIEs for
23159 function parameter packs and those are declared as part of the
23160 generic function declaration. In that particular case,
23161 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23162 That DIE has children DIEs representing the set of arguments
23163 of the pack. Note that the set of pack arguments can be empty.
23164 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23165 child DIEs.
23166
23167 Otherwise, we just consider the parameters of DECL. */
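      /* Illustrative DIE shape for an instance of a generic function with
	 a parameter pack, e.g. "template <typename... T> void f (T... args)"
	 instantiated with <int, long> (the exact children depend on the
	 front end and on how the pack was expanded):

	     DW_TAG_GNU_formal_parameter_pack
	       DW_TAG_formal_parameter   (the int pack argument)
	       DW_TAG_formal_parameter   (the long pack argument)  */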
23168 while (generic_decl_parm || parm)
23169 {
23170 if (generic_decl_parm
23171 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23172 gen_formal_parameter_pack_die (generic_decl_parm,
23173 parm, subr_die,
23174 &parm);
23175 else if (parm && !POINTER_BOUNDS_P (parm))
23176 {
23177 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23178
23179 if (early_dwarf
23180 && parm == DECL_ARGUMENTS (decl)
23181 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23182 && parm_die
23183 && (dwarf_version >= 3 || !dwarf_strict))
23184 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23185
23186 parm = DECL_CHAIN (parm);
23187 }
23188 else if (parm)
23189 parm = DECL_CHAIN (parm);
23190
23191 if (generic_decl_parm)
23192 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23193 }
23194
23195 /* Decide whether we need an unspecified_parameters DIE at the end.
23196 There are 2 more cases to do this for: 1) the ANSI ... declaration -
23197 this is detectable when the end of the arg list is not a
23198 void_type_node; 2) an unprototyped function declaration (not a
23199 definition). This just means that we have no info about the
23200 parameters at all. */
23201 if (early_dwarf)
23202 {
23203 if (prototype_p (TREE_TYPE (decl)))
23204 {
23205 /* This is the prototyped case, check for.... */
23206 if (stdarg_p (TREE_TYPE (decl)))
23207 gen_unspecified_parameters_die (decl, subr_die);
23208 }
23209 else if (DECL_INITIAL (decl) == NULL_TREE)
23210 gen_unspecified_parameters_die (decl, subr_die);
23211 }
23212 }
23213
23214 if (subr_die != old_die)
23215 /* Add the calling convention attribute if requested. */
23216 add_calling_convention_attribute (subr_die, decl);
23217
23218 /* Output Dwarf info for all of the stuff within the body of the function
23219 (if it has one - it may be just a declaration).
23220
23221 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23222 a function. This BLOCK actually represents the outermost binding contour
23223 for the function, i.e. the contour in which the function's formal
23224 parameters and labels get declared. Curiously, it appears that the front
23225 end doesn't actually put the PARM_DECL nodes for the current function onto
23226 the BLOCK_VARS list for this outer scope, but that they are strung off of the
23227 DECL_ARGUMENTS list for the function instead.
23228
23229 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23230 the LABEL_DECL nodes for the function however, and we output DWARF info
23231 for those in decls_for_scope. Just within the `outer_scope' there will be
23232 a BLOCK node representing the function's outermost pair of curly braces,
23233 and any blocks used for the base and member initializers of a C++
23234 constructor function. */
23235 tree outer_scope = DECL_INITIAL (decl);
23236 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23237 {
23238 int call_site_note_count = 0;
23239 int tail_call_site_note_count = 0;
23240
23241 /* Emit a DW_TAG_variable DIE for a named return value. */
23242 if (DECL_NAME (DECL_RESULT (decl)))
23243 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23244
23245 /* The first time through decls_for_scope we will generate the
23246 DIEs for the locals. The second time, we fill in the
23247 location info. */
23248 decls_for_scope (outer_scope, subr_die);
23249
23250 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23251 {
23252 struct call_arg_loc_node *ca_loc;
23253 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23254 {
23255 dw_die_ref die = NULL;
23256 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23257 rtx arg, next_arg;
23258
23259 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23260 ? XEXP (ca_loc->call_arg_loc_note, 0)
23261 : NULL_RTX);
23262 arg; arg = next_arg)
23263 {
23264 dw_loc_descr_ref reg, val;
23265 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23266 dw_die_ref cdie, tdie = NULL;
23267
23268 next_arg = XEXP (arg, 1);
23269 if (REG_P (XEXP (XEXP (arg, 0), 0))
23270 && next_arg
23271 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23272 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23273 && REGNO (XEXP (XEXP (arg, 0), 0))
23274 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23275 next_arg = XEXP (next_arg, 1);
23276 if (mode == VOIDmode)
23277 {
23278 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23279 if (mode == VOIDmode)
23280 mode = GET_MODE (XEXP (arg, 0));
23281 }
23282 if (mode == VOIDmode || mode == BLKmode)
23283 continue;
23284 /* Get dynamic information about call target only if we
23285 have no static information: we cannot generate both
23286 DW_AT_call_origin and DW_AT_call_target
23287 attributes. */
23288 if (ca_loc->symbol_ref == NULL_RTX)
23289 {
23290 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23291 {
23292 tloc = XEXP (XEXP (arg, 0), 1);
23293 continue;
23294 }
23295 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23296 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23297 {
23298 tlocc = XEXP (XEXP (arg, 0), 1);
23299 continue;
23300 }
23301 }
23302 reg = NULL;
23303 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23304 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23305 VAR_INIT_STATUS_INITIALIZED);
23306 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23307 {
23308 rtx mem = XEXP (XEXP (arg, 0), 0);
23309 reg = mem_loc_descriptor (XEXP (mem, 0),
23310 get_address_mode (mem),
23311 GET_MODE (mem),
23312 VAR_INIT_STATUS_INITIALIZED);
23313 }
23314 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23315 == DEBUG_PARAMETER_REF)
23316 {
23317 tree tdecl
23318 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23319 tdie = lookup_decl_die (tdecl);
23320 if (tdie == NULL)
23321 continue;
23322 }
23323 else
23324 continue;
23325 if (reg == NULL
23326 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23327 != DEBUG_PARAMETER_REF)
23328 continue;
23329 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23330 VOIDmode,
23331 VAR_INIT_STATUS_INITIALIZED);
23332 if (val == NULL)
23333 continue;
23334 if (die == NULL)
23335 die = gen_call_site_die (decl, subr_die, ca_loc);
23336 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23337 NULL_TREE);
23338 if (reg != NULL)
23339 add_AT_loc (cdie, DW_AT_location, reg);
23340 else if (tdie != NULL)
23341 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23342 tdie);
23343 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23344 if (next_arg != XEXP (arg, 1))
23345 {
23346 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23347 if (mode == VOIDmode)
23348 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23349 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23350 0), 1),
23351 mode, VOIDmode,
23352 VAR_INIT_STATUS_INITIALIZED);
23353 if (val != NULL)
23354 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23355 val);
23356 }
23357 }
23358 if (die == NULL
23359 && (ca_loc->symbol_ref || tloc))
23360 die = gen_call_site_die (decl, subr_die, ca_loc);
23361 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23362 {
23363 dw_loc_descr_ref tval = NULL;
23364
23365 if (tloc != NULL_RTX)
23366 tval = mem_loc_descriptor (tloc,
23367 GET_MODE (tloc) == VOIDmode
23368 ? Pmode : GET_MODE (tloc),
23369 VOIDmode,
23370 VAR_INIT_STATUS_INITIALIZED);
23371 if (tval)
23372 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23373 else if (tlocc != NULL_RTX)
23374 {
23375 tval = mem_loc_descriptor (tlocc,
23376 GET_MODE (tlocc) == VOIDmode
23377 ? Pmode : GET_MODE (tlocc),
23378 VOIDmode,
23379 VAR_INIT_STATUS_INITIALIZED);
23380 if (tval)
23381 add_AT_loc (die,
23382 dwarf_AT (DW_AT_call_target_clobbered),
23383 tval);
23384 }
23385 }
23386 if (die != NULL)
23387 {
23388 call_site_note_count++;
23389 if (ca_loc->tail_call_p)
23390 tail_call_site_note_count++;
23391 }
23392 }
23393 }
23394 call_arg_locations = NULL;
23395 call_arg_loc_last = NULL;
23396 if (tail_call_site_count >= 0
23397 && tail_call_site_count == tail_call_site_note_count
23398 && (!dwarf_strict || dwarf_version >= 5))
23399 {
23400 if (call_site_count >= 0
23401 && call_site_count == call_site_note_count)
23402 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23403 else
23404 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23405 }
23406 call_site_count = -1;
23407 tail_call_site_count = -1;
23408 }
23409
23410 /* Mark used types after we have created DIEs for the functions scopes. */
23411 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23412 }
23413
23414 /* Returns a hash value for X (which really is a die_struct). */
23415
23416 hashval_t
23417 block_die_hasher::hash (die_struct *d)
23418 {
23419 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23420 }
23421
23422 /* Return nonzero if the decl_id and die_parent of die_struct X are the same
23423 as the decl_id and die_parent of die_struct Y. */
23424
23425 bool
23426 block_die_hasher::equal (die_struct *x, die_struct *y)
23427 {
23428 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23429 }
23430
23431 /* Hold information about markers for inlined entry points. */
23432 struct GTY ((for_user)) inline_entry_data
23433 {
23434 /* The block that's the inlined_function_outer_scope for an inlined
23435 function. */
23436 tree block;
23437
23438 /* The label at the inlined entry point. */
23439 const char *label_pfx;
23440 unsigned int label_num;
23441
23442 /* The view number to be used as the inlined entry point. */
23443 var_loc_view view;
23444 };
23445
23446 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23447 {
23448 typedef tree compare_type;
23449 static inline hashval_t hash (const inline_entry_data *);
23450 static inline bool equal (const inline_entry_data *, const_tree);
23451 };
23452
23453 /* Hash table routines for inline_entry_data. */
23454
23455 inline hashval_t
23456 inline_entry_data_hasher::hash (const inline_entry_data *data)
23457 {
23458 return htab_hash_pointer (data->block);
23459 }
23460
23461 inline bool
23462 inline_entry_data_hasher::equal (const inline_entry_data *data,
23463 const_tree block)
23464 {
23465 return data->block == block;
23466 }
23467
23468 /* Inlined entry points pending DIE creation in this compilation unit. */
23469
23470 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23471
23472
23473 /* Return TRUE if DECL, which may have been previously generated as
23474 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23475 true if decl (or its origin) is either an extern declaration or a
23476 class/namespace scoped declaration.
23477
23478 The declare_in_namespace support causes us to get two DIEs for one
23479 variable, both of which are declarations. We want to avoid
23480 considering one to be a specification, so we must test for
23481 DECLARATION and DW_AT_declaration. */
23482 static inline bool
23483 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23484 {
23485 return (old_die && TREE_STATIC (decl) && !declaration
23486 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23487 }
23488
23489 /* Return true if DECL is a local static. */
23490
23491 static inline bool
23492 local_function_static (tree decl)
23493 {
23494 gcc_assert (VAR_P (decl));
23495 return TREE_STATIC (decl)
23496 && DECL_CONTEXT (decl)
23497 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23498 }
23499
23500 /* Generate a DIE to represent a declared data object.
23501 Either DECL or ORIGIN must be non-null. */
23502
23503 static void
23504 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23505 {
23506 HOST_WIDE_INT off = 0;
23507 tree com_decl;
23508 tree decl_or_origin = decl ? decl : origin;
23509 tree ultimate_origin;
23510 dw_die_ref var_die;
23511 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23512 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23513 || class_or_namespace_scope_p (context_die));
23514 bool specialization_p = false;
23515 bool no_linkage_name = false;
23516
23517 /* While C++ inline static data members have definitions inside of the
23518 class, force the first DIE to be a declaration, then let gen_member_die
23519 reparent it to the class context and call gen_variable_die again
23520 to create the outside of the class DIE for the definition. */
23521 if (!declaration
23522 && old_die == NULL
23523 && decl
23524 && DECL_CONTEXT (decl)
23525 && TYPE_P (DECL_CONTEXT (decl))
23526 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23527 {
23528 declaration = true;
23529 if (dwarf_version < 5)
23530 no_linkage_name = true;
23531 }
23532
23533 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23534 if (decl || ultimate_origin)
23535 origin = ultimate_origin;
23536 com_decl = fortran_common (decl_or_origin, &off);
23537
23538 /* A symbol in a COMMON block gets emitted as a child of the common block DIE,
23539 in the form of a data member. */
23540 if (com_decl)
23541 {
23542 dw_die_ref com_die;
23543 dw_loc_list_ref loc = NULL;
23544 die_node com_die_arg;
23545
23546 var_die = lookup_decl_die (decl_or_origin);
23547 if (var_die)
23548 {
23549 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23550 {
23551 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23552 if (loc)
23553 {
23554 if (off)
23555 {
23556 /* Optimize the common case. */
23557 if (single_element_loc_list_p (loc)
23558 && loc->expr->dw_loc_opc == DW_OP_addr
23559 && loc->expr->dw_loc_next == NULL
23560 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23561 == SYMBOL_REF)
23562 {
23563 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23564 loc->expr->dw_loc_oprnd1.v.val_addr
23565 = plus_constant (GET_MODE (x), x , off);
23566 }
23567 else
23568 loc_list_plus_const (loc, off);
23569 }
23570 add_AT_location_description (var_die, DW_AT_location, loc);
23571 remove_AT (var_die, DW_AT_declaration);
23572 }
23573 }
23574 return;
23575 }
23576
23577 if (common_block_die_table == NULL)
23578 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23579
23580 com_die_arg.decl_id = DECL_UID (com_decl);
23581 com_die_arg.die_parent = context_die;
23582 com_die = common_block_die_table->find (&com_die_arg);
23583 if (! early_dwarf)
23584 loc = loc_list_from_tree (com_decl, 2, NULL);
23585 if (com_die == NULL)
23586 {
23587 const char *cnam
23588 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23589 die_node **slot;
23590
23591 com_die = new_die (DW_TAG_common_block, context_die, decl);
23592 add_name_and_src_coords_attributes (com_die, com_decl);
23593 if (loc)
23594 {
23595 add_AT_location_description (com_die, DW_AT_location, loc);
23596 /* Avoid sharing the same loc descriptor between
23597 DW_TAG_common_block and DW_TAG_variable. */
23598 loc = loc_list_from_tree (com_decl, 2, NULL);
23599 }
23600 else if (DECL_EXTERNAL (decl_or_origin))
23601 add_AT_flag (com_die, DW_AT_declaration, 1);
23602 if (want_pubnames ())
23603 add_pubname_string (cnam, com_die); /* ??? needed? */
23604 com_die->decl_id = DECL_UID (com_decl);
23605 slot = common_block_die_table->find_slot (com_die, INSERT);
23606 *slot = com_die;
23607 }
23608 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23609 {
23610 add_AT_location_description (com_die, DW_AT_location, loc);
23611 loc = loc_list_from_tree (com_decl, 2, NULL);
23612 remove_AT (com_die, DW_AT_declaration);
23613 }
23614 var_die = new_die (DW_TAG_variable, com_die, decl);
23615 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23616 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23617 decl_quals (decl_or_origin), false,
23618 context_die);
23619 add_alignment_attribute (var_die, decl);
23620 add_AT_flag (var_die, DW_AT_external, 1);
23621 if (loc)
23622 {
23623 if (off)
23624 {
23625 /* Optimize the common case. */
23626 if (single_element_loc_list_p (loc)
23627 && loc->expr->dw_loc_opc == DW_OP_addr
23628 && loc->expr->dw_loc_next == NULL
23629 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23630 {
23631 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23632 loc->expr->dw_loc_oprnd1.v.val_addr
23633 = plus_constant (GET_MODE (x), x, off);
23634 }
23635 else
23636 loc_list_plus_const (loc, off);
23637 }
23638 add_AT_location_description (var_die, DW_AT_location, loc);
23639 }
23640 else if (DECL_EXTERNAL (decl_or_origin))
23641 add_AT_flag (var_die, DW_AT_declaration, 1);
23642 if (decl)
23643 equate_decl_number_to_die (decl, var_die);
23644 return;
23645 }
23646
23647 if (old_die)
23648 {
23649 if (declaration)
23650 {
23651 /* A declaration that has been previously dumped needs no
23652 further annotations, since it doesn't need location info on
23653 the second pass. */
23654 return;
23655 }
23656 else if (decl_will_get_specification_p (old_die, decl, declaration)
23657 && !get_AT (old_die, DW_AT_specification))
23658 {
23659 /* Fall-thru so we can make a new variable die along with a
23660 DW_AT_specification. */
23661 }
23662 else if (origin && old_die->die_parent != context_die)
23663 {
23664 /* If we will be creating an inlined instance, we need a
23665 new DIE that will get annotated with
23666 DW_AT_abstract_origin. */
23667 gcc_assert (!DECL_ABSTRACT_P (decl));
23668 }
23669 else
23670 {
23671 /* If a DIE was dumped early, it still needs location info.
23672 Skip to where we fill the location bits. */
23673 var_die = old_die;
23674
23675 /* ??? In LTRANS we cannot annotate early created variably
23676 modified type DIEs without copying them and adjusting all
23677 references to them. Thus we dump them again. Also add a
23678 reference to them, but beware of a -g0 compile and -g link,
23679 in which case the reference will already be present. */
23680 tree type = TREE_TYPE (decl_or_origin);
23681 if (in_lto_p
23682 && ! get_AT (var_die, DW_AT_type)
23683 && variably_modified_type_p
23684 (type, decl_function_context (decl_or_origin)))
23685 {
23686 if (decl_by_reference_p (decl_or_origin))
23687 add_type_attribute (var_die, TREE_TYPE (type),
23688 TYPE_UNQUALIFIED, false, context_die);
23689 else
23690 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23691 false, context_die);
23692 }
23693
23694 goto gen_variable_die_location;
23695 }
23696 }
23697
23698 /* For static data members, the declaration in the class is supposed
23699 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
23700 also in DWARF2; the specification should still be DW_TAG_variable
23701 referencing the DW_TAG_member DIE. */
23702 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23703 var_die = new_die (DW_TAG_member, context_die, decl);
23704 else
23705 var_die = new_die (DW_TAG_variable, context_die, decl);
23706
23707 if (origin != NULL)
23708 add_abstract_origin_attribute (var_die, origin);
23709
23710 /* Loop unrolling can create multiple blocks that refer to the same
23711 static variable, so we must test for the DW_AT_declaration flag.
23712
23713 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23714 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23715 sharing them.
23716
23717 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23718 else if (decl_will_get_specification_p (old_die, decl, declaration))
23719 {
23720 /* This is a definition of a C++ class level static. */
23721 add_AT_specification (var_die, old_die);
23722 specialization_p = true;
23723 if (DECL_NAME (decl))
23724 {
23725 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23726 struct dwarf_file_data * file_index = lookup_filename (s.file);
23727
23728 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23729 add_AT_file (var_die, DW_AT_decl_file, file_index);
23730
23731 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23732 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23733
23734 if (debug_column_info
23735 && s.column
23736 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23737 != (unsigned) s.column))
23738 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23739
23740 if (old_die->die_tag == DW_TAG_member)
23741 add_linkage_name (var_die, decl);
23742 }
23743 }
23744 else
23745 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23746
23747 if ((origin == NULL && !specialization_p)
23748 || (origin != NULL
23749 && !DECL_ABSTRACT_P (decl_or_origin)
23750 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23751 decl_function_context
23752 (decl_or_origin))))
23753 {
23754 tree type = TREE_TYPE (decl_or_origin);
23755
23756 if (decl_by_reference_p (decl_or_origin))
23757 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23758 context_die);
23759 else
23760 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23761 context_die);
23762 }
23763
23764 if (origin == NULL && !specialization_p)
23765 {
23766 if (TREE_PUBLIC (decl))
23767 add_AT_flag (var_die, DW_AT_external, 1);
23768
23769 if (DECL_ARTIFICIAL (decl))
23770 add_AT_flag (var_die, DW_AT_artificial, 1);
23771
23772 add_alignment_attribute (var_die, decl);
23773
23774 add_accessibility_attribute (var_die, decl);
23775 }
23776
23777 if (declaration)
23778 add_AT_flag (var_die, DW_AT_declaration, 1);
23779
23780 if (decl && (DECL_ABSTRACT_P (decl)
23781 || !old_die || is_declaration_die (old_die)))
23782 equate_decl_number_to_die (decl, var_die);
23783
23784 gen_variable_die_location:
23785 if (! declaration
23786 && (! DECL_ABSTRACT_P (decl_or_origin)
23787 /* Local static vars are shared between all clones/inlines,
23788 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23789 already set. */
23790 || (VAR_P (decl_or_origin)
23791 && TREE_STATIC (decl_or_origin)
23792 && DECL_RTL_SET_P (decl_or_origin))))
23793 {
23794 if (early_dwarf)
23795 add_pubname (decl_or_origin, var_die);
23796 else
23797 add_location_or_const_value_attribute (var_die, decl_or_origin,
23798 decl == NULL);
23799 }
23800 else
23801 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23802
23803 if ((dwarf_version >= 4 || !dwarf_strict)
23804 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23805 DW_AT_const_expr) == 1
23806 && !get_AT (var_die, DW_AT_const_expr)
23807 && !specialization_p)
23808 add_AT_flag (var_die, DW_AT_const_expr, 1);
23809
23810 if (!dwarf_strict)
23811 {
23812 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23813 DW_AT_inline);
23814 if (inl != -1
23815 && !get_AT (var_die, DW_AT_inline)
23816 && !specialization_p)
23817 add_AT_unsigned (var_die, DW_AT_inline, inl);
23818 }
23819 }
23820
23821 /* Generate a DIE to represent a named constant. */
23822
23823 static void
23824 gen_const_die (tree decl, dw_die_ref context_die)
23825 {
23826 dw_die_ref const_die;
23827 tree type = TREE_TYPE (decl);
23828
23829 const_die = lookup_decl_die (decl);
23830 if (const_die)
23831 return;
23832
23833 const_die = new_die (DW_TAG_constant, context_die, decl);
23834 equate_decl_number_to_die (decl, const_die);
23835 add_name_and_src_coords_attributes (const_die, decl);
23836 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23837 if (TREE_PUBLIC (decl))
23838 add_AT_flag (const_die, DW_AT_external, 1);
23839 if (DECL_ARTIFICIAL (decl))
23840 add_AT_flag (const_die, DW_AT_artificial, 1);
23841 tree_add_const_value_attribute_for_decl (const_die, decl);
23842 }
23843
23844 /* Generate a DIE to represent a label identifier. */
23845
23846 static void
23847 gen_label_die (tree decl, dw_die_ref context_die)
23848 {
23849 tree origin = decl_ultimate_origin (decl);
23850 dw_die_ref lbl_die = lookup_decl_die (decl);
23851 rtx insn;
23852 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23853
23854 if (!lbl_die)
23855 {
23856 lbl_die = new_die (DW_TAG_label, context_die, decl);
23857 equate_decl_number_to_die (decl, lbl_die);
23858
23859 if (origin != NULL)
23860 add_abstract_origin_attribute (lbl_die, origin);
23861 else
23862 add_name_and_src_coords_attributes (lbl_die, decl);
23863 }
23864
23865 if (DECL_ABSTRACT_P (decl))
23866 equate_decl_number_to_die (decl, lbl_die);
23867 else if (! early_dwarf)
23868 {
23869 insn = DECL_RTL_IF_SET (decl);
23870
23871 /* Deleted labels are programmer specified labels which have been
23872 eliminated because of various optimizations. We still emit them
23873 here so that it is possible to put breakpoints on them. */
23874 if (insn
23875 && (LABEL_P (insn)
23876 || ((NOTE_P (insn)
23877 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23878 {
23879 /* When optimization is enabled (via -O) some parts of the compiler
23880 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23881 represent source-level labels which were explicitly declared by
23882 the user. This really shouldn't be happening though, so catch
23883 it if it ever does happen. */
23884 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23885
23886 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23887 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23888 }
23889 else if (insn
23890 && NOTE_P (insn)
23891 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23892 && CODE_LABEL_NUMBER (insn) != -1)
23893 {
23894 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
23895 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23896 }
23897 }
23898 }
23899
23900 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
23901 attributes to the DIE for a block STMT, to describe where the inlined
23902 function was called from. This is similar to add_src_coords_attributes. */
23903
23904 static inline void
23905 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
23906 {
23907 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
23908
23909 if (dwarf_version >= 3 || !dwarf_strict)
23910 {
23911 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
23912 add_AT_unsigned (die, DW_AT_call_line, s.line);
23913 if (debug_column_info && s.column)
23914 add_AT_unsigned (die, DW_AT_call_column, s.column);
23915 }
23916 }
23917
23918
23919 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
23920 Add low_pc and high_pc attributes to the DIE for a block STMT. */
23921
23922 static inline void
23923 add_high_low_attributes (tree stmt, dw_die_ref die)
23924 {
23925 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23926
23927 if (inline_entry_data **iedp
23928 = !inline_entry_data_table ? NULL
23929 : inline_entry_data_table->find_slot_with_hash (stmt,
23930 htab_hash_pointer (stmt),
23931 NO_INSERT))
23932 {
23933 inline_entry_data *ied = *iedp;
23934 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
23935 gcc_assert (debug_inline_points);
23936 gcc_assert (inlined_function_outer_scope_p (stmt));
23937
23938 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
23939 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23940
23941 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
23942 && !dwarf_strict)
23943 {
23944 if (!output_asm_line_debug_info ())
23945 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
23946 else
23947 {
23948 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
23949 /* FIXME: this will resolve to a small number. Could we
23950 possibly emit smaller data? Ideally we'd emit a
23951 uleb128, but that would make the size of DIEs
23952 impossible for the compiler to compute, since it's
23953 the assembler that computes the value of the view
23954 label in this case. Ideally, we'd have a single form
23955 encompassing both the address and the view, and
23956 indirecting them through a table might make things
23957 easier, but even that would be more wasteful,
23958 space-wise, than what we have now. */
23959 add_AT_symview (die, DW_AT_GNU_entry_view, label);
23960 }
23961 }
23962
23963 inline_entry_data_table->clear_slot (iedp);
23964 }
23965
23966 if (BLOCK_FRAGMENT_CHAIN (stmt)
23967 && (dwarf_version >= 3 || !dwarf_strict))
23968 {
23969 tree chain, superblock = NULL_TREE;
23970 dw_die_ref pdie;
23971 dw_attr_node *attr = NULL;
23972
23973 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
23974 {
23975 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
23976 BLOCK_NUMBER (stmt));
23977 add_AT_lbl_id (die, DW_AT_entry_pc, label);
23978 }
23979
23980 /* Optimize duplicate .debug_ranges lists or even tails of
23981 lists. If this BLOCK has the same ranges as its supercontext,
23982 look up the DW_AT_ranges attribute in the supercontext (and
23983 recursively so), verify that the ranges_table contains the
23984 right values and use it instead of adding a new .debug_range. */
23985 for (chain = stmt, pdie = die;
23986 BLOCK_SAME_RANGE (chain);
23987 chain = BLOCK_SUPERCONTEXT (chain))
23988 {
23989 dw_attr_node *new_attr;
23990
23991 pdie = pdie->die_parent;
23992 if (pdie == NULL)
23993 break;
23994 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
23995 break;
23996 new_attr = get_AT (pdie, DW_AT_ranges);
23997 if (new_attr == NULL
23998 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
23999 break;
24000 attr = new_attr;
24001 superblock = BLOCK_SUPERCONTEXT (chain);
24002 }
24003 if (attr != NULL
24004 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
24005 == BLOCK_NUMBER (superblock))
24006 && BLOCK_FRAGMENT_CHAIN (superblock))
24007 {
24008 unsigned long off = attr->dw_attr_val.v.val_offset;
24009 unsigned long supercnt = 0, thiscnt = 0;
24010 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
24011 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24012 {
24013 ++supercnt;
24014 gcc_checking_assert ((*ranges_table)[off + supercnt].num
24015 == BLOCK_NUMBER (chain));
24016 }
24017 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
24018 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
24019 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24020 ++thiscnt;
24021 gcc_assert (supercnt >= thiscnt);
24022 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24023 false);
24024 note_rnglist_head (off + supercnt - thiscnt);
24025 return;
24026 }
24027
24028 unsigned int offset = add_ranges (stmt, true);
24029 add_AT_range_list (die, DW_AT_ranges, offset, false);
24030 note_rnglist_head (offset);
24031
24032 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24033 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24034 do
24035 {
24036 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24037 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24038 chain = BLOCK_FRAGMENT_CHAIN (chain);
24039 }
24040 while (chain);
24041 add_ranges (NULL);
24042 }
24043 else
24044 {
24045 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24046 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24047 BLOCK_NUMBER (stmt));
24048 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24049 BLOCK_NUMBER (stmt));
24050 add_AT_low_high_pc (die, label, label_high, false);
24051 }
24052 }
24053
24054 /* Generate a DIE for a lexical block. */
24055
24056 static void
24057 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24058 {
24059 dw_die_ref old_die = BLOCK_DIE (stmt);
24060 dw_die_ref stmt_die = NULL;
24061 if (!old_die)
24062 {
24063 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24064 BLOCK_DIE (stmt) = stmt_die;
24065 }
24066
24067 if (BLOCK_ABSTRACT (stmt))
24068 {
24069 if (old_die)
24070 {
24071 /* This must have been generated early and it won't even
24072 need location information since it's a DW_AT_inline
24073 function. */
24074 if (flag_checking)
24075 for (dw_die_ref c = context_die; c; c = c->die_parent)
24076 if (c->die_tag == DW_TAG_inlined_subroutine
24077 || c->die_tag == DW_TAG_subprogram)
24078 {
24079 gcc_assert (get_AT (c, DW_AT_inline));
24080 break;
24081 }
24082 return;
24083 }
24084 }
24085 else if (BLOCK_ABSTRACT_ORIGIN (stmt))
24086 {
24087 /* If this is an inlined instance, create a new lexical die for
24088 anything below to attach DW_AT_abstract_origin to. */
24089 if (old_die)
24090 {
24091 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24092 BLOCK_DIE (stmt) = stmt_die;
24093 old_die = NULL;
24094 }
24095
24096 tree origin = block_ultimate_origin (stmt);
24097 if (origin != NULL_TREE && origin != stmt)
24098 add_abstract_origin_attribute (stmt_die, origin);
24099 }
24100
24101 if (old_die)
24102 stmt_die = old_die;
24103
24104 /* A non-abstract block whose blocks have already been reordered
24105 should have the instruction range for this block. If so, set the
24106 high/low attributes. */
24107 if (!early_dwarf && !BLOCK_ABSTRACT (stmt) && TREE_ASM_WRITTEN (stmt))
24108 {
24109 gcc_assert (stmt_die);
24110 add_high_low_attributes (stmt, stmt_die);
24111 }
24112
24113 decls_for_scope (stmt, stmt_die);
24114 }
24115
24116 /* Generate a DIE for an inlined subprogram. */
24117
24118 static void
24119 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24120 {
24121 tree decl;
24122
24123 /* The instance of the function that is effectively being inlined shall not
24124 be abstract. */
24125 gcc_assert (! BLOCK_ABSTRACT (stmt));
24126
24127 decl = block_ultimate_origin (stmt);
24128
24129 /* Make sure any inlined functions are known to be inlineable. */
24130 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24131 || cgraph_function_possibly_inlined_p (decl));
24132
24133 if (! BLOCK_ABSTRACT (stmt))
24134 {
24135 dw_die_ref subr_die
24136 = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24137
24138 if (call_arg_locations || debug_inline_points)
24139 BLOCK_DIE (stmt) = subr_die;
24140 add_abstract_origin_attribute (subr_die, decl);
24141 if (TREE_ASM_WRITTEN (stmt))
24142 add_high_low_attributes (stmt, subr_die);
24143 add_call_src_coords_attributes (stmt, subr_die);
24144
24145 decls_for_scope (stmt, subr_die);
24146 }
24147 }
24148
24149 /* Generate a DIE for a field in a record or structure. CTX is required: see
24150 the comment for VLR_CONTEXT. */
24151
24152 static void
24153 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24154 {
24155 dw_die_ref decl_die;
24156
24157 if (TREE_TYPE (decl) == error_mark_node)
24158 return;
24159
24160 decl_die = new_die (DW_TAG_member, context_die, decl);
24161 add_name_and_src_coords_attributes (decl_die, decl);
24162 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24163 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24164 context_die);
24165
24166 if (DECL_BIT_FIELD_TYPE (decl))
24167 {
24168 add_byte_size_attribute (decl_die, decl);
24169 add_bit_size_attribute (decl_die, decl);
24170 add_bit_offset_attribute (decl_die, decl, ctx);
24171 }
24172
24173 add_alignment_attribute (decl_die, decl);
24174
24175 /* If we have a variant part offset, then we are supposed to process a member
24176 of a QUAL_UNION_TYPE, which is how we represent variant parts in
24177 trees. */
24178 gcc_assert (ctx->variant_part_offset == NULL_TREE
24179 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24180 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24181 add_data_member_location_attribute (decl_die, decl, ctx);
24182
24183 if (DECL_ARTIFICIAL (decl))
24184 add_AT_flag (decl_die, DW_AT_artificial, 1);
24185
24186 add_accessibility_attribute (decl_die, decl);
24187
24188 /* Equate decl number to die, so that we can look up this decl later on. */
24189 equate_decl_number_to_die (decl, decl_die);
24190 }
24191
24192 /* Generate a DIE for a pointer to a member type. TYPE can be an
24193 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24194 pointer to member function. */
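 /* E.g. for a C++ pointer to data member such as "int S::*pd;", the type
 of PD is an OFFSET_TYPE whose TYPE_OFFSET_BASETYPE is S; the DIE built
 below records S through DW_AT_containing_type and the member type
 through DW_AT_type (illustrative example). */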
24195
24196 static void
24197 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24198 {
24199 if (lookup_type_die (type))
24200 return;
24201
24202 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24203 scope_die_for (type, context_die), type);
24204
24205 equate_type_number_to_die (type, ptr_die);
24206 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24207 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24208 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24209 context_die);
24210 add_alignment_attribute (ptr_die, type);
24211
24212 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24213 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24214 {
24215 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24216 add_AT_loc (ptr_die, DW_AT_use_location, op);
24217 }
24218 }
24219
24220 static char *producer_string;
24221
24222 /* Return a heap allocated producer string including command line options
24223 if -grecord-gcc-switches. */
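 /* The result has the form "<language> <version> [<switches>...]", e.g. a
 producer such as "GNU C++14 8.1.0 -march=x86-64 -g -O2" (illustrative
 values). */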
24224
24225 static char *
24226 gen_producer_string (void)
24227 {
24228 size_t j;
24229 auto_vec<const char *> switches;
24230 const char *language_string = lang_hooks.name;
24231 char *producer, *tail;
24232 const char *p;
24233 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24234 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24235
24236 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24237 switch (save_decoded_options[j].opt_index)
24238 {
24239 case OPT_o:
24240 case OPT_d:
24241 case OPT_dumpbase:
24242 case OPT_dumpdir:
24243 case OPT_auxbase:
24244 case OPT_auxbase_strip:
24245 case OPT_quiet:
24246 case OPT_version:
24247 case OPT_v:
24248 case OPT_w:
24249 case OPT_L:
24250 case OPT_D:
24251 case OPT_I:
24252 case OPT_U:
24253 case OPT_SPECIAL_unknown:
24254 case OPT_SPECIAL_ignore:
24255 case OPT_SPECIAL_program_name:
24256 case OPT_SPECIAL_input_file:
24257 case OPT_grecord_gcc_switches:
24258 case OPT__output_pch_:
24259 case OPT_fdiagnostics_show_location_:
24260 case OPT_fdiagnostics_show_option:
24261 case OPT_fdiagnostics_show_caret:
24262 case OPT_fdiagnostics_color_:
24263 case OPT_fverbose_asm:
24264 case OPT____:
24265 case OPT__sysroot_:
24266 case OPT_nostdinc:
24267 case OPT_nostdinc__:
24268 case OPT_fpreprocessed:
24269 case OPT_fltrans_output_list_:
24270 case OPT_fresolution_:
24271 case OPT_fdebug_prefix_map_:
24272 case OPT_fmacro_prefix_map_:
24273 case OPT_ffile_prefix_map_:
24274 case OPT_fcompare_debug:
24275 case OPT_fchecking:
24276 case OPT_fchecking_:
24277 /* Ignore these. */
24278 continue;
24279 default:
24280 if (cl_options[save_decoded_options[j].opt_index].flags
24281 & CL_NO_DWARF_RECORD)
24282 continue;
24283 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24284 == '-');
24285 switch (save_decoded_options[j].canonical_option[0][1])
24286 {
24287 case 'M':
24288 case 'i':
24289 case 'W':
24290 continue;
24291 case 'f':
24292 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24293 "dump", 4) == 0)
24294 continue;
24295 break;
24296 default:
24297 break;
24298 }
24299 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24300 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24301 break;
24302 }
24303
24304 producer = XNEWVEC (char, plen + 1 + len + 1);
24305 tail = producer;
24306 sprintf (tail, "%s %s", language_string, version_string);
24307 tail += plen;
24308
24309 FOR_EACH_VEC_ELT (switches, j, p)
24310 {
24311 len = strlen (p);
24312 *tail = ' ';
24313 memcpy (tail + 1, p, len);
24314 tail += len + 1;
24315 }
24316
24317 *tail = '\0';
24318 return producer;
24319 }
24320
24321 /* Given two C and/or C++ language/version strings, return the "highest".
24322 C++ is assumed to be "higher" than C in this case. Used for merging
24323 LTO translation unit languages. */
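 /* For example, highest_c_language ("GNU C17", "GNU C++14") returns
 "GNU C++14", since any C++ dialect is considered "higher" than any C
 dialect here. */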
24324 static const char *
24325 highest_c_language (const char *lang1, const char *lang2)
24326 {
24327 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24328 return "GNU C++17";
24329 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24330 return "GNU C++14";
24331 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24332 return "GNU C++11";
24333 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24334 return "GNU C++98";
24335
24336 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24337 return "GNU C17";
24338 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24339 return "GNU C11";
24340 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24341 return "GNU C99";
24342 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24343 return "GNU C89";
24344
24345 gcc_unreachable ();
24346 }
24347
24348
24349 /* Generate the DIE for the compilation unit. */
24350
24351 static dw_die_ref
24352 gen_compile_unit_die (const char *filename)
24353 {
24354 dw_die_ref die;
24355 const char *language_string = lang_hooks.name;
24356 int language;
24357
24358 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24359
24360 if (filename)
24361 {
24362 add_name_attribute (die, filename);
24363 /* Don't add cwd for <built-in>. */
24364 if (filename[0] != '<')
24365 add_comp_dir_attribute (die);
24366 }
24367
24368 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24369
24370 /* If our producer is LTO, try to figure out a common language to use
24371 from the global list of translation units. */
24372 if (strcmp (language_string, "GNU GIMPLE") == 0)
24373 {
24374 unsigned i;
24375 tree t;
24376 const char *common_lang = NULL;
24377
24378 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24379 {
24380 if (!TRANSLATION_UNIT_LANGUAGE (t))
24381 continue;
24382 if (!common_lang)
24383 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24384 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24385 ;
24386 else if (strncmp (common_lang, "GNU C", 5) == 0
24387 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24388 /* Mixing C and C++ is ok, use C++ in that case. */
24389 common_lang = highest_c_language (common_lang,
24390 TRANSLATION_UNIT_LANGUAGE (t));
24391 else
24392 {
24393 /* Fall back to C. */
24394 common_lang = NULL;
24395 break;
24396 }
24397 }
24398
24399 if (common_lang)
24400 language_string = common_lang;
24401 }
24402
24403 language = DW_LANG_C;
24404 if (strncmp (language_string, "GNU C", 5) == 0
24405 && ISDIGIT (language_string[5]))
24406 {
24407 language = DW_LANG_C89;
24408 if (dwarf_version >= 3 || !dwarf_strict)
24409 {
24410 if (strcmp (language_string, "GNU C89") != 0)
24411 language = DW_LANG_C99;
24412
24413 if (dwarf_version >= 5 /* || !dwarf_strict */)
24414 if (strcmp (language_string, "GNU C11") == 0
24415 || strcmp (language_string, "GNU C17") == 0)
24416 language = DW_LANG_C11;
24417 }
24418 }
24419 else if (strncmp (language_string, "GNU C++", 7) == 0)
24420 {
24421 language = DW_LANG_C_plus_plus;
24422 if (dwarf_version >= 5 /* || !dwarf_strict */)
24423 {
24424 if (strcmp (language_string, "GNU C++11") == 0)
24425 language = DW_LANG_C_plus_plus_11;
24426 else if (strcmp (language_string, "GNU C++14") == 0)
24427 language = DW_LANG_C_plus_plus_14;
24428 else if (strcmp (language_string, "GNU C++17") == 0)
24429 /* For now. */
24430 language = DW_LANG_C_plus_plus_14;
24431 }
24432 }
24433 else if (strcmp (language_string, "GNU F77") == 0)
24434 language = DW_LANG_Fortran77;
24435 else if (dwarf_version >= 3 || !dwarf_strict)
24436 {
24437 if (strcmp (language_string, "GNU Ada") == 0)
24438 language = DW_LANG_Ada95;
24439 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24440 {
24441 language = DW_LANG_Fortran95;
24442 if (dwarf_version >= 5 /* || !dwarf_strict */)
24443 {
24444 if (strcmp (language_string, "GNU Fortran2003") == 0)
24445 language = DW_LANG_Fortran03;
24446 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24447 language = DW_LANG_Fortran08;
24448 }
24449 }
24450 else if (strcmp (language_string, "GNU Objective-C") == 0)
24451 language = DW_LANG_ObjC;
24452 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24453 language = DW_LANG_ObjC_plus_plus;
24454 else if (dwarf_version >= 5 || !dwarf_strict)
24455 {
24456 if (strcmp (language_string, "GNU Go") == 0)
24457 language = DW_LANG_Go;
24458 }
24459 }
24460 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24461 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24462 language = DW_LANG_Fortran90;
24463
24464 add_AT_unsigned (die, DW_AT_language, language);
24465
24466 switch (language)
24467 {
24468 case DW_LANG_Fortran77:
24469 case DW_LANG_Fortran90:
24470 case DW_LANG_Fortran95:
24471 case DW_LANG_Fortran03:
24472 case DW_LANG_Fortran08:
24473 /* Fortran has case-insensitive identifiers and the front-end
24474 lowercases everything. */
24475 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24476 break;
24477 default:
24478 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24479 break;
24480 }
24481 return die;
24482 }
24483
24484 /* Generate the DIE for a base class. */
24485
24486 static void
24487 gen_inheritance_die (tree binfo, tree access, tree type,
24488 dw_die_ref context_die)
24489 {
24490 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24491 struct vlr_context ctx = { type, NULL };
24492
24493 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24494 context_die);
24495 add_data_member_location_attribute (die, binfo, &ctx);
24496
24497 if (BINFO_VIRTUAL_P (binfo))
24498 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24499
24500 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24501 children, otherwise the default is DW_ACCESS_public. In DWARF2
24502 the default has always been DW_ACCESS_private. */
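 /* Thus a public base emitted as a child of a DW_TAG_class_type (or in
 DWARF2) gets an explicit DW_ACCESS_public below, while a public base of
 a structure in DWARF3+ simply relies on the default. */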
24503 if (access == access_public_node)
24504 {
24505 if (dwarf_version == 2
24506 || context_die->die_tag == DW_TAG_class_type)
24507 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24508 }
24509 else if (access == access_protected_node)
24510 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24511 else if (dwarf_version > 2
24512 && context_die->die_tag != DW_TAG_class_type)
24513 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24514 }
24515
24516 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24517 structure. */
24518 static bool
24519 is_variant_part (tree decl)
24520 {
24521 return (TREE_CODE (decl) == FIELD_DECL
24522 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24523 }
24524
24525 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24526 return the FIELD_DECL. Return NULL_TREE otherwise. */
24527
24528 static tree
24529 analyze_discr_in_predicate (tree operand, tree struct_type)
24530 {
24531 bool continue_stripping = true;
24532 while (continue_stripping)
24533 switch (TREE_CODE (operand))
24534 {
24535 CASE_CONVERT:
24536 operand = TREE_OPERAND (operand, 0);
24537 break;
24538 default:
24539 continue_stripping = false;
24540 break;
24541 }
24542
24543 /* Match field access to members of struct_type only. */
24544 if (TREE_CODE (operand) == COMPONENT_REF
24545 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24546 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24547 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24548 return TREE_OPERAND (operand, 1);
24549 else
24550 return NULL_TREE;
24551 }
24552
24553 /* Check that SRC is a constant integer that can be represented as a native
24554 integer constant (either signed or unsigned). If so, store it into DEST and
24555 return true. Return false otherwise. */
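 /* For instance, if the debug type is an unsigned 8-bit type, an
 INTEGER_CST of 200 is stored with DEST->pos set and DEST->v.uval == 200;
 with a signed debug type, a negative constant goes into DEST->v.sval
 instead. */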
24556
24557 static bool
24558 get_discr_value (tree src, dw_discr_value *dest)
24559 {
24560 tree discr_type = TREE_TYPE (src);
24561
24562 if (lang_hooks.types.get_debug_type)
24563 {
24564 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24565 if (debug_type != NULL)
24566 discr_type = debug_type;
24567 }
24568
24569 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24570 return false;
24571
24572 /* Signedness can vary between the original type and the debug type. This
24573 can happen for character types in Ada for instance: the character type
24574 used for code generation can be signed, to be compatible with the C one,
24575 but from a debugger point of view, it must be unsigned. */
24576 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24577 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24578
24579 if (is_orig_unsigned != is_debug_unsigned)
24580 src = fold_convert (discr_type, src);
24581
24582 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24583 return false;
24584
24585 dest->pos = is_debug_unsigned;
24586 if (is_debug_unsigned)
24587 dest->v.uval = tree_to_uhwi (src);
24588 else
24589 dest->v.sval = tree_to_shwi (src);
24590
24591 return true;
24592 }
24593
24594 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24595 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24596 store NULL_TREE in DISCR_DECL. Otherwise:
24597
24598 - store the discriminant field in STRUCT_TYPE that controls the variant
24599 part to *DISCR_DECL
24600
24601 - put in *DISCR_LISTS_P an array in which, for each variant, the element
24602 is the corresponding list of matching discriminant values.
24603
24604 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24605 the above array.
24606
24607 Note that when the array is allocated (i.e. when the analysis is
24608 successful), it is up to the caller to free the array. */
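 /* As a sketch of the predicate shapes handled below: a DECL_QUALIFIER
 such as PLACEHOLDER.discr == 1
 || (PLACEHOLDER.discr >= 3 && PLACEHOLDER.discr <= 7)
 yields the discriminant list "1, 3 .. 7" for that variant, whereas the
 boolean_true_node qualifier of a default variant yields no list at all
 (PLACEHOLDER.discr stands for a COMPONENT_REF of a PLACEHOLDER_EXPR). */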
24609
24610 static void
24611 analyze_variants_discr (tree variant_part_decl,
24612 tree struct_type,
24613 tree *discr_decl,
24614 dw_discr_list_ref **discr_lists_p,
24615 unsigned *discr_lists_length)
24616 {
24617 tree variant_part_type = TREE_TYPE (variant_part_decl);
24618 tree variant;
24619 dw_discr_list_ref *discr_lists;
24620 unsigned i;
24621
24622 /* Compute how many variants there are in this variant part. */
24623 *discr_lists_length = 0;
24624 for (variant = TYPE_FIELDS (variant_part_type);
24625 variant != NULL_TREE;
24626 variant = DECL_CHAIN (variant))
24627 ++*discr_lists_length;
24628
24629 *discr_decl = NULL_TREE;
24630 *discr_lists_p
24631 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24632 sizeof (**discr_lists_p));
24633 discr_lists = *discr_lists_p;
24634
24635 /* And then analyze all variants to extract discriminant information for all
24636 of them. This analysis is conservative: as soon as we detect something we
24637 do not support, abort everything and pretend we found nothing. */
24638 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24639 variant != NULL_TREE;
24640 variant = DECL_CHAIN (variant), ++i)
24641 {
24642 tree match_expr = DECL_QUALIFIER (variant);
24643
24644 /* Now, try to analyze the predicate and deduce a discriminant for
24645 it. */
24646 if (match_expr == boolean_true_node)
24647 /* Typically happens for the default variant: it matches all cases that
24648 previous variants rejected. Don't output any matching value for
24649 this one. */
24650 continue;
24651
24652 /* The following loop tries to iterate over each discriminant
24653 possibility: single values or ranges. */
24654 while (match_expr != NULL_TREE)
24655 {
24656 tree next_round_match_expr;
24657 tree candidate_discr = NULL_TREE;
24658 dw_discr_list_ref new_node = NULL;
24659
24660 /* Possibilities are matched one after the other by nested
24661 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24662 continue with the rest at next iteration. */
24663 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24664 {
24665 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24666 match_expr = TREE_OPERAND (match_expr, 1);
24667 }
24668 else
24669 next_round_match_expr = NULL_TREE;
24670
24671 if (match_expr == boolean_false_node)
24672 /* This sub-expression matches nothing: just wait for the next
24673 one. */
24674 ;
24675
24676 else if (TREE_CODE (match_expr) == EQ_EXPR)
24677 {
24678 /* We are matching: <discr_field> == <integer_cst>
24679 This sub-expression matches a single value. */
24680 tree integer_cst = TREE_OPERAND (match_expr, 1);
24681
24682 candidate_discr
24683 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24684 struct_type);
24685
24686 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24687 if (!get_discr_value (integer_cst,
24688 &new_node->dw_discr_lower_bound))
24689 goto abort;
24690 new_node->dw_discr_range = false;
24691 }
24692
24693 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24694 {
24695 /* We are matching:
24696 <discr_field> > <integer_cst>
24697 && <discr_field> < <integer_cst>.
24698 This sub-expression matches the range of values between the
24699 two matched integer constants. Note that comparisons can be
24700 inclusive or exclusive. */
24701 tree candidate_discr_1, candidate_discr_2;
24702 tree lower_cst, upper_cst;
24703 bool lower_cst_included, upper_cst_included;
24704 tree lower_op = TREE_OPERAND (match_expr, 0);
24705 tree upper_op = TREE_OPERAND (match_expr, 1);
24706
24707 /* When the comparison is exclusive, the integer constant is not
24708 the discriminant range bound we are looking for: we will have
24709 to increment or decrement it. */
24710 if (TREE_CODE (lower_op) == GE_EXPR)
24711 lower_cst_included = true;
24712 else if (TREE_CODE (lower_op) == GT_EXPR)
24713 lower_cst_included = false;
24714 else
24715 goto abort;
24716
24717 if (TREE_CODE (upper_op) == LE_EXPR)
24718 upper_cst_included = true;
24719 else if (TREE_CODE (upper_op) == LT_EXPR)
24720 upper_cst_included = false;
24721 else
24722 goto abort;
24723
24724 /* Extract the discriminant from the first operand and check it
24725 is consistent with the same analysis in the second
24726 operand. */
24727 candidate_discr_1
24728 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24729 struct_type);
24730 candidate_discr_2
24731 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24732 struct_type);
24733 if (candidate_discr_1 == candidate_discr_2)
24734 candidate_discr = candidate_discr_1;
24735 else
24736 goto abort;
24737
24738 /* Extract bounds from both. */
24739 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24740 lower_cst = TREE_OPERAND (lower_op, 1);
24741 upper_cst = TREE_OPERAND (upper_op, 1);
24742
24743 if (!lower_cst_included)
24744 lower_cst
24745 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24746 build_int_cst (TREE_TYPE (lower_cst), 1));
24747 if (!upper_cst_included)
24748 upper_cst
24749 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24750 build_int_cst (TREE_TYPE (upper_cst), 1));
24751
24752 if (!get_discr_value (lower_cst,
24753 &new_node->dw_discr_lower_bound)
24754 || !get_discr_value (upper_cst,
24755 &new_node->dw_discr_upper_bound))
24756 goto abort;
24757
24758 new_node->dw_discr_range = true;
24759 }
24760
24761 else
24762 /* Unsupported sub-expression: we cannot determine the set of
24763 matching discriminant values. Abort everything. */
24764 goto abort;
24765
24766 /* If the discriminant info is not consistent with what we saw so
24767 far, consider the analysis failed and abort everything. */
24768 if (candidate_discr == NULL_TREE
24769 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24770 goto abort;
24771 else
24772 *discr_decl = candidate_discr;
24773
24774 if (new_node != NULL)
24775 {
24776 new_node->dw_discr_next = discr_lists[i];
24777 discr_lists[i] = new_node;
24778 }
24779 match_expr = next_round_match_expr;
24780 }
24781 }
24782
24783 /* If we reach this point, we could match everything we were interested
24784 in. */
24785 return;
24786
24787 abort:
24788 /* Clean up all data structures and return no result. */
24789 free (*discr_lists_p);
24790 *discr_lists_p = NULL;
24791 *discr_decl = NULL_TREE;
24792 }
24793
24794 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24795 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24796 under CONTEXT_DIE.
24797
24798 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24799 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24800 this type, which are record types, represent the available variants and each
24801 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24802 values are inferred from these attributes.
24803
24804 In trees, the offsets for the fields inside these sub-records are relative
24805 to the variant part itself, whereas the corresponding DIEs should have
24806 offset attributes that are relative to the embedding record base address.
24807 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24808 must be an expression that computes the offset of the variant part to
24809 describe in DWARF. */
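 /* As an illustration (hypothetical Ada source):

 type Shape (Kind : Natural) is record
 case Kind is
 when 0 => Side : Natural;
 when others => Width, Height : Natural;
 end case;
 end record;

 the case part is a FIELD_DECL of QUAL_UNION_TYPE and is emitted as a
 DW_TAG_variant_part whose DW_AT_discr refers to the DIE for Kind, with
 one DW_TAG_variant child per alternative. */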
24810
24811 static void
24812 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24813 dw_die_ref context_die)
24814 {
24815 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24816 tree variant_part_offset = vlr_ctx->variant_part_offset;
24817 struct loc_descr_context ctx = {
24818 vlr_ctx->struct_type, /* context_type */
24819 NULL_TREE, /* base_decl */
24820 NULL, /* dpi */
24821 false, /* placeholder_arg */
24822 false /* placeholder_seen */
24823 };
24824
24825 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24826 NULL_TREE if there is no such field. */
24827 tree discr_decl = NULL_TREE;
24828 dw_discr_list_ref *discr_lists;
24829 unsigned discr_lists_length = 0;
24830 unsigned i;
24831
24832 dw_die_ref dwarf_proc_die = NULL;
24833 dw_die_ref variant_part_die
24834 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24835
24836 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24837
24838 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24839 &discr_decl, &discr_lists, &discr_lists_length);
24840
24841 if (discr_decl != NULL_TREE)
24842 {
24843 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24844
24845 if (discr_die)
24846 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24847 else
24848 /* We have no DIE for the discriminant, so just discard all
24849 discriminant information in the output. */
24850 discr_decl = NULL_TREE;
24851 }
24852
24853 /* If the offset for this variant part is more complex than a constant,
24854 create a DWARF procedure for it so that we will not have to generate DWARF
24855 expressions for it for each member. */
24856 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24857 && (dwarf_version >= 3 || !dwarf_strict))
24858 {
24859 const tree dwarf_proc_fndecl
24860 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24861 build_function_type (TREE_TYPE (variant_part_offset),
24862 NULL_TREE));
24863 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24864 const dw_loc_descr_ref dwarf_proc_body
24865 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24866
24867 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24868 dwarf_proc_fndecl, context_die);
24869 if (dwarf_proc_die != NULL)
24870 variant_part_offset = dwarf_proc_call;
24871 }
24872
24873 /* Output DIEs for all variants. */
24874 i = 0;
24875 for (tree variant = TYPE_FIELDS (variant_part_type);
24876 variant != NULL_TREE;
24877 variant = DECL_CHAIN (variant), ++i)
24878 {
24879 tree variant_type = TREE_TYPE (variant);
24880 dw_die_ref variant_die;
24881
24882 /* All variants (i.e. members of a variant part) are supposed to be
24883 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
24884 under these records. */
24885 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
24886
24887 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
24888 equate_decl_number_to_die (variant, variant_die);
24889
24890 /* Output discriminant values this variant matches, if any. */
24891 if (discr_decl == NULL || discr_lists[i] == NULL)
24892 /* If we have no discriminant information for this variant, this is
24893 probably the default variant: as the standard says, don't
24894 output any discriminant value/list attribute. */
24895 ;
24896 else if (discr_lists[i]->dw_discr_next == NULL
24897 && !discr_lists[i]->dw_discr_range)
24898 /* If there is only one accepted value, don't bother outputting a
24899 list. */
24900 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
24901 else
24902 add_discr_list (variant_die, discr_lists[i]);
24903
24904 for (tree member = TYPE_FIELDS (variant_type);
24905 member != NULL_TREE;
24906 member = DECL_CHAIN (member))
24907 {
24908 struct vlr_context vlr_sub_ctx = {
24909 vlr_ctx->struct_type, /* struct_type */
24910 NULL /* variant_part_offset */
24911 };
24912 if (is_variant_part (member))
24913 {
24914 /* All offsets for fields inside variant parts are relative to
24915 the top-level embedding RECORD_TYPE's base address. On the
24916 other hand, offsets in GCC's types are relative to the
24917 nested-most variant part. So we have to sum offsets each time
24918 we recurse. */
24919
24920 vlr_sub_ctx.variant_part_offset
24921 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
24922 variant_part_offset, byte_position (member));
24923 gen_variant_part (member, &vlr_sub_ctx, variant_die);
24924 }
24925 else
24926 {
24927 vlr_sub_ctx.variant_part_offset = variant_part_offset;
24928 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
24929 }
24930 }
24931 }
24932
24933 free (discr_lists);
24934 }
24935
24936 /* Generate a DIE for a class member. */
24937
24938 static void
24939 gen_member_die (tree type, dw_die_ref context_die)
24940 {
24941 tree member;
24942 tree binfo = TYPE_BINFO (type);
24943
24944 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
24945
24946 /* If this is not an incomplete type, output descriptions of each of its
24947 members. Note that as we output the DIEs necessary to represent the
24948 members of this record or union type, we will also be trying to output
24949 DIEs to represent the *types* of those members. However the `type'
24950 function (above) will specifically avoid generating type DIEs for member
24951 types *within* the list of member DIEs for this (containing) type except
24952 for those types (of members) which are explicitly marked as also being
24953 members of this (containing) type themselves. The g++ front-end can
24954 force any given type to be treated as a member of some other (containing)
24955 type by setting the TYPE_CONTEXT of the given (member) type to point to
24956 the TREE node representing the appropriate (containing) type. */
24957
24958 /* First output info about the base classes. */
24959 if (binfo)
24960 {
24961 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
24962 int i;
24963 tree base;
24964
24965 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
24966 gen_inheritance_die (base,
24967 (accesses ? (*accesses)[i] : access_public_node),
24968 type,
24969 context_die);
24970 }
24971
24972 /* Now output info about the data members and type members. */
24973 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
24974 {
24975 struct vlr_context vlr_ctx = { type, NULL_TREE };
24976 bool static_inline_p
24977 = (TREE_STATIC (member)
24978 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
24979 != -1));
24980
24981 /* Ignore clones. */
24982 if (DECL_ABSTRACT_ORIGIN (member))
24983 continue;
24984
24985 /* If we thought we were generating minimal debug info for TYPE
24986 and then changed our minds, some of the member declarations
24987 may have already been defined. Don't define them again, but
24988 do put them in the right order. */
24989
24990 if (dw_die_ref child = lookup_decl_die (member))
24991 {
24992 /* Handle inline static data members, which only have in-class
24993 declarations. */
24994 dw_die_ref ref = NULL;
24995 if (child->die_tag == DW_TAG_variable
24996 && child->die_parent == comp_unit_die ())
24997 {
24998 ref = get_AT_ref (child, DW_AT_specification);
24999 /* For C++17 inline static data members followed by redundant
25000 out of class redeclaration, we might get here with
25001 child being the DIE created for the out of class
25002 redeclaration and with its DW_AT_specification being
25003 the DIE created for in-class definition. We want to
25004 reparent the latter, and don't want to create another
25005 DIE with DW_AT_specification in that case, because
25006 we already have one. */
25007 if (ref
25008 && static_inline_p
25009 && ref->die_tag == DW_TAG_variable
25010 && ref->die_parent == comp_unit_die ()
25011 && get_AT (ref, DW_AT_specification) == NULL)
25012 {
25013 child = ref;
25014 ref = NULL;
25015 static_inline_p = false;
25016 }
25017 }
25018
25019 if (child->die_tag == DW_TAG_variable
25020 && child->die_parent == comp_unit_die ()
25021 && ref == NULL)
25022 {
25023 reparent_child (child, context_die);
25024 if (dwarf_version < 5)
25025 child->die_tag = DW_TAG_member;
25026 }
25027 else
25028 splice_child_die (context_die, child);
25029 }
25030
25031 /* Do not generate standard DWARF for variant parts if we are generating
25032 the corresponding GNAT encodings: DIEs generated for both would
25033 conflict in our mappings. */
25034 else if (is_variant_part (member)
25035 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25036 {
25037 vlr_ctx.variant_part_offset = byte_position (member);
25038 gen_variant_part (member, &vlr_ctx, context_die);
25039 }
25040 else
25041 {
25042 vlr_ctx.variant_part_offset = NULL_TREE;
25043 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25044 }
25045
25046 /* For C++ inline static data members emit immediately a DW_TAG_variable
25047 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25048 DW_AT_specification. */
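 /* E.g. for "struct S { static inline int x = 0; };" (C++17), the in-class
 DIE for X remains a child of S's DIE, while the DW_TAG_variable emitted
 below at comp_unit_die () refers back to it via DW_AT_specification
 (illustrative example). */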
25049 if (static_inline_p)
25050 {
25051 int old_extern = DECL_EXTERNAL (member);
25052 DECL_EXTERNAL (member) = 0;
25053 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25054 DECL_EXTERNAL (member) = old_extern;
25055 }
25056 }
25057 }
25058
25059 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25060 is set, we pretend that the type was never defined, so we only get the
25061 member DIEs needed by later specification DIEs. */
25062
25063 static void
25064 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25065 enum debug_info_usage usage)
25066 {
25067 if (TREE_ASM_WRITTEN (type))
25068 {
25069 /* Fill in the bounds of variable-length fields in late dwarf if
25070 still incomplete. */
25071 if (!early_dwarf && variably_modified_type_p (type, NULL))
25072 for (tree member = TYPE_FIELDS (type);
25073 member;
25074 member = DECL_CHAIN (member))
25075 fill_variable_array_bounds (TREE_TYPE (member));
25076 return;
25077 }
25078
25079 dw_die_ref type_die = lookup_type_die (type);
25080 dw_die_ref scope_die = 0;
25081 int nested = 0;
25082 int complete = (TYPE_SIZE (type)
25083 && (! TYPE_STUB_DECL (type)
25084 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25085 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25086 complete = complete && should_emit_struct_debug (type, usage);
25087
25088 if (type_die && ! complete)
25089 return;
25090
25091 if (TYPE_CONTEXT (type) != NULL_TREE
25092 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25093 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25094 nested = 1;
25095
25096 scope_die = scope_die_for (type, context_die);
25097
25098 /* Generate child dies for template parameters. */
25099 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25100 schedule_generic_params_dies_gen (type);
25101
25102 if (! type_die || (nested && is_cu_die (scope_die)))
25103 /* First occurrence of type or toplevel definition of nested class. */
25104 {
25105 dw_die_ref old_die = type_die;
25106
25107 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25108 ? record_type_tag (type) : DW_TAG_union_type,
25109 scope_die, type);
25110 equate_type_number_to_die (type, type_die);
25111 if (old_die)
25112 add_AT_specification (type_die, old_die);
25113 else
25114 add_name_attribute (type_die, type_tag (type));
25115 }
25116 else
25117 remove_AT (type_die, DW_AT_declaration);
25118
25119 /* If this type has been completed, then give it a byte_size attribute and
25120 then give a list of members. */
25121 if (complete && !ns_decl)
25122 {
25123 /* Prevent infinite recursion in cases where the type of some member of
25124 this type is expressed in terms of this type itself. */
25125 TREE_ASM_WRITTEN (type) = 1;
25126 add_byte_size_attribute (type_die, type);
25127 add_alignment_attribute (type_die, type);
25128 if (TYPE_STUB_DECL (type) != NULL_TREE)
25129 {
25130 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25131 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25132 }
25133
25134 /* If the first reference to this type was as the return type of an
25135 inline function, then it may not have a parent. Fix this now. */
25136 if (type_die->die_parent == NULL)
25137 add_child_die (scope_die, type_die);
25138
25139 push_decl_scope (type);
25140 gen_member_die (type, type_die);
25141 pop_decl_scope ();
25142
25143 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25144 if (TYPE_ARTIFICIAL (type))
25145 add_AT_flag (type_die, DW_AT_artificial, 1);
25146
25147 /* GNU extension: Record what type our vtable lives in. */
25148 if (TYPE_VFIELD (type))
25149 {
25150 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25151
25152 gen_type_die (vtype, context_die);
25153 add_AT_die_ref (type_die, DW_AT_containing_type,
25154 lookup_type_die (vtype));
25155 }
25156 }
25157 else
25158 {
25159 add_AT_flag (type_die, DW_AT_declaration, 1);
25160
25161 /* We don't need to do this for function-local types. */
25162 if (TYPE_STUB_DECL (type)
25163 && ! decl_function_context (TYPE_STUB_DECL (type)))
25164 vec_safe_push (incomplete_types, type);
25165 }
25166
25167 if (get_AT (type_die, DW_AT_name))
25168 add_pubtype (type, type_die);
25169 }
25170
25171 /* Generate a DIE for a subroutine _type_. */
25172
25173 static void
25174 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25175 {
25176 tree return_type = TREE_TYPE (type);
25177 dw_die_ref subr_die
25178 = new_die (DW_TAG_subroutine_type,
25179 scope_die_for (type, context_die), type);
25180
25181 equate_type_number_to_die (type, subr_die);
25182 add_prototyped_attribute (subr_die, type);
25183 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25184 context_die);
25185 add_alignment_attribute (subr_die, type);
25186 gen_formal_types_die (type, subr_die);
25187
25188 if (get_AT (subr_die, DW_AT_name))
25189 add_pubtype (type, subr_die);
25190 if ((dwarf_version >= 5 || !dwarf_strict)
25191 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25192 add_AT_flag (subr_die, DW_AT_reference, 1);
25193 if ((dwarf_version >= 5 || !dwarf_strict)
25194 && lang_hooks.types.type_dwarf_attribute (type,
25195 DW_AT_rvalue_reference) != -1)
25196 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25197 }
25198
25199 /* Generate a DIE for a type definition. */
25200
25201 static void
25202 gen_typedef_die (tree decl, dw_die_ref context_die)
25203 {
25204 dw_die_ref type_die;
25205 tree type;
25206
25207 if (TREE_ASM_WRITTEN (decl))
25208 {
25209 if (DECL_ORIGINAL_TYPE (decl))
25210 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25211 return;
25212 }
25213
25214 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25215 checks in process_scope_var and modified_type_die), this should be called
25216 only for original types. */
25217 gcc_assert (decl_ultimate_origin (decl) == NULL
25218 || decl_ultimate_origin (decl) == decl);
25219
25220 TREE_ASM_WRITTEN (decl) = 1;
25221 type_die = new_die (DW_TAG_typedef, context_die, decl);
25222
25223 add_name_and_src_coords_attributes (type_die, decl);
25224 if (DECL_ORIGINAL_TYPE (decl))
25225 {
25226 type = DECL_ORIGINAL_TYPE (decl);
25227 if (type == error_mark_node)
25228 return;
25229
25230 gcc_assert (type != TREE_TYPE (decl));
25231 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25232 }
25233 else
25234 {
25235 type = TREE_TYPE (decl);
25236 if (type == error_mark_node)
25237 return;
25238
25239 if (is_naming_typedef_decl (TYPE_NAME (type)))
25240 {
25241 /* Here, we are in the case of decl being a typedef naming
25242 an anonymous type, e.g.:
25243 typedef struct {...} foo;
25244 In that case TREE_TYPE (decl) is not a typedef variant
25245 type and TYPE_NAME of the anonymous type is set to the
25246 TYPE_DECL of the typedef. This construct is emitted by
25247 the C++ FE.
25248
25249 TYPE is the anonymous struct named by the typedef
25250 DECL. As we need the DW_AT_type attribute of the
25251 DW_TAG_typedef to point to the DIE of TYPE, let's
25252 generate that DIE right away. add_type_attribute
25253 called below will then pick (via lookup_type_die) that
25254 anonymous struct DIE. */
25255 if (!TREE_ASM_WRITTEN (type))
25256 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25257
25258 /* This is a GNU Extension. We are adding a
25259 DW_AT_linkage_name attribute to the DIE of the
25260 anonymous struct TYPE. The value of that attribute
25261 is the name of the typedef decl naming the anonymous
25262 struct. This greatly eases the work of consumers of
25263 this debug info. */
25264 add_linkage_name_raw (lookup_type_die (type), decl);
25265 }
25266 }
25267
25268 add_type_attribute (type_die, type, decl_quals (decl), false,
25269 context_die);
25270
25271 if (is_naming_typedef_decl (decl))
25272 /* We want all subsequent calls to lookup_type_die with
25273 TYPE as argument to yield the DW_TAG_typedef we have just
25274 created. */
25275 equate_type_number_to_die (type, type_die);
25276
25277 add_alignment_attribute (type_die, TREE_TYPE (decl));
25278
25279 add_accessibility_attribute (type_die, decl);
25280
25281 if (DECL_ABSTRACT_P (decl))
25282 equate_decl_number_to_die (decl, type_die);
25283
25284 if (get_AT (type_die, DW_AT_name))
25285 add_pubtype (decl, type_die);
25286 }
25287
25288 /* Generate a DIE for a struct, class, enum or union type. */
25289
25290 static void
25291 gen_tagged_type_die (tree type,
25292 dw_die_ref context_die,
25293 enum debug_info_usage usage)
25294 {
25295 int need_pop;
25296
25297 if (type == NULL_TREE
25298 || !is_tagged_type (type))
25299 return;
25300
25301 if (TREE_ASM_WRITTEN (type))
25302 need_pop = 0;
25303 /* If this is a nested type whose containing class hasn't been written
25304 out yet, writing it out will cover this one, too. This does not apply
25305 to instantiations of member class templates; they need to be added to
25306 the containing class as they are generated. FIXME: This hurts the
25307 idea of combining type decls from multiple TUs, since we can't predict
25308 what set of template instantiations we'll get. */
25309 else if (TYPE_CONTEXT (type)
25310 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25311 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25312 {
25313 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25314
25315 if (TREE_ASM_WRITTEN (type))
25316 return;
25317
25318 /* If that failed, attach ourselves to the stub. */
25319 push_decl_scope (TYPE_CONTEXT (type));
25320 context_die = lookup_type_die (TYPE_CONTEXT (type));
25321 need_pop = 1;
25322 }
25323 else if (TYPE_CONTEXT (type) != NULL_TREE
25324 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25325 {
25326 /* If this type is local to a function that hasn't been written
25327 out yet, use a NULL context for now; it will be fixed up in
25328 decls_for_scope. */
25329 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25330 /* A declaration DIE doesn't count; nested types need to go in the
25331 specification. */
25332 if (context_die && is_declaration_die (context_die))
25333 context_die = NULL;
25334 need_pop = 0;
25335 }
25336 else
25337 {
25338 context_die = declare_in_namespace (type, context_die);
25339 need_pop = 0;
25340 }
25341
25342 if (TREE_CODE (type) == ENUMERAL_TYPE)
25343 {
25344 /* This might have been written out by the call to
25345 declare_in_namespace. */
25346 if (!TREE_ASM_WRITTEN (type))
25347 gen_enumeration_type_die (type, context_die);
25348 }
25349 else
25350 gen_struct_or_union_type_die (type, context_die, usage);
25351
25352 if (need_pop)
25353 pop_decl_scope ();
25354
25355 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25356 it up if it is ever completed. gen_*_type_die will set it for us
25357 when appropriate. */
25358 }
25359
25360 /* Generate a type description DIE. */
25361
25362 static void
25363 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25364 enum debug_info_usage usage)
25365 {
25366 struct array_descr_info info;
25367
25368 if (type == NULL_TREE || type == error_mark_node)
25369 return;
25370
25371 if (flag_checking && type)
25372 verify_type (type);
25373
25374 if (TYPE_NAME (type) != NULL_TREE
25375 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25376 && is_redundant_typedef (TYPE_NAME (type))
25377 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25378 /* The DECL of this type is a typedef we don't want to emit debug
25379 info for but we want debug info for its underlying typedef.
25380 This can happen, e.g., for the injected-class-name of a C++
25381 type. */
25382 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25383
25384 /* If TYPE is a typedef type variant, let's generate debug info
25385 for the parent typedef which TYPE is a type of. */
25386 if (typedef_variant_p (type))
25387 {
25388 if (TREE_ASM_WRITTEN (type))
25389 return;
25390
25391 tree name = TYPE_NAME (type);
25392 tree origin = decl_ultimate_origin (name);
25393 if (origin != NULL && origin != name)
25394 {
25395 gen_decl_die (origin, NULL, NULL, context_die);
25396 return;
25397 }
25398
25399 /* Prevent broken recursion; we can't hand off to the same type. */
25400 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25401
25402 /* Give typedefs the right scope. */
25403 context_die = scope_die_for (type, context_die);
25404
25405 TREE_ASM_WRITTEN (type) = 1;
25406
25407 gen_decl_die (name, NULL, NULL, context_die);
25408 return;
25409 }
25410
25411 /* If type is an anonymous tagged type named by a typedef, let's
25412 generate debug info for the typedef. */
25413 if (is_naming_typedef_decl (TYPE_NAME (type)))
25414 {
25415 /* Use the DIE of the containing namespace as the parent DIE of
25416 the type description DIE we want to generate. */
25417 if (DECL_CONTEXT (TYPE_NAME (type))
25418 && TREE_CODE (DECL_CONTEXT (TYPE_NAME (type))) == NAMESPACE_DECL)
25419 context_die = get_context_die (DECL_CONTEXT (TYPE_NAME (type)));
25420
25421 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25422 return;
25423 }
25424
25425 if (lang_hooks.types.get_debug_type)
25426 {
25427 tree debug_type = lang_hooks.types.get_debug_type (type);
25428
25429 if (debug_type != NULL_TREE && debug_type != type)
25430 {
25431 gen_type_die_with_usage (debug_type, context_die, usage);
25432 return;
25433 }
25434 }
25435
25436 /* We are going to output a DIE to represent the unqualified version
25437 of this type (i.e. without any const or volatile qualifiers) so
25438 get the main variant (i.e. the unqualified version) of this type
25439 now. (Vectors and arrays are special because the debugging info is in the
25440 cloned type itself. Similarly function/method types can contain extra
25441 ref-qualification). */
25442 if (TREE_CODE (type) == FUNCTION_TYPE
25443 || TREE_CODE (type) == METHOD_TYPE)
25444 {
25445 /* For function/method types, can't use type_main_variant here,
25446 because that can have different ref-qualifiers for C++,
25447 but try to canonicalize. */
25448 tree main = TYPE_MAIN_VARIANT (type);
25449 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25450 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25451 && check_base_type (t, main)
25452 && check_lang_type (t, type))
25453 {
25454 type = t;
25455 break;
25456 }
25457 }
25458 else if (TREE_CODE (type) != VECTOR_TYPE
25459 && TREE_CODE (type) != ARRAY_TYPE)
25460 type = type_main_variant (type);
25461
25462 /* If this is an array type with hidden descriptor, handle it first. */
25463 if (!TREE_ASM_WRITTEN (type)
25464 && lang_hooks.types.get_array_descr_info)
25465 {
25466 memset (&info, 0, sizeof (info));
25467 if (lang_hooks.types.get_array_descr_info (type, &info))
25468 {
25469 /* Fortran sometimes emits array types with no dimension. */
25470 gcc_assert (info.ndimensions >= 0
25471 && (info.ndimensions
25472 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25473 gen_descr_array_type_die (type, &info, context_die);
25474 TREE_ASM_WRITTEN (type) = 1;
25475 return;
25476 }
25477 }
25478
25479 if (TREE_ASM_WRITTEN (type))
25480 {
25481 /* Variable-length types may be incomplete even if
25482 TREE_ASM_WRITTEN. For such types, fall through to
25483 gen_array_type_die() and possibly fill in
25484 DW_AT_{upper,lower}_bound attributes. */
25485 if ((TREE_CODE (type) != ARRAY_TYPE
25486 && TREE_CODE (type) != RECORD_TYPE
25487 && TREE_CODE (type) != UNION_TYPE
25488 && TREE_CODE (type) != QUAL_UNION_TYPE)
25489 || !variably_modified_type_p (type, NULL))
25490 return;
25491 }
25492
25493 switch (TREE_CODE (type))
25494 {
25495 case ERROR_MARK:
25496 break;
25497
25498 case POINTER_TYPE:
25499 case REFERENCE_TYPE:
25500 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25501 ensures that the gen_type_die recursion will terminate even if the
25502 type is recursive. Recursive types are possible in Ada. */
25503 /* ??? We could perhaps do this for all types before the switch
25504 statement. */
25505 TREE_ASM_WRITTEN (type) = 1;
25506
25507 /* For these types, all that is required is that we output a DIE (or a
25508 set of DIEs) to represent the "basis" type. */
25509 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25510 DINFO_USAGE_IND_USE);
25511 break;
25512
25513 case OFFSET_TYPE:
25514 /* This code is used for C++ pointer-to-data-member types.
25515 Output a description of the relevant class type. */
25516 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25517 DINFO_USAGE_IND_USE);
25518
25519 /* Output a description of the type of the object pointed to. */
25520 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25521 DINFO_USAGE_IND_USE);
25522
25523 /* Now output a DIE to represent this pointer-to-data-member type
25524 itself. */
25525 gen_ptr_to_mbr_type_die (type, context_die);
25526 break;
25527
25528 case FUNCTION_TYPE:
25529 /* Force out return type (in case it wasn't forced out already). */
25530 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25531 DINFO_USAGE_DIR_USE);
25532 gen_subroutine_type_die (type, context_die);
25533 break;
25534
25535 case METHOD_TYPE:
25536 /* Force out return type (in case it wasn't forced out already). */
25537 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25538 DINFO_USAGE_DIR_USE);
25539 gen_subroutine_type_die (type, context_die);
25540 break;
25541
25542 case ARRAY_TYPE:
25543 case VECTOR_TYPE:
25544 gen_array_type_die (type, context_die);
25545 break;
25546
25547 case ENUMERAL_TYPE:
25548 case RECORD_TYPE:
25549 case UNION_TYPE:
25550 case QUAL_UNION_TYPE:
25551 gen_tagged_type_die (type, context_die, usage);
25552 return;
25553
25554 case VOID_TYPE:
25555 case INTEGER_TYPE:
25556 case REAL_TYPE:
25557 case FIXED_POINT_TYPE:
25558 case COMPLEX_TYPE:
25559 case BOOLEAN_TYPE:
25560 case POINTER_BOUNDS_TYPE:
25561 /* No DIEs needed for fundamental types. */
25562 break;
25563
25564 case NULLPTR_TYPE:
25565 case LANG_TYPE:
25566 /* Just use DW_TAG_unspecified_type. */
25567 {
25568 dw_die_ref type_die = lookup_type_die (type);
25569 if (type_die == NULL)
25570 {
25571 tree name = TYPE_IDENTIFIER (type);
25572 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25573 type);
25574 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25575 equate_type_number_to_die (type, type_die);
25576 }
25577 }
25578 break;
25579
25580 default:
25581 if (is_cxx_auto (type))
25582 {
25583 tree name = TYPE_IDENTIFIER (type);
25584 dw_die_ref *die = (name == get_identifier ("auto")
25585 ? &auto_die : &decltype_auto_die);
25586 if (!*die)
25587 {
25588 *die = new_die (DW_TAG_unspecified_type,
25589 comp_unit_die (), NULL_TREE);
25590 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25591 }
25592 equate_type_number_to_die (type, *die);
25593 break;
25594 }
25595 gcc_unreachable ();
25596 }
25597
25598 TREE_ASM_WRITTEN (type) = 1;
25599 }
25600
25601 static void
25602 gen_type_die (tree type, dw_die_ref context_die)
25603 {
25604 if (type != error_mark_node)
25605 {
25606 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25607 if (flag_checking)
25608 {
25609 dw_die_ref die = lookup_type_die (type);
25610 if (die)
25611 check_die (die);
25612 }
25613 }
25614 }
25615
25616 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25617 things which are local to the given block. */
25618
25619 static void
25620 gen_block_die (tree stmt, dw_die_ref context_die)
25621 {
25622 int must_output_die = 0;
25623 bool inlined_func;
25624
25625 /* Ignore blocks that are NULL. */
25626 if (stmt == NULL_TREE)
25627 return;
25628
25629 inlined_func = inlined_function_outer_scope_p (stmt);
25630
25631 /* If the block is one fragment of a non-contiguous block, do not
25632 process the variables, since they will have been done by the
25633 origin block. Do process subblocks. */
25634 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25635 {
25636 tree sub;
25637
25638 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25639 gen_block_die (sub, context_die);
25640
25641 return;
25642 }
25643
25644 /* Determine if we need to output any Dwarf DIEs at all to represent this
25645 block. */
25646 if (inlined_func)
25647 /* The outer scopes for inlinings *must* always be represented. We
25648 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25649 must_output_die = 1;
25650 else
25651 {
25652 /* Determine if this block directly contains any "significant"
25653 local declarations which we will need to output DIEs for. */
25654 if (debug_info_level > DINFO_LEVEL_TERSE)
25655 /* We are not in terse mode so *any* local declaration counts
25656 as being a "significant" one. */
25657 must_output_die = ((BLOCK_VARS (stmt) != NULL
25658 || BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25659 && (TREE_USED (stmt)
25660 || TREE_ASM_WRITTEN (stmt)
25661 || BLOCK_ABSTRACT (stmt)));
25662 else if ((TREE_USED (stmt)
25663 || TREE_ASM_WRITTEN (stmt)
25664 || BLOCK_ABSTRACT (stmt))
25665 && !dwarf2out_ignore_block (stmt))
25666 must_output_die = 1;
25667 }
25668
25669 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25670 DIE for any block which contains no significant local declarations at
25671 all. Rather, in such cases we just call `decls_for_scope' so that any
25672 needed Dwarf info for any sub-blocks will get properly generated. Note
25673 that in terse mode, our definition of what constitutes a "significant"
25674 local declaration gets restricted to include only inlined function
25675 instances and local (nested) function definitions. */
25676 if (must_output_die)
25677 {
25678 if (inlined_func)
25679 {
25680 /* If STMT block is abstract, that means we have been called
25681 indirectly from dwarf2out_abstract_function.
25682 That function rightfully marks the descendant blocks (of
25683 the abstract function it is dealing with) as being abstract,
25684 precisely to prevent us from emitting any
25685 DW_TAG_inlined_subroutine DIE as a descendent
25686 of an abstract function instance. So in that case, we should
25687 not call gen_inlined_subroutine_die.
25688
25689 Later though, when cgraph asks dwarf2out to emit info
25690 for the concrete instance of the function decl into which
25691 the concrete instance of STMT got inlined, the later will lead
25692 to the generation of a DW_TAG_inlined_subroutine DIE. */
25693 if (! BLOCK_ABSTRACT (stmt))
25694 gen_inlined_subroutine_die (stmt, context_die);
25695 }
25696 else
25697 gen_lexical_block_die (stmt, context_die);
25698 }
25699 else
25700 decls_for_scope (stmt, context_die);
25701 }
25702
25703 /* Process variable DECL (or variable with origin ORIGIN) within
25704 block STMT and add it to CONTEXT_DIE. */
25705 static void
25706 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25707 {
25708 dw_die_ref die;
25709 tree decl_or_origin = decl ? decl : origin;
25710
25711 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25712 die = lookup_decl_die (decl_or_origin);
25713 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25714 {
25715 if (TYPE_DECL_IS_STUB (decl_or_origin))
25716 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25717 else
25718 die = lookup_decl_die (decl_or_origin);
25719 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25720 if (! die && ! early_dwarf)
25721 return;
25722 }
25723 else
25724 die = NULL;
25725
25726 /* Avoid creating DIEs for local typedefs and concrete static variables that
25727 will only be pruned later. */
25728 if ((origin || decl_ultimate_origin (decl))
25729 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25730 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25731 {
25732 origin = decl_ultimate_origin (decl_or_origin);
25733 if (decl && VAR_P (decl) && die != NULL)
25734 {
25735 die = lookup_decl_die (origin);
25736 if (die != NULL)
25737 equate_decl_number_to_die (decl, die);
25738 }
25739 return;
25740 }
25741
25742 if (die != NULL && die->die_parent == NULL)
25743 add_child_die (context_die, die);
25744 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25745 {
25746 if (early_dwarf)
25747 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25748 stmt, context_die);
25749 }
25750 else
25751 {
25752 if (decl && DECL_P (decl))
25753 {
25754 die = lookup_decl_die (decl);
25755
25756 /* Early created DIEs do not have a parent as the decls refer
25757 to the function as DECL_CONTEXT rather than the BLOCK. */
25758 if (die && die->die_parent == NULL)
25759 {
25760 gcc_assert (in_lto_p);
25761 add_child_die (context_die, die);
25762 }
25763 }
25764
25765 gen_decl_die (decl, origin, NULL, context_die);
25766 }
25767 }
25768
25769 /* Generate all of the decls declared within a given scope and (recursively)
25770 all of its sub-blocks. */
25771
25772 static void
25773 decls_for_scope (tree stmt, dw_die_ref context_die)
25774 {
25775 tree decl;
25776 unsigned int i;
25777 tree subblocks;
25778
25779 /* Ignore NULL blocks. */
25780 if (stmt == NULL_TREE)
25781 return;
25782
25783 /* Output the DIEs to represent all of the data objects and typedefs
25784 declared directly within this block but not within any nested
25785 sub-blocks. Also, nested function and tag DIEs have been
25786 generated with a parent of NULL; fix that up now. We don't
25787 have to do this if we're at -g1. */
25788 if (debug_info_level > DINFO_LEVEL_TERSE)
25789 {
25790 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25791 process_scope_var (stmt, decl, NULL_TREE, context_die);
25792 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25793 origin - avoid doing this twice as we have no good way to see
25794 if we've done it once already. */
25795 if (! early_dwarf)
25796 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25797 {
25798 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25799 if (decl == current_function_decl)
25800 /* Ignore declarations of the current function: although they
25801 are declarations, gen_subprogram_die would treat them
25802 as definitions again, because they are equal to
25803 current_function_decl, and endlessly recurse. */;
25804 else if (TREE_CODE (decl) == FUNCTION_DECL)
25805 process_scope_var (stmt, decl, NULL_TREE, context_die);
25806 else
25807 process_scope_var (stmt, NULL_TREE, decl, context_die);
25808 }
25809 }
25810
25811 /* Even if we're at -g1, we need to process the subblocks in order to get
25812 inlined call information. */
25813
25814 /* Output the DIEs to represent all sub-blocks (and the items declared
25815 therein) of this block. */
25816 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25817 subblocks != NULL;
25818 subblocks = BLOCK_CHAIN (subblocks))
25819 gen_block_die (subblocks, context_die);
25820 }
25821
25822 /* Is this a typedef we can avoid emitting? */
25823
25824 bool
25825 is_redundant_typedef (const_tree decl)
25826 {
25827 if (TYPE_DECL_IS_STUB (decl))
25828 return true;
25829
25830 if (DECL_ARTIFICIAL (decl)
25831 && DECL_CONTEXT (decl)
25832 && is_tagged_type (DECL_CONTEXT (decl))
25833 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25834 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25835 /* Also ignore the artificial member typedef for the class name. */
25836 return true;
25837
25838 return false;
25839 }
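/* For illustration only: one typedef is_redundant_typedef is meant to
   skip is the artificial injected-class-name member the C++ front end
   adds, which behaves roughly as if the user had written

     struct S { int i; typedef S S; };

   Emitting a DW_TAG_typedef for that artificial member would only
   duplicate information already carried by the DW_TAG_structure_type
   DIE.  */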
25840
25841 /* Return TRUE if DECL is a typedef that names a type for linkage
25842 purposes. This kind of typedef is produced by the C++ FE for
25843 constructs like:
25844
25845 typedef struct {...} foo;
25846
25847 In that case, there is no typedef variant type produced for foo.
25848 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25849 struct type. */
25850
25851 static bool
25852 is_naming_typedef_decl (const_tree decl)
25853 {
25854 if (decl == NULL_TREE
25855 || TREE_CODE (decl) != TYPE_DECL
25856 || DECL_NAMELESS (decl)
25857 || !is_tagged_type (TREE_TYPE (decl))
25858 || DECL_IS_BUILTIN (decl)
25859 || is_redundant_typedef (decl)
25860 /* It looks like Ada produces TYPE_DECLs that are very similar
25861 to C++ naming typedefs but that have different
25862 semantics. Let's be specific to C++ for now. */
25863 || !is_cxx (decl))
25864 return FALSE;
25865
25866 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25867 && TYPE_NAME (TREE_TYPE (decl)) == decl
25868 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25869 != TYPE_NAME (TREE_TYPE (decl))));
25870 }
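/* A rough contrast for the comment above (C++ only, as checked via
   is_cxx):

     typedef struct { int i; } foo;		/- naming typedef: matched -/
     typedef struct bar { int j; } baz;	/- ordinary typedef: not matched -/

   Only FOO names an otherwise anonymous tagged type, so only its
   TYPE_DECL satisfies is_naming_typedef_decl; BAZ has a
   DECL_ORIGINAL_TYPE and is handled as a regular typedef.  */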
25871
25872 /* Looks up the DIE for a context. */
25873
25874 static inline dw_die_ref
25875 lookup_context_die (tree context)
25876 {
25877 if (context)
25878 {
25879 /* Find die that represents this context. */
25880 if (TYPE_P (context))
25881 {
25882 context = TYPE_MAIN_VARIANT (context);
25883 dw_die_ref ctx = lookup_type_die (context);
25884 if (!ctx)
25885 return NULL;
25886 return strip_naming_typedef (context, ctx);
25887 }
25888 else
25889 return lookup_decl_die (context);
25890 }
25891 return comp_unit_die ();
25892 }
25893
25894 /* Returns the DIE for a context. */
25895
25896 static inline dw_die_ref
25897 get_context_die (tree context)
25898 {
25899 if (context)
25900 {
25901 /* Find die that represents this context. */
25902 if (TYPE_P (context))
25903 {
25904 context = TYPE_MAIN_VARIANT (context);
25905 return strip_naming_typedef (context, force_type_die (context));
25906 }
25907 else
25908 return force_decl_die (context);
25909 }
25910 return comp_unit_die ();
25911 }
25912
25913 /* Returns the DIE for decl. A DIE will always be returned. */
25914
25915 static dw_die_ref
25916 force_decl_die (tree decl)
25917 {
25918 dw_die_ref decl_die;
25919 unsigned saved_external_flag;
25920 tree save_fn = NULL_TREE;
25921 decl_die = lookup_decl_die (decl);
25922 if (!decl_die)
25923 {
25924 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
25925
25926 decl_die = lookup_decl_die (decl);
25927 if (decl_die)
25928 return decl_die;
25929
25930 switch (TREE_CODE (decl))
25931 {
25932 case FUNCTION_DECL:
25933 /* Clear current_function_decl, so that gen_subprogram_die thinks
25934 that this is a declaration. At this point, we just want to force
25935 declaration die. */
25936 save_fn = current_function_decl;
25937 current_function_decl = NULL_TREE;
25938 gen_subprogram_die (decl, context_die);
25939 current_function_decl = save_fn;
25940 break;
25941
25942 case VAR_DECL:
25943 /* Set external flag to force declaration die. Restore it after
25944 gen_decl_die() call. */
25945 saved_external_flag = DECL_EXTERNAL (decl);
25946 DECL_EXTERNAL (decl) = 1;
25947 gen_decl_die (decl, NULL, NULL, context_die);
25948 DECL_EXTERNAL (decl) = saved_external_flag;
25949 break;
25950
25951 case NAMESPACE_DECL:
25952 if (dwarf_version >= 3 || !dwarf_strict)
25953 dwarf2out_decl (decl);
25954 else
25955 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
25956 decl_die = comp_unit_die ();
25957 break;
25958
25959 case TRANSLATION_UNIT_DECL:
25960 decl_die = comp_unit_die ();
25961 break;
25962
25963 default:
25964 gcc_unreachable ();
25965 }
25966
25967 /* We should be able to find the DIE now. */
25968 if (!decl_die)
25969 decl_die = lookup_decl_die (decl);
25970 gcc_assert (decl_die);
25971 }
25972
25973 return decl_die;
25974 }
25975
25976 /* Returns the DIE for TYPE, that must not be a base type. A DIE is
25977 always returned. */
25978
25979 static dw_die_ref
25980 force_type_die (tree type)
25981 {
25982 dw_die_ref type_die;
25983
25984 type_die = lookup_type_die (type);
25985 if (!type_die)
25986 {
25987 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
25988
25989 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
25990 false, context_die);
25991 gcc_assert (type_die);
25992 }
25993 return type_die;
25994 }
25995
25996 /* Force out any required namespaces to be able to output DECL,
25997 and return the new context_die for it, if it's changed. */
25998
25999 static dw_die_ref
26000 setup_namespace_context (tree thing, dw_die_ref context_die)
26001 {
26002 tree context = (DECL_P (thing)
26003 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
26004 if (context && TREE_CODE (context) == NAMESPACE_DECL)
26005 /* Force out the namespace. */
26006 context_die = force_decl_die (context);
26007
26008 return context_die;
26009 }
26010
26011 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
26012 type) within its namespace, if appropriate.
26013
26014 For compatibility with older debuggers, namespace DIEs only contain
26015 declarations; all definitions are emitted at CU scope, with
26016 DW_AT_specification pointing to the declaration (like with class
26017 members). */
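/* A simplified sketch of the shape this aims for (illustrative, not the
   exact output): for

     namespace N { int v = 1; }

   the declaration lives under the namespace and the definition at CU
   scope:

     DW_TAG_namespace "N"
       DW_TAG_variable "v"		<- declaration
     DW_TAG_variable			<- definition, at CU scope
       DW_AT_specification -> declaration of "v"
       DW_AT_location ...  */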
26018
26019 static dw_die_ref
26020 declare_in_namespace (tree thing, dw_die_ref context_die)
26021 {
26022 dw_die_ref ns_context;
26023
26024 if (debug_info_level <= DINFO_LEVEL_TERSE)
26025 return context_die;
26026
26027 /* External declarations in the local scope only need to be emitted
26028 once, not once in the namespace and once in the scope.
26029
26030 This avoids declaring the `extern' below in the
26031 namespace DIE as well as in the innermost scope:
26032
26033 namespace S
26034 {
26035 int i=5;
26036 int foo()
26037 {
26038 int i=8;
26039 extern int i;
26040 return i;
26041 }
26042 }
26043 */
26044 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26045 return context_die;
26046
26047 /* If this decl is from an inlined function, then don't try to emit it in its
26048 namespace, as we will get confused. It would have already been emitted
26049 when the abstract instance of the inline function was emitted anyway. */
26050 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26051 return context_die;
26052
26053 ns_context = setup_namespace_context (thing, context_die);
26054
26055 if (ns_context != context_die)
26056 {
26057 if (is_fortran ())
26058 return ns_context;
26059 if (DECL_P (thing))
26060 gen_decl_die (thing, NULL, NULL, ns_context);
26061 else
26062 gen_type_die (thing, ns_context);
26063 }
26064 return context_die;
26065 }
26066
26067 /* Generate a DIE for a namespace or namespace alias. */
26068
26069 static void
26070 gen_namespace_die (tree decl, dw_die_ref context_die)
26071 {
26072 dw_die_ref namespace_die;
26073
26074 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26075 they are an alias of. */
26076 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26077 {
26078 /* Output a real namespace or module. */
26079 context_die = setup_namespace_context (decl, comp_unit_die ());
26080 namespace_die = new_die (is_fortran ()
26081 ? DW_TAG_module : DW_TAG_namespace,
26082 context_die, decl);
26083 /* For Fortran modules defined in a different CU, don't add src coords. */
26084 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26085 {
26086 const char *name = dwarf2_name (decl, 0);
26087 if (name)
26088 add_name_attribute (namespace_die, name);
26089 }
26090 else
26091 add_name_and_src_coords_attributes (namespace_die, decl);
26092 if (DECL_EXTERNAL (decl))
26093 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26094 equate_decl_number_to_die (decl, namespace_die);
26095 }
26096 else
26097 {
26098 /* Output a namespace alias. */
26099
26100 /* Force out the namespace we are an alias of, if necessary. */
26101 dw_die_ref origin_die
26102 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26103
26104 if (DECL_FILE_SCOPE_P (decl)
26105 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26106 context_die = setup_namespace_context (decl, comp_unit_die ());
26107 /* Now create the namespace alias DIE. */
26108 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26109 add_name_and_src_coords_attributes (namespace_die, decl);
26110 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26111 equate_decl_number_to_die (decl, namespace_die);
26112 }
26113 if ((dwarf_version >= 5 || !dwarf_strict)
26114 && lang_hooks.decls.decl_dwarf_attribute (decl,
26115 DW_AT_export_symbols) == 1)
26116 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26117
26118 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26119 if (want_pubnames ())
26120 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26121 }
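/* Illustrative example for the alias branch of gen_namespace_die above:

     namespace A { int x; }
     namespace B = A;

   yields a DW_TAG_namespace DIE for A, and for the alias B a
   DW_TAG_imported_declaration whose DW_AT_import points at A's DIE
   (the DECL_ABSTRACT_ORIGIN of B is A's NAMESPACE_DECL).  */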
26122
26123 /* Generate Dwarf debug information for a decl described by DECL.
26124 The return value is currently only meaningful for PARM_DECLs,
26125 for all other decls it returns NULL.
26126
26127 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26128 It can be NULL otherwise. */
26129
26130 static dw_die_ref
26131 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26132 dw_die_ref context_die)
26133 {
26134 tree decl_or_origin = decl ? decl : origin;
26135 tree class_origin = NULL, ultimate_origin;
26136
26137 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26138 return NULL;
26139
26140 /* Ignore pointer bounds decls. */
26141 if (DECL_P (decl_or_origin)
26142 && TREE_TYPE (decl_or_origin)
26143 && POINTER_BOUNDS_P (decl_or_origin))
26144 return NULL;
26145
26146 switch (TREE_CODE (decl_or_origin))
26147 {
26148 case ERROR_MARK:
26149 break;
26150
26151 case CONST_DECL:
26152 if (!is_fortran () && !is_ada ())
26153 {
26154 /* The individual enumerators of an enum type get output when we output
26155 the Dwarf representation of the relevant enum type itself. */
26156 break;
26157 }
26158
26159 /* Emit its type. */
26160 gen_type_die (TREE_TYPE (decl), context_die);
26161
26162 /* And its containing namespace. */
26163 context_die = declare_in_namespace (decl, context_die);
26164
26165 gen_const_die (decl, context_die);
26166 break;
26167
26168 case FUNCTION_DECL:
26169 #if 0
26170 /* FIXME */
26171 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26172 on local redeclarations of global functions. That seems broken. */
26173 if (current_function_decl != decl)
26174 /* This is only a declaration. */;
26175 #endif
26176
26177 /* We should have abstract copies already and should not generate
26178 stray type DIEs in late LTO dumping. */
26179 if (! early_dwarf)
26180 ;
26181
26182 /* If we're emitting a clone, emit info for the abstract instance. */
26183 else if (origin || DECL_ORIGIN (decl) != decl)
26184 dwarf2out_abstract_function (origin
26185 ? DECL_ORIGIN (origin)
26186 : DECL_ABSTRACT_ORIGIN (decl));
26187
26188 /* If we're emitting a possibly inlined function emit it as
26189 abstract instance. */
26190 else if (cgraph_function_possibly_inlined_p (decl)
26191 && ! DECL_ABSTRACT_P (decl)
26192 && ! class_or_namespace_scope_p (context_die)
26193 /* dwarf2out_abstract_function won't emit a die if this is just
26194 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26195 that case, because that works only if we have a die. */
26196 && DECL_INITIAL (decl) != NULL_TREE)
26197 dwarf2out_abstract_function (decl);
26198
26199 /* Otherwise we're emitting the primary DIE for this decl. */
26200 else if (debug_info_level > DINFO_LEVEL_TERSE)
26201 {
26202 /* Before we describe the FUNCTION_DECL itself, make sure that we
26203 have its containing type. */
26204 if (!origin)
26205 origin = decl_class_context (decl);
26206 if (origin != NULL_TREE)
26207 gen_type_die (origin, context_die);
26208
26209 /* And its return type. */
26210 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26211
26212 /* And its virtual context. */
26213 if (DECL_VINDEX (decl) != NULL_TREE)
26214 gen_type_die (DECL_CONTEXT (decl), context_die);
26215
26216 /* Make sure we have a member DIE for decl. */
26217 if (origin != NULL_TREE)
26218 gen_type_die_for_member (origin, decl, context_die);
26219
26220 /* And its containing namespace. */
26221 context_die = declare_in_namespace (decl, context_die);
26222 }
26223
26224 /* Now output a DIE to represent the function itself. */
26225 if (decl)
26226 gen_subprogram_die (decl, context_die);
26227 break;
26228
26229 case TYPE_DECL:
26230 /* If we are in terse mode, don't generate any DIEs to represent any
26231 actual typedefs. */
26232 if (debug_info_level <= DINFO_LEVEL_TERSE)
26233 break;
26234
26235 /* In the special case of a TYPE_DECL node representing the declaration
26236 of some type tag, if the given TYPE_DECL is marked as having been
26237 instantiated from some other (original) TYPE_DECL node (e.g. one which
26238 was generated within the original definition of an inline function) we
26239 used to generate a special (abbreviated) DW_TAG_structure_type,
26240 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26241 should actually be referencing those DIEs, as variable DIEs with that
26242 type would already have been emitted in the abstract origin, so they
26243 were always removed during unused type pruning. Don't add anything in
26244 this case. */
26245 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26246 break;
26247
26248 if (is_redundant_typedef (decl))
26249 gen_type_die (TREE_TYPE (decl), context_die);
26250 else
26251 /* Output a DIE to represent the typedef itself. */
26252 gen_typedef_die (decl, context_die);
26253 break;
26254
26255 case LABEL_DECL:
26256 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26257 gen_label_die (decl, context_die);
26258 break;
26259
26260 case VAR_DECL:
26261 case RESULT_DECL:
26262 /* If we are in terse mode, don't generate any DIEs to represent any
26263 variable declarations or definitions. */
26264 if (debug_info_level <= DINFO_LEVEL_TERSE)
26265 break;
26266
26267 /* Avoid generating stray type DIEs during late dwarf dumping.
26268 All types have been dumped early. */
26269 if (early_dwarf
26270 /* ??? But in LTRANS we cannot annotate early created variably
26271 modified type DIEs without copying them and adjusting all
26272 references to them. Dump them again as happens for inlining
26273 which copies both the decl and the types. */
26274 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26275 in VLA bound information for example. */
26276 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26277 current_function_decl)))
26278 {
26279 /* Output any DIEs that are needed to specify the type of this data
26280 object. */
26281 if (decl_by_reference_p (decl_or_origin))
26282 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26283 else
26284 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26285 }
26286
26287 if (early_dwarf)
26288 {
26289 /* And its containing type. */
26290 class_origin = decl_class_context (decl_or_origin);
26291 if (class_origin != NULL_TREE)
26292 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26293
26294 /* And its containing namespace. */
26295 context_die = declare_in_namespace (decl_or_origin, context_die);
26296 }
26297
26298 /* Now output the DIE to represent the data object itself. This gets
26299 complicated because of the possibility that the VAR_DECL really
26300 represents an inlined instance of a formal parameter for an inline
26301 function. */
26302 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26303 if (ultimate_origin != NULL_TREE
26304 && TREE_CODE (ultimate_origin) == PARM_DECL)
26305 gen_formal_parameter_die (decl, origin,
26306 true /* Emit name attribute. */,
26307 context_die);
26308 else
26309 gen_variable_die (decl, origin, context_die);
26310 break;
26311
26312 case FIELD_DECL:
26313 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26314 /* Ignore the nameless fields that are used to skip bits but handle C++
26315 anonymous unions and structs. */
26316 if (DECL_NAME (decl) != NULL_TREE
26317 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26318 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26319 {
26320 gen_type_die (member_declared_type (decl), context_die);
26321 gen_field_die (decl, ctx, context_die);
26322 }
26323 break;
26324
26325 case PARM_DECL:
26326 /* Avoid generating stray type DIEs during late dwarf dumping.
26327 All types have been dumped early. */
26328 if (early_dwarf
26329 /* ??? But in LTRANS we cannot annotate early created variably
26330 modified type DIEs without copying them and adjusting all
26331 references to them. Dump them again as happens for inlining
26332 which copies both the decl and the types. */
26333 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26334 in VLA bound information for example. */
26335 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26336 current_function_decl)))
26337 {
26338 if (DECL_BY_REFERENCE (decl_or_origin))
26339 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26340 else
26341 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26342 }
26343 return gen_formal_parameter_die (decl, origin,
26344 true /* Emit name attribute. */,
26345 context_die);
26346
26347 case NAMESPACE_DECL:
26348 if (dwarf_version >= 3 || !dwarf_strict)
26349 gen_namespace_die (decl, context_die);
26350 break;
26351
26352 case IMPORTED_DECL:
26353 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26354 DECL_CONTEXT (decl), context_die);
26355 break;
26356
26357 case NAMELIST_DECL:
26358 gen_namelist_decl (DECL_NAME (decl), context_die,
26359 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26360 break;
26361
26362 default:
26363 /* Probably some frontend-internal decl. Assume we don't care. */
26364 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26365 break;
26366 }
26367
26368 return NULL;
26369 }
26370 \f
26371 /* Output initial debug information for global DECL. Called at the
26372 end of the parsing process.
26373
26374 This is the initial debug generation process. As such, the DIEs
26375 generated may be incomplete. A later debug generation pass
26376 (dwarf2out_late_global_decl) will augment the information generated
26377 in this pass (e.g., with complete location info). */
26378
26379 static void
26380 dwarf2out_early_global_decl (tree decl)
26381 {
26382 set_early_dwarf s;
26383
26384 /* gen_decl_die() will set DECL_ABSTRACT because
26385 cgraph_function_possibly_inlined_p() returns true. This in
26386 turn will cause DW_AT_inline attributes to be set.
26387
26388 This happens because at early dwarf generation, there is no
26389 cgraph information, causing cgraph_function_possibly_inlined_p()
26390 to return true. Trick cgraph_function_possibly_inlined_p()
26391 while we generate dwarf early. */
26392 bool save = symtab->global_info_ready;
26393 symtab->global_info_ready = true;
26394
26395 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26396 other DECLs and they can point to template types or other things
26397 that dwarf2out can't handle when done via dwarf2out_decl. */
26398 if (TREE_CODE (decl) != TYPE_DECL
26399 && TREE_CODE (decl) != PARM_DECL)
26400 {
26401 if (TREE_CODE (decl) == FUNCTION_DECL)
26402 {
26403 tree save_fndecl = current_function_decl;
26404
26405 /* For nested functions, make sure we have DIEs for the parents first
26406 so that all nested DIEs are generated at the proper scope in the
26407 first shot. */
26408 tree context = decl_function_context (decl);
26409 if (context != NULL)
26410 {
26411 dw_die_ref context_die = lookup_decl_die (context);
26412 current_function_decl = context;
26413
26414 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26415 enough so that it lands in its own context. This avoids type
26416 pruning issues later on. */
26417 if (context_die == NULL || is_declaration_die (context_die))
26418 dwarf2out_decl (context);
26419 }
26420
26421 /* Emit an abstract origin of a function first. This happens
26422 with C++ constructor clones for example and makes
26423 dwarf2out_abstract_function happy which requires the early
26424 DIE of the abstract instance to be present. */
26425 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26426 dw_die_ref origin_die;
26427 if (origin != NULL
26428 /* Do not emit the DIE multiple times but make sure to
26429 process it fully here in case we just saw a declaration. */
26430 && ((origin_die = lookup_decl_die (origin)) == NULL
26431 || is_declaration_die (origin_die)))
26432 {
26433 current_function_decl = origin;
26434 dwarf2out_decl (origin);
26435 }
26436
26437 /* Emit the DIE for decl but avoid doing that multiple times. */
26438 dw_die_ref old_die;
26439 if ((old_die = lookup_decl_die (decl)) == NULL
26440 || is_declaration_die (old_die))
26441 {
26442 current_function_decl = decl;
26443 dwarf2out_decl (decl);
26444 }
26445
26446 current_function_decl = save_fndecl;
26447 }
26448 else
26449 dwarf2out_decl (decl);
26450 }
26451 symtab->global_info_ready = save;
26452 }
26453
26454 /* Return whether EXPR is an expression with the following pattern:
26455 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
26456
26457 static bool
26458 is_trivial_indirect_ref (tree expr)
26459 {
26460 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26461 return false;
26462
26463 tree nop = TREE_OPERAND (expr, 0);
26464 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26465 return false;
26466
26467 tree int_cst = TREE_OPERAND (nop, 0);
26468 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26469 }
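/* As a sketch of what the pattern above means at the source level: a
   DECL_VALUE_EXPR of the form

     *(volatile int *) 0x40021000

   (an object placed at a fixed address) is INDIRECT_REF (NOP_EXPR
   (INTEGER_CST)) and is accepted, whereas a dereference involving a
   real symbol is not, avoiding the relocation problem described in
   dwarf2out_late_global_decl below.  */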
26470
26471 /* Output debug information for global decl DECL. Called from
26472 toplev.c after compilation proper has finished. */
26473
26474 static void
26475 dwarf2out_late_global_decl (tree decl)
26476 {
26477 /* Fill-in any location information we were unable to determine
26478 on the first pass. */
26479 if (VAR_P (decl) && !POINTER_BOUNDS_P (decl))
26480 {
26481 dw_die_ref die = lookup_decl_die (decl);
26482
26483 /* We may have to generate early debug late for LTO in case debug
26484 was not enabled at compile-time or the target doesn't support
26485 the LTO early debug scheme. */
26486 if (! die && in_lto_p)
26487 {
26488 dwarf2out_decl (decl);
26489 die = lookup_decl_die (decl);
26490 }
26491
26492 if (die)
26493 {
26494 /* We get called via the symtab code invoking late_global_decl
26495 for symbols that are optimized out.
26496
26497 Do not add locations for those, except if they have a
26498 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26499 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26500 INDIRECT_REF expression, as this could generate relocations to
26501 text symbols in LTO object files, which is invalid. */
26502 varpool_node *node = varpool_node::get (decl);
26503 if ((! node || ! node->definition)
26504 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26505 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26506 tree_add_const_value_attribute_for_decl (die, decl);
26507 else
26508 add_location_or_const_value_attribute (die, decl, false);
26509 }
26510 }
26511 }
26512
26513 /* Output debug information for type decl DECL. Called from toplev.c
26514 and from language front ends (to record built-in types). */
26515 static void
26516 dwarf2out_type_decl (tree decl, int local)
26517 {
26518 if (!local)
26519 {
26520 set_early_dwarf s;
26521 dwarf2out_decl (decl);
26522 }
26523 }
26524
26525 /* Output debug information for imported module or decl DECL.
26526 NAME is non-NULL name in the lexical block if the decl has been renamed.
26527 LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK)
26528 that DECL belongs to.
26529 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26530 static void
26531 dwarf2out_imported_module_or_decl_1 (tree decl,
26532 tree name,
26533 tree lexical_block,
26534 dw_die_ref lexical_block_die)
26535 {
26536 expanded_location xloc;
26537 dw_die_ref imported_die = NULL;
26538 dw_die_ref at_import_die;
26539
26540 if (TREE_CODE (decl) == IMPORTED_DECL)
26541 {
26542 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26543 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26544 gcc_assert (decl);
26545 }
26546 else
26547 xloc = expand_location (input_location);
26548
26549 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26550 {
26551 at_import_die = force_type_die (TREE_TYPE (decl));
26552 /* For namespace N { typedef void T; } using N::T; base_type_die
26553 returns NULL, but DW_TAG_imported_declaration requires
26554 the DW_AT_import attribute. Force creation of DW_TAG_typedef. */
26555 if (!at_import_die)
26556 {
26557 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26558 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26559 at_import_die = lookup_type_die (TREE_TYPE (decl));
26560 gcc_assert (at_import_die);
26561 }
26562 }
26563 else
26564 {
26565 at_import_die = lookup_decl_die (decl);
26566 if (!at_import_die)
26567 {
26568 /* If we're trying to avoid duplicate debug info, we may not have
26569 emitted the member decl for this field. Emit it now. */
26570 if (TREE_CODE (decl) == FIELD_DECL)
26571 {
26572 tree type = DECL_CONTEXT (decl);
26573
26574 if (TYPE_CONTEXT (type)
26575 && TYPE_P (TYPE_CONTEXT (type))
26576 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26577 DINFO_USAGE_DIR_USE))
26578 return;
26579 gen_type_die_for_member (type, decl,
26580 get_context_die (TYPE_CONTEXT (type)));
26581 }
26582 if (TREE_CODE (decl) == NAMELIST_DECL)
26583 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26584 get_context_die (DECL_CONTEXT (decl)),
26585 NULL_TREE);
26586 else
26587 at_import_die = force_decl_die (decl);
26588 }
26589 }
26590
26591 if (TREE_CODE (decl) == NAMESPACE_DECL)
26592 {
26593 if (dwarf_version >= 3 || !dwarf_strict)
26594 imported_die = new_die (DW_TAG_imported_module,
26595 lexical_block_die,
26596 lexical_block);
26597 else
26598 return;
26599 }
26600 else
26601 imported_die = new_die (DW_TAG_imported_declaration,
26602 lexical_block_die,
26603 lexical_block);
26604
26605 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26606 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26607 if (debug_column_info && xloc.column)
26608 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26609 if (name)
26610 add_AT_string (imported_die, DW_AT_name,
26611 IDENTIFIER_POINTER (name));
26612 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26613 }
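/* A rough mapping for the function above, for illustration:

     using namespace N;	(C++ using-directive)
       -> DW_TAG_imported_module, DW_AT_import -> N's DW_TAG_namespace

     using N::f;		(C++ using-declaration)
       -> DW_TAG_imported_declaration, DW_AT_import -> f's DIE

   Both carry DW_AT_decl_file / DW_AT_decl_line (and DW_AT_decl_column
   when column info is enabled), as added at the end above.  */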
26614
26615 /* Output debug information for imported module or decl DECL.
26616 NAME is non-NULL name in context if the decl has been renamed.
26617 CHILD is true if decl is one of the renamed decls as part of
26618 importing whole module.
26619 IMPLICIT is set if this hook is called for an implicit import
26620 such as inline namespace. */
26621
26622 static void
26623 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26624 bool child, bool implicit)
26625 {
26626 /* dw_die_ref at_import_die; */
26627 dw_die_ref scope_die;
26628
26629 if (debug_info_level <= DINFO_LEVEL_TERSE)
26630 return;
26631
26632 gcc_assert (decl);
26633
26634 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26635 should be enough. For DWARF4 and older, even if we emit
26636 DW_AT_export_symbols as an extension, add the implicit
26637 DW_TAG_imported_module anyway for consumers unaware of it. */
26638 if (implicit
26639 && dwarf_version >= 5
26640 && lang_hooks.decls.decl_dwarf_attribute (decl,
26641 DW_AT_export_symbols) == 1)
26642 return;
26643
26644 set_early_dwarf s;
26645
26646 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need
26647 two DIEs: the decl DIE for the reference and the scope DIE. First, get
26648 the DIE for the decl itself. */
26649
26650 /* Get the scope DIE for the decl's context. Use comp_unit_die for a global
26651 module or decl. If no DIE is found for a non-global, force a new one. */
26652 if (context
26653 && TYPE_P (context)
26654 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26655 return;
26656
26657 scope_die = get_context_die (context);
26658
26659 if (child)
26660 {
26661 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26662 there is nothing we can do here. */
26663 if (dwarf_version < 3 && dwarf_strict)
26664 return;
26665
26666 gcc_assert (scope_die->die_child);
26667 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26668 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26669 scope_die = scope_die->die_child;
26670 }
26671
26672 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26673 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26674 }
26675
26676 /* Output debug information for namelists. */
26677
26678 static dw_die_ref
26679 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26680 {
26681 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26682 tree value;
26683 unsigned i;
26684
26685 if (debug_info_level <= DINFO_LEVEL_TERSE)
26686 return NULL;
26687
26688 gcc_assert (scope_die != NULL);
26689 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26690 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26691
26692 /* If there are no item_decls, we have a nondefining namelist, e.g.
26693 with USE association; hence, set DW_AT_declaration. */
26694 if (item_decls == NULL_TREE)
26695 {
26696 add_AT_flag (nml_die, DW_AT_declaration, 1);
26697 return nml_die;
26698 }
26699
26700 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26701 {
26702 nml_item_ref_die = lookup_decl_die (value);
26703 if (!nml_item_ref_die)
26704 nml_item_ref_die = force_decl_die (value);
26705
26706 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26707 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26708 }
26709 return nml_die;
26710 }
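/* Schematically, the result for a defining namelist is

     DW_TAG_namelist  DW_AT_name "nml"
       DW_TAG_namelist_item  DW_AT_namelist_items -> DIE of item 1
       DW_TAG_namelist_item  DW_AT_namelist_items -> DIE of item 2
       ...

   while a use-associated namelist (ITEM_DECLS == NULL_TREE) gets just
   the DW_TAG_namelist DIE with DW_AT_declaration set.  */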
26711
26712
26713 /* Write the debugging output for DECL. */
26714
26715 static void
26716 dwarf2out_decl (tree decl)
26717 {
26718 dw_die_ref context_die = comp_unit_die ();
26719
26720 switch (TREE_CODE (decl))
26721 {
26722 case ERROR_MARK:
26723 return;
26724
26725 case FUNCTION_DECL:
26726 /* If we're a nested function, initially use a parent of NULL; if we're
26727 a plain function, this will be fixed up in decls_for_scope. If
26728 we're a method, it will be ignored, since we already have a DIE. */
26729 if (decl_function_context (decl)
26730 /* But if we're in terse mode, we don't care about scope. */
26731 && debug_info_level > DINFO_LEVEL_TERSE)
26732 context_die = NULL;
26733 break;
26734
26735 case VAR_DECL:
26736 /* For local statics lookup proper context die. */
26737 if (local_function_static (decl))
26738 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26739
26740 /* If we are in terse mode, don't generate any DIEs to represent any
26741 variable declarations or definitions. */
26742 if (debug_info_level <= DINFO_LEVEL_TERSE)
26743 return;
26744 break;
26745
26746 case CONST_DECL:
26747 if (debug_info_level <= DINFO_LEVEL_TERSE)
26748 return;
26749 if (!is_fortran () && !is_ada ())
26750 return;
26751 if (TREE_STATIC (decl) && decl_function_context (decl))
26752 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26753 break;
26754
26755 case NAMESPACE_DECL:
26756 case IMPORTED_DECL:
26757 if (debug_info_level <= DINFO_LEVEL_TERSE)
26758 return;
26759 if (lookup_decl_die (decl) != NULL)
26760 return;
26761 break;
26762
26763 case TYPE_DECL:
26764 /* Don't emit stubs for types unless they are needed by other DIEs. */
26765 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26766 return;
26767
26768 /* Don't bother trying to generate any DIEs to represent any of the
26769 normal built-in types for the language we are compiling. */
26770 if (DECL_IS_BUILTIN (decl))
26771 return;
26772
26773 /* If we are in terse mode, don't generate any DIEs for types. */
26774 if (debug_info_level <= DINFO_LEVEL_TERSE)
26775 return;
26776
26777 /* If we're a function-scope tag, initially use a parent of NULL;
26778 this will be fixed up in decls_for_scope. */
26779 if (decl_function_context (decl))
26780 context_die = NULL;
26781
26782 break;
26783
26784 case NAMELIST_DECL:
26785 break;
26786
26787 default:
26788 return;
26789 }
26790
26791 gen_decl_die (decl, NULL, NULL, context_die);
26792
26793 if (flag_checking)
26794 {
26795 dw_die_ref die = lookup_decl_die (decl);
26796 if (die)
26797 check_die (die);
26798 }
26799 }
26800
26801 /* Write the debugging output for DECL. */
26802
26803 static void
26804 dwarf2out_function_decl (tree decl)
26805 {
26806 dwarf2out_decl (decl);
26807 call_arg_locations = NULL;
26808 call_arg_loc_last = NULL;
26809 call_site_count = -1;
26810 tail_call_site_count = -1;
26811 decl_loc_table->empty ();
26812 cached_dw_loc_list_table->empty ();
26813 }
26814
26815 /* Output a marker (i.e. a label) for the beginning of the generated code for
26816 a lexical block. */
26817
26818 static void
26819 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26820 unsigned int blocknum)
26821 {
26822 switch_to_section (current_function_section ());
26823 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26824 }
26825
26826 /* Output a marker (i.e. a label) for the end of the generated code for a
26827 lexical block. */
26828
26829 static void
26830 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26831 {
26832 switch_to_section (current_function_section ());
26833 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26834 }
26835
26836 /* Returns nonzero if it is appropriate not to emit any debugging
26837 information for BLOCK, because it doesn't contain any instructions.
26838
26839 Don't allow this for blocks with nested functions or local classes
26840 as we would end up with orphans, and in the presence of scheduling
26841 we may end up calling them anyway. */
26842
26843 static bool
26844 dwarf2out_ignore_block (const_tree block)
26845 {
26846 tree decl;
26847 unsigned int i;
26848
26849 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26850 if (TREE_CODE (decl) == FUNCTION_DECL
26851 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26852 return 0;
26853 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26854 {
26855 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26856 if (TREE_CODE (decl) == FUNCTION_DECL
26857 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26858 return 0;
26859 }
26860
26861 return 1;
26862 }
26863
26864 /* Hash table routines for file_hash. */
26865
26866 bool
26867 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26868 {
26869 return filename_cmp (p1->filename, p2) == 0;
26870 }
26871
26872 hashval_t
26873 dwarf_file_hasher::hash (dwarf_file_data *p)
26874 {
26875 return htab_hash_string (p->filename);
26876 }
26877
26878 /* Lookup FILE_NAME (in the list of filenames that we know about here in
26879 dwarf2out.c) and return its "index". The index of each (known) filename is
26880 just a unique number which is associated with only that one filename. We
26881 need such numbers for the sake of generating labels (in the .debug_sfnames
26882 section) and references to those files numbers (in the .debug_srcinfo
26883 and .debug_macinfo sections). If the filename given as an argument is not
26884 found in our current list, add it to the list and assign it the next
26885 available unique index number. */
26886
26887 static struct dwarf_file_data *
26888 lookup_filename (const char *file_name)
26889 {
26890 struct dwarf_file_data * created;
26891
26892 if (!file_name)
26893 return NULL;
26894
26895 dwarf_file_data **slot
26896 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26897 INSERT);
26898 if (*slot)
26899 return *slot;
26900
26901 created = ggc_alloc<dwarf_file_data> ();
26902 created->filename = file_name;
26903 created->emitted_number = 0;
26904 *slot = created;
26905 return created;
26906 }
26907
26908 /* If the assembler will construct the file table, then translate the compiler
26909 internal file table number into the assembler file table number, and emit
26910 a .file directive if we haven't already emitted one yet. The file table
26911 numbers are different because we prune debug info for unused variables and
26912 types, which may include filenames. */
26913
26914 static int
26915 maybe_emit_file (struct dwarf_file_data * fd)
26916 {
26917 if (! fd->emitted_number)
26918 {
26919 if (last_emitted_file)
26920 fd->emitted_number = last_emitted_file->emitted_number + 1;
26921 else
26922 fd->emitted_number = 1;
26923 last_emitted_file = fd;
26924
26925 if (output_asm_line_debug_info ())
26926 {
26927 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
26928 output_quoted_string (asm_out_file,
26929 remap_debug_filename (fd->filename));
26930 fputc ('\n', asm_out_file);
26931 }
26932 }
26933
26934 return fd->emitted_number;
26935 }
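/* For example, when the assembler constructs the line table
   (output_asm_line_debug_info), the first file seen results in
   something like

	.file 1 "foo.c"

   being emitted (with the name passed through remap_debug_filename),
   and 1 is returned; subsequent calls for the same dwarf_file_data
   simply return the cached emitted_number.  */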
26936
26937 /* Schedule generation of a DW_AT_const_value attribute to DIE.
26938 That generation should happen after function debug info has been
26939 generated. The value of the attribute is the constant value of ARG. */
26940
26941 static void
26942 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
26943 {
26944 die_arg_entry entry;
26945
26946 if (!die || !arg)
26947 return;
26948
26949 gcc_assert (early_dwarf);
26950
26951 if (!tmpl_value_parm_die_table)
26952 vec_alloc (tmpl_value_parm_die_table, 32);
26953
26954 entry.die = die;
26955 entry.arg = arg;
26956 vec_safe_push (tmpl_value_parm_die_table, entry);
26957 }
26958
26959 /* Return TRUE if T is an instance of generic type, FALSE
26960 otherwise. */
26961
26962 static bool
26963 generic_type_p (tree t)
26964 {
26965 if (t == NULL_TREE || !TYPE_P (t))
26966 return false;
26967 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
26968 }
26969
26970 /* Schedule the generation of the generic parameter dies for the
26971 instance of generic type T. The proper generation itself is later
26972 done by gen_scheduled_generic_parms_dies. */
26973
26974 static void
26975 schedule_generic_params_dies_gen (tree t)
26976 {
26977 if (!generic_type_p (t))
26978 return;
26979
26980 gcc_assert (early_dwarf);
26981
26982 if (!generic_type_instances)
26983 vec_alloc (generic_type_instances, 256);
26984
26985 vec_safe_push (generic_type_instances, t);
26986 }
26987
26988 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
26989 by append_entry_to_tmpl_value_parm_die_table. This function must
26990 be called after function DIEs have been generated. */
26991
26992 static void
26993 gen_remaining_tmpl_value_param_die_attribute (void)
26994 {
26995 if (tmpl_value_parm_die_table)
26996 {
26997 unsigned i, j;
26998 die_arg_entry *e;
26999
27000 /* We do this in two phases - first get the cases we can
27001 handle during early-finish, preserving those we cannot
27002 (containing symbolic constants where we don't yet know
27003 whether we are going to output the referenced symbols).
27004 For those we try again at late-finish. */
27005 j = 0;
27006 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
27007 {
27008 if (!e->die->removed
27009 && !tree_add_const_value_attribute (e->die, e->arg))
27010 {
27011 dw_loc_descr_ref loc = NULL;
27012 if (! early_dwarf
27013 && (dwarf_version >= 5 || !dwarf_strict))
27014 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
27015 if (loc)
27016 add_AT_loc (e->die, DW_AT_location, loc);
27017 else
27018 (*tmpl_value_parm_die_table)[j++] = *e;
27019 }
27020 }
27021 tmpl_value_parm_die_table->truncate (j);
27022 }
27023 }
27024
27025 /* Generate generic parameters DIEs for instances of generic types
27026 that have been previously scheduled by
27027 schedule_generic_params_dies_gen. This function must be called
27028 after all the types of the CU have been laid out. */
27029
27030 static void
27031 gen_scheduled_generic_parms_dies (void)
27032 {
27033 unsigned i;
27034 tree t;
27035
27036 if (!generic_type_instances)
27037 return;
27038
27039 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27040 if (COMPLETE_TYPE_P (t))
27041 gen_generic_params_dies (t);
27042
27043 generic_type_instances = NULL;
27044 }
27045
27046
27047 /* Replace DW_AT_name for the decl with name. */
27048
27049 static void
27050 dwarf2out_set_name (tree decl, tree name)
27051 {
27052 dw_die_ref die;
27053 dw_attr_node *attr;
27054 const char *dname;
27055
27056 die = TYPE_SYMTAB_DIE (decl);
27057 if (!die)
27058 return;
27059
27060 dname = dwarf2_name (name, 0);
27061 if (!dname)
27062 return;
27063
27064 attr = get_AT (die, DW_AT_name);
27065 if (attr)
27066 {
27067 struct indirect_string_node *node;
27068
27069 node = find_AT_string (dname);
27070 /* Replace the string. */
27071 attr->dw_attr_val.v.val_str = node;
27072 }
27073
27074 else
27075 add_name_attribute (die, dname);
27076 }
27077
27078 /* True if before or during processing of the first function being emitted. */
27079 static bool in_first_function_p = true;
27080 /* True if loc_note during dwarf2out_var_location call might still be
27081 before first real instruction at address equal to .Ltext0. */
27082 static bool maybe_at_text_label_p = true;
27083 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27084 static unsigned int first_loclabel_num_not_at_text_label;
27085
27086 /* Look ahead for a real insn, or for a begin stmt marker. */
27087
27088 static rtx_insn *
27089 dwarf2out_next_real_insn (rtx_insn *loc_note)
27090 {
27091 rtx_insn *next_real = NEXT_INSN (loc_note);
27092
27093 while (next_real)
27094 if (INSN_P (next_real))
27095 break;
27096 else
27097 next_real = NEXT_INSN (next_real);
27098
27099 return next_real;
27100 }
27101
27102 /* Called by the final INSN scan whenever we see a var location. We
27103 use it to drop labels in the right places, and throw the location in
27104 our lookup table. */
27105
27106 static void
27107 dwarf2out_var_location (rtx_insn *loc_note)
27108 {
27109 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27110 struct var_loc_node *newloc;
27111 rtx_insn *next_real, *next_note;
27112 rtx_insn *call_insn = NULL;
27113 static const char *last_label;
27114 static const char *last_postcall_label;
27115 static bool last_in_cold_section_p;
27116 static rtx_insn *expected_next_loc_note;
27117 tree decl;
27118 bool var_loc_p;
27119 var_loc_view view = 0;
27120
27121 if (!NOTE_P (loc_note))
27122 {
27123 if (CALL_P (loc_note))
27124 {
27125 maybe_reset_location_view (loc_note, cur_line_info_table);
27126 call_site_count++;
27127 if (SIBLING_CALL_P (loc_note))
27128 tail_call_site_count++;
27129 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27130 {
27131 call_insn = loc_note;
27132 loc_note = NULL;
27133 var_loc_p = false;
27134
27135 next_real = dwarf2out_next_real_insn (call_insn);
27136 next_note = NULL;
27137 cached_next_real_insn = NULL;
27138 goto create_label;
27139 }
27140 if (optimize == 0 && !flag_var_tracking)
27141 {
27142 /* When the var-tracking pass is not running, there is no note
27143 for indirect calls whose target is compile-time known. In this
27144 case, process such calls specifically so that we generate call
27145 sites for them anyway. */
27146 rtx x = PATTERN (loc_note);
27147 if (GET_CODE (x) == PARALLEL)
27148 x = XVECEXP (x, 0, 0);
27149 if (GET_CODE (x) == SET)
27150 x = SET_SRC (x);
27151 if (GET_CODE (x) == CALL)
27152 x = XEXP (x, 0);
27153 if (!MEM_P (x)
27154 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27155 || !SYMBOL_REF_DECL (XEXP (x, 0))
27156 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27157 != FUNCTION_DECL))
27158 {
27159 call_insn = loc_note;
27160 loc_note = NULL;
27161 var_loc_p = false;
27162
27163 next_real = dwarf2out_next_real_insn (call_insn);
27164 next_note = NULL;
27165 cached_next_real_insn = NULL;
27166 goto create_label;
27167 }
27168 }
27169 }
27170 else if (!debug_variable_location_views)
27171 gcc_unreachable ();
27172 else
27173 maybe_reset_location_view (loc_note, cur_line_info_table);
27174
27175 return;
27176 }
27177
27178 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27179 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27180 return;
27181
27182 /* Optimize processing a large consecutive sequence of location
27183 notes so we don't spend too much time in next_real_insn. If the
27184 next insn is another location note, remember the next_real_insn
27185 calculation for next time. */
27186 next_real = cached_next_real_insn;
27187 if (next_real)
27188 {
27189 if (expected_next_loc_note != loc_note)
27190 next_real = NULL;
27191 }
27192
27193 next_note = NEXT_INSN (loc_note);
27194 if (! next_note
27195 || next_note->deleted ()
27196 || ! NOTE_P (next_note)
27197 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27198 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27199 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27200 next_note = NULL;
27201
27202 if (! next_real)
27203 next_real = dwarf2out_next_real_insn (loc_note);
27204
27205 if (next_note)
27206 {
27207 expected_next_loc_note = next_note;
27208 cached_next_real_insn = next_real;
27209 }
27210 else
27211 cached_next_real_insn = NULL;
27212
27213 /* If there are no instructions which would be affected by this note,
27214 don't do anything. */
27215 if (var_loc_p
27216 && next_real == NULL_RTX
27217 && !NOTE_DURING_CALL_P (loc_note))
27218 return;
27219
27220 create_label:
27221
27222 if (next_real == NULL_RTX)
27223 next_real = get_last_insn ();
27224
27225 /* If there were any real insns between the note we processed last time
27226 and this note (or if it is the first note), clear
27227 last_{,postcall_}label so that they are not reused this time. */
27228 if (last_var_location_insn == NULL_RTX
27229 || last_var_location_insn != next_real
27230 || last_in_cold_section_p != in_cold_section_p)
27231 {
27232 last_label = NULL;
27233 last_postcall_label = NULL;
27234 }
27235
27236 if (var_loc_p)
27237 {
27238 const char *label
27239 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27240 view = cur_line_info_table->view;
27241 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27242 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27243 if (newloc == NULL)
27244 return;
27245 }
27246 else
27247 {
27248 decl = NULL_TREE;
27249 newloc = NULL;
27250 }
27251
27252 /* If there were no real insns between the note we processed last time
27253 and this note, use the label we emitted last time. Otherwise
27254 create a new label and emit it. */
27255 if (last_label == NULL)
27256 {
27257 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27258 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27259 loclabel_num++;
27260 last_label = ggc_strdup (loclabel);
27261 /* See if loclabel might be equal to .Ltext0. If yes,
27262 bump first_loclabel_num_not_at_text_label. */
27263 if (!have_multiple_function_sections
27264 && in_first_function_p
27265 && maybe_at_text_label_p)
27266 {
27267 static rtx_insn *last_start;
27268 rtx_insn *insn;
27269 for (insn = loc_note; insn; insn = previous_insn (insn))
27270 if (insn == last_start)
27271 break;
27272 else if (!NONDEBUG_INSN_P (insn))
27273 continue;
27274 else
27275 {
27276 rtx body = PATTERN (insn);
27277 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27278 continue;
27279 /* Inline asm could occupy zero bytes. */
27280 else if (GET_CODE (body) == ASM_INPUT
27281 || asm_noperands (body) >= 0)
27282 continue;
27283 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27284 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27285 continue;
27286 #endif
27287 else
27288 {
27289 /* Assume insn has non-zero length. */
27290 maybe_at_text_label_p = false;
27291 break;
27292 }
27293 }
27294 if (maybe_at_text_label_p)
27295 {
27296 last_start = loc_note;
27297 first_loclabel_num_not_at_text_label = loclabel_num;
27298 }
27299 }
27300 }
27301
27302 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27303 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27304
27305 if (!var_loc_p)
27306 {
27307 struct call_arg_loc_node *ca_loc
27308 = ggc_cleared_alloc<call_arg_loc_node> ();
27309 rtx_insn *prev = call_insn;
27310
27311 ca_loc->call_arg_loc_note
27312 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27313 ca_loc->next = NULL;
27314 ca_loc->label = last_label;
27315 gcc_assert (prev
27316 && (CALL_P (prev)
27317 || (NONJUMP_INSN_P (prev)
27318 && GET_CODE (PATTERN (prev)) == SEQUENCE
27319 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27320 if (!CALL_P (prev))
27321 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27322 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27323
27324 /* Look for a SYMBOL_REF in the "prev" instruction. */
27325 rtx x = get_call_rtx_from (PATTERN (prev));
27326 if (x)
27327 {
27328 /* Try to get the call symbol, if any. */
27329 if (MEM_P (XEXP (x, 0)))
27330 x = XEXP (x, 0);
27331 /* First, look for a memory access to a symbol_ref. */
27332 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27333 && SYMBOL_REF_DECL (XEXP (x, 0))
27334 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27335 ca_loc->symbol_ref = XEXP (x, 0);
27336 /* Otherwise, look at a compile-time known user-level function
27337 declaration. */
27338 else if (MEM_P (x)
27339 && MEM_EXPR (x)
27340 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27341 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27342 }
27343
27344 ca_loc->block = insn_scope (prev);
27345 if (call_arg_locations)
27346 call_arg_loc_last->next = ca_loc;
27347 else
27348 call_arg_locations = ca_loc;
27349 call_arg_loc_last = ca_loc;
27350 }
27351 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27352 {
27353 newloc->label = last_label;
27354 newloc->view = view;
27355 }
27356 else
27357 {
27358 if (!last_postcall_label)
27359 {
27360 sprintf (loclabel, "%s-1", last_label);
27361 last_postcall_label = ggc_strdup (loclabel);
27362 }
27363 newloc->label = last_postcall_label;
27364 /* ??? This view is at last_label, not last_label-1, but we
27365 could only assume view at last_label-1 is zero if we could
27366 assume calls always have length greater than one. This is
27367 probably true in general, though there might be a rare
27368 exception to this rule, e.g. if a call insn is optimized out
27369 by target magic. Then, even the -1 in the label will be
27370 wrong, which might invalidate the range. Anyway, using view,
27371 though technically possibly incorrect, will work as far as
27372 ranges go: since L-1 is in the middle of the call insn,
27373 (L-1).0 and (L-1).V shouldn't make any difference, and having
27374 the loclist entry refer to the .loc entry might be useful, so
27375 leave it like this. */
27376 newloc->view = view;
27377 }
27378
27379 if (var_loc_p && flag_debug_asm)
27380 {
27381 const char *name, *sep, *patstr;
27382 if (decl && DECL_NAME (decl))
27383 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27384 else
27385 name = "";
27386 if (NOTE_VAR_LOCATION_LOC (loc_note))
27387 {
27388 sep = " => ";
27389 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27390 }
27391 else
27392 {
27393 sep = " ";
27394 patstr = "RESET";
27395 }
27396 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27397 name, sep, patstr);
27398 }
27399
27400 last_var_location_insn = next_real;
27401 last_in_cold_section_p = in_cold_section_p;
27402 }
27403
27404 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27405 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27406 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27407 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27408 BLOCK_FRAGMENT_ORIGIN links. */
27409 static bool
27410 block_within_block_p (tree block, tree outer, bool bothways)
27411 {
27412 if (block == outer)
27413 return true;
27414
27415 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27416 for (tree context = BLOCK_SUPERCONTEXT (block);
27417 context != outer;
27418 context = BLOCK_SUPERCONTEXT (context))
27419 if (!context || TREE_CODE (context) != BLOCK)
27420 return false;
27421
27422 if (!bothways)
27423 return true;
27424
27425 /* Now check that each block is actually referenced by its
27426 parent. */
27427 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27428 context = BLOCK_SUPERCONTEXT (context))
27429 {
27430 if (BLOCK_FRAGMENT_ORIGIN (context))
27431 {
27432 gcc_assert (!BLOCK_SUBBLOCKS (context));
27433 context = BLOCK_FRAGMENT_ORIGIN (context);
27434 }
27435 for (tree sub = BLOCK_SUBBLOCKS (context);
27436 sub != block;
27437 sub = BLOCK_CHAIN (sub))
27438 if (!sub)
27439 return false;
27440 if (context == outer)
27441 return true;
27442 else
27443 block = context;
27444 }
27445 }
27446
27447 /* Called during final while assembling the marker of the entry point
27448 for an inlined function. */
27449
27450 static void
27451 dwarf2out_inline_entry (tree block)
27452 {
27453 gcc_assert (debug_inline_points);
27454
27455 /* If we can't represent it, don't bother. */
27456 if (!(dwarf_version >= 3 || !dwarf_strict))
27457 return;
27458
27459 gcc_assert (DECL_P (block_ultimate_origin (block)));
27460
27461 /* Sanity check the block tree. This would catch a case in which
27462 BLOCK got removed from the tree reachable from the outermost
27463 lexical block, but got retained in markers. It would still link
27464 back to its parents, but some ancestor would be missing a link
27465 down the path to the sub BLOCK. If the block got removed, its
27466 BLOCK_NUMBER will not be a usable value. */
27467 if (flag_checking)
27468 gcc_assert (block_within_block_p (block,
27469 DECL_INITIAL (current_function_decl),
27470 true));
27471
27472 gcc_assert (inlined_function_outer_scope_p (block));
27473 gcc_assert (!BLOCK_DIE (block));
27474
27475 if (BLOCK_FRAGMENT_ORIGIN (block))
27476 block = BLOCK_FRAGMENT_ORIGIN (block);
27477 /* Can the entry point ever not be at the beginning of an
27478 unfragmented lexical block? */
27479 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27480 || (cur_line_info_table
27481 && !ZERO_VIEW_P (cur_line_info_table->view))))
27482 return;
27483
27484 if (!inline_entry_data_table)
27485 inline_entry_data_table
27486 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27487
27488
27489 inline_entry_data **iedp
27490 = inline_entry_data_table->find_slot_with_hash (block,
27491 htab_hash_pointer (block),
27492 INSERT);
27493 if (*iedp)
27494 /* ??? Ideally, we'd record all entry points for the same inlined
27495 function (some may have been duplicated by e.g. unrolling), but
27496 we have no way to represent that ATM. */
27497 return;
27498
27499 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27500 ied->block = block;
27501 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27502 ied->label_num = BLOCK_NUMBER (block);
27503 if (cur_line_info_table)
27504 ied->view = cur_line_info_table->view;
27505
27506 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27507
27508 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_INLINE_ENTRY_LABEL,
27509 BLOCK_NUMBER (block));
27510 ASM_OUTPUT_LABEL (asm_out_file, label);
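 /* Illustrative note: on typical ELF targets the label emitted just
 above looks something like ".LBI<block number>" (the exact spelling
 depends on BLOCK_INLINE_ENTRY_LABEL and the target's label syntax);
 the DW_TAG_inlined_subroutine DIE for BLOCK later refers to it,
 e.g. via DW_AT_entry_pc. */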
27511 }
27512
27513 /* Called from finalize_size_functions for size functions so that their body
27514 can be encoded in the debug info to describe the layout of variable-length
27515 structures. */
27516
27517 static void
27518 dwarf2out_size_function (tree decl)
27519 {
27520 function_to_dwarf_procedure (decl);
27521 }
27522
27523 /* Note in one location list that text section has changed. */
27524
27525 int
27526 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27527 {
27528 var_loc_list *list = *slot;
27529 if (list->first)
27530 list->last_before_switch
27531 = list->last->next ? list->last->next : list->last;
27532 return 1;
27533 }
27534
27535 /* Note in all location lists that text section has changed. */
27536
27537 static void
27538 var_location_switch_text_section (void)
27539 {
27540 if (decl_loc_table == NULL)
27541 return;
27542
27543 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27544 }
27545
27546 /* Create a new line number table. */
27547
27548 static dw_line_info_table *
27549 new_line_info_table (void)
27550 {
27551 dw_line_info_table *table;
27552
27553 table = ggc_cleared_alloc<dw_line_info_table> ();
27554 table->file_num = 1;
27555 table->line_num = 1;
27556 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27557 FORCE_RESET_NEXT_VIEW (table->view);
27558 table->symviews_since_reset = 0;
27559
27560 return table;
27561 }
27562
27563 /* Look up the "current" table into which we emit line info, so
27564 that we don't have to do it for every source line. */
27565
27566 static void
27567 set_cur_line_info_table (section *sec)
27568 {
27569 dw_line_info_table *table;
27570
27571 if (sec == text_section)
27572 table = text_section_line_info;
27573 else if (sec == cold_text_section)
27574 {
27575 table = cold_text_section_line_info;
27576 if (!table)
27577 {
27578 cold_text_section_line_info = table = new_line_info_table ();
27579 table->end_label = cold_end_label;
27580 }
27581 }
27582 else
27583 {
27584 const char *end_label;
27585
27586 if (crtl->has_bb_partition)
27587 {
27588 if (in_cold_section_p)
27589 end_label = crtl->subsections.cold_section_end_label;
27590 else
27591 end_label = crtl->subsections.hot_section_end_label;
27592 }
27593 else
27594 {
27595 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27596 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27597 current_function_funcdef_no);
27598 end_label = ggc_strdup (label);
27599 }
27600
27601 table = new_line_info_table ();
27602 table->end_label = end_label;
27603
27604 vec_safe_push (separate_line_info, table);
27605 }
27606
27607 if (output_asm_line_debug_info ())
27608 table->is_stmt = (cur_line_info_table
27609 ? cur_line_info_table->is_stmt
27610 : DWARF_LINE_DEFAULT_IS_STMT_START);
27611 cur_line_info_table = table;
27612 }
27613
27614
27615 /* We need to reset the locations at the beginning of each
27616 function. We can't do this in the end_function hook, because the
27617 declarations that use the locations won't have been output when
27618 that hook is called. Also compute have_multiple_function_sections here. */
27619
27620 static void
27621 dwarf2out_begin_function (tree fun)
27622 {
27623 section *sec = function_section (fun);
27624
27625 if (sec != text_section)
27626 have_multiple_function_sections = true;
27627
27628 if (crtl->has_bb_partition && !cold_text_section)
27629 {
27630 gcc_assert (current_function_decl == fun);
27631 cold_text_section = unlikely_text_section ();
27632 switch_to_section (cold_text_section);
27633 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27634 switch_to_section (sec);
27635 }
27636
27637 dwarf2out_note_section_used ();
27638 call_site_count = 0;
27639 tail_call_site_count = 0;
27640
27641 set_cur_line_info_table (sec);
27642 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27643 }
27644
27645 /* Helper function of dwarf2out_end_function, called only after emitting
27646 the very first function into assembly. Check if some .debug_loc range
27647 might end with a .LVL* label that could be equal to .Ltext0.
27648 In that case we must force using absolute addresses in .debug_loc ranges,
27649 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27650 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27651 list terminator.
27652 Set have_multiple_function_sections to true in that case and
27653 terminate htab traversal. */
27654
27655 int
27656 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27657 {
27658 var_loc_list *entry = *slot;
27659 struct var_loc_node *node;
27660
27661 node = entry->first;
27662 if (node && node->next && node->next->label)
27663 {
27664 unsigned int i;
27665 const char *label = node->next->label;
27666 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27667
27668 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27669 {
27670 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27671 if (strcmp (label, loclabel) == 0)
27672 {
27673 have_multiple_function_sections = true;
27674 return 0;
27675 }
27676 }
27677 }
27678 return 1;
27679 }
27680
27681 /* Hook called after emitting a function into assembly.
27682 This does something only for the very first function emitted. */
27683
27684 static void
27685 dwarf2out_end_function (unsigned int)
27686 {
27687 if (in_first_function_p
27688 && !have_multiple_function_sections
27689 && first_loclabel_num_not_at_text_label
27690 && decl_loc_table)
27691 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27692 in_first_function_p = false;
27693 maybe_at_text_label_p = false;
27694 }
27695
27696 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27697 front-ends register a translation unit even before dwarf2out_init is
27698 called. */
27699 static tree main_translation_unit = NULL_TREE;
27700
27701 /* Hook called by front-ends after they built their main translation unit.
27702 Associate comp_unit_die to UNIT. */
27703
27704 static void
27705 dwarf2out_register_main_translation_unit (tree unit)
27706 {
27707 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27708 && main_translation_unit == NULL_TREE);
27709 main_translation_unit = unit;
27710 /* If dwarf2out_init has not been called yet, it will perform the association
27711 itself looking at main_translation_unit. */
27712 if (decl_die_table != NULL)
27713 equate_decl_number_to_die (unit, comp_unit_die ());
27714 }
27715
27716 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27717
27718 static void
27719 push_dw_line_info_entry (dw_line_info_table *table,
27720 enum dw_line_info_opcode opcode, unsigned int val)
27721 {
27722 dw_line_info_entry e;
27723 e.opcode = opcode;
27724 e.val = val;
27725 vec_safe_push (table->entries, e);
27726 }
27727
27728 /* Output a label to mark the beginning of a source code line entry
27729 and record information relating to this source line, in
27730 'line_info_table' for later output of the .debug_line section. */
27731 /* ??? The discriminator parameter ought to be unsigned. */
27732
27733 static void
27734 dwarf2out_source_line (unsigned int line, unsigned int column,
27735 const char *filename,
27736 int discriminator, bool is_stmt)
27737 {
27738 unsigned int file_num;
27739 dw_line_info_table *table;
27740 static var_loc_view lvugid;
27741
27742 if (debug_info_level < DINFO_LEVEL_TERSE)
27743 return;
27744
27745 table = cur_line_info_table;
27746
27747 if (line == 0)
27748 {
27749 if (debug_variable_location_views
27750 && output_asm_line_debug_info ()
27751 && table && !RESETTING_VIEW_P (table->view))
27752 {
27753 /* If we're using the assembler to compute view numbers, we
27754 can't issue a .loc directive for line zero, so we can't
27755 get a view number at this point. We might attempt to
27756 compute it from the previous view, or equate it to a
27757 subsequent view (though it might not be there!), but
27758 since we're omitting the line number entry, we might as
27759 well omit the view number as well. That means pretending
27760 it's a view number zero, which might very well turn out
27761 to be correct. ??? Extend the assembler so that the
27762 compiler could emit e.g. ".locview .LVU#", to output a
27763 view without changing line number information. We'd then
27764 have to count it in symviews_since_reset; when it's omitted,
27765 it doesn't count. */
27766 if (!zero_view_p)
27767 zero_view_p = BITMAP_GGC_ALLOC ();
27768 bitmap_set_bit (zero_view_p, table->view);
27769 if (flag_debug_asm)
27770 {
27771 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27772 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27773 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27774 ASM_COMMENT_START);
27775 assemble_name (asm_out_file, label);
27776 putc ('\n', asm_out_file);
27777 }
27778 table->view = ++lvugid;
27779 }
27780 return;
27781 }
27782
27783 /* The discriminator column was added in DWARF 4. Simplify the code
27784 below by clearing it if we're not supposed to output it. */
27785 if (dwarf_version < 4 && dwarf_strict)
27786 discriminator = 0;
27787
27788 if (!debug_column_info)
27789 column = 0;
27790
27791 file_num = maybe_emit_file (lookup_filename (filename));
27792
27793 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27794 the debugger has used the second (possibly duplicate) line number
27795 at the beginning of the function to mark the end of the prologue.
27796 We could eliminate any other duplicates within the function. For
27797 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27798 that second line number entry. */
27799 /* Recall that this end-of-prologue indication is *not* the same thing
27800 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27801 to which the hook corresponds, follows the last insn that was
27802 emitted by gen_prologue. What we need is to precede the first insn
27803 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27804 insn that corresponds to something the user wrote. These may be
27805 very different locations once scheduling is enabled. */
27806
27807 if (0 && file_num == table->file_num
27808 && line == table->line_num
27809 && column == table->column_num
27810 && discriminator == table->discrim_num
27811 && is_stmt == table->is_stmt)
27812 return;
27813
27814 switch_to_section (current_function_section ());
27815
27816 /* If requested, emit something human-readable. */
27817 if (flag_debug_asm)
27818 {
27819 if (debug_column_info)
27820 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27821 filename, line, column);
27822 else
27823 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27824 filename, line);
27825 }
27826
27827 if (output_asm_line_debug_info ())
27828 {
27829 /* Emit the .loc directive understood by GNU as. */
27830 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
27831 file_num, line, is_stmt, discriminator */
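 /* For instance (illustrative, derived from the code below): with
 file 1, line 42, column 7, a changed is_stmt of 0, discriminator 2
 and location views enabled, the emitted directive would look like
 .loc 1 42 7 is_stmt 0 discriminator 2 view .LVU5
 where .LVU5 is the symbolic view label generated further down. */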
27832 fputs ("\t.loc ", asm_out_file);
27833 fprint_ul (asm_out_file, file_num);
27834 putc (' ', asm_out_file);
27835 fprint_ul (asm_out_file, line);
27836 putc (' ', asm_out_file);
27837 fprint_ul (asm_out_file, column);
27838
27839 if (is_stmt != table->is_stmt)
27840 {
27841 fputs (" is_stmt ", asm_out_file);
27842 putc (is_stmt ? '1' : '0', asm_out_file);
27843 }
27844 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27845 {
27846 gcc_assert (discriminator > 0);
27847 fputs (" discriminator ", asm_out_file);
27848 fprint_ul (asm_out_file, (unsigned long) discriminator);
27849 }
27850 if (debug_variable_location_views)
27851 {
27852 if (!RESETTING_VIEW_P (table->view))
27853 {
27854 table->symviews_since_reset++;
27855 if (table->symviews_since_reset > symview_upper_bound)
27856 symview_upper_bound = table->symviews_since_reset;
27857 /* When we're using the assembler to compute view
27858 numbers, we output symbolic labels after "view" in
27859 .loc directives, and the assembler will set them for
27860 us, so that we can refer to the view numbers in
27861 location lists. The only exceptions are when we know
27862 a view will be zero: "-0" is a forced reset, used
27863 e.g. in the beginning of functions, whereas "0" tells
27864 the assembler to check that there was a PC change
27865 since the previous view, in a way that implicitly
27866 resets the next view. */
27867 fputs (" view ", asm_out_file);
27868 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27869 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27870 assemble_name (asm_out_file, label);
27871 table->view = ++lvugid;
27872 }
27873 else
27874 {
27875 table->symviews_since_reset = 0;
27876 if (FORCE_RESETTING_VIEW_P (table->view))
27877 fputs (" view -0", asm_out_file);
27878 else
27879 fputs (" view 0", asm_out_file);
27880 /* Mark the present view as a zero view. Earlier debug
27881 binds may have already added its id to loclists to be
27882 emitted later, so we can't reuse the id for something
27883 else. However, it's good to know whether a view is
27884 known to be zero, because then we may be able to
27885 optimize out locviews that are all zeros, so take
27886 note of it in zero_view_p. */
27887 if (!zero_view_p)
27888 zero_view_p = BITMAP_GGC_ALLOC ();
27889 bitmap_set_bit (zero_view_p, lvugid);
27890 table->view = ++lvugid;
27891 }
27892 }
27893 putc ('\n', asm_out_file);
27894 }
27895 else
27896 {
27897 unsigned int label_num = ++line_info_label_num;
27898
27899 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
27900
27901 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
27902 push_dw_line_info_entry (table, LI_adv_address, label_num);
27903 else
27904 push_dw_line_info_entry (table, LI_set_address, label_num);
27905 if (debug_variable_location_views)
27906 {
27907 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
27908 if (resetting)
27909 table->view = 0;
27910
27911 if (flag_debug_asm)
27912 fprintf (asm_out_file, "\t%s view %s%d\n",
27913 ASM_COMMENT_START,
27914 resetting ? "-" : "",
27915 table->view);
27916
27917 table->view++;
27918 }
27919 if (file_num != table->file_num)
27920 push_dw_line_info_entry (table, LI_set_file, file_num);
27921 if (discriminator != table->discrim_num)
27922 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
27923 if (is_stmt != table->is_stmt)
27924 push_dw_line_info_entry (table, LI_negate_stmt, 0);
27925 push_dw_line_info_entry (table, LI_set_line, line);
27926 if (debug_column_info)
27927 push_dw_line_info_entry (table, LI_set_column, column);
27928 }
27929
27930 table->file_num = file_num;
27931 table->line_num = line;
27932 table->column_num = column;
27933 table->discrim_num = discriminator;
27934 table->is_stmt = is_stmt;
27935 table->in_use = true;
27936 }
27937
27938 /* Record the beginning of a new source file. */
27939
27940 static void
27941 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
27942 {
27943 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27944 {
27945 macinfo_entry e;
27946 e.code = DW_MACINFO_start_file;
27947 e.lineno = lineno;
27948 e.info = ggc_strdup (filename);
27949 vec_safe_push (macinfo_table, e);
27950 }
27951 }
27952
27953 /* Record the end of a source file. */
27954
27955 static void
27956 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
27957 {
27958 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27959 {
27960 macinfo_entry e;
27961 e.code = DW_MACINFO_end_file;
27962 e.lineno = lineno;
27963 e.info = NULL;
27964 vec_safe_push (macinfo_table, e);
27965 }
27966 }
27967
27968 /* Called from debug_define in toplev.c. The `buffer' parameter contains
27969 the tail part of the directive line, i.e. the part which is past the
27970 initial whitespace, #, whitespace, directive-name, whitespace part. */
27971
27972 static void
27973 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
27974 const char *buffer ATTRIBUTE_UNUSED)
27975 {
27976 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
27977 {
27978 macinfo_entry e;
27979 /* Insert a dummy first entry to be able to optimize the whole
27980 predefined macro block using DW_MACRO_import. */
27981 if (macinfo_table->is_empty () && lineno <= 1)
27982 {
27983 e.code = 0;
27984 e.lineno = 0;
27985 e.info = NULL;
27986 vec_safe_push (macinfo_table, e);
27987 }
27988 e.code = DW_MACINFO_define;
27989 e.lineno = lineno;
27990 e.info = ggc_strdup (buffer);
27991 vec_safe_push (macinfo_table, e);
27992 }
27993 }
27994
27995 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
27996 the tail part of the directive line, i.e. the part which is past the
27997 initial whitespace, #, whitespace, directive-name, whitespace part. */
27998
27999 static void
28000 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
28001 const char *buffer ATTRIBUTE_UNUSED)
28002 {
28003 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28004 {
28005 macinfo_entry e;
28006 /* Insert a dummy first entry to be able to optimize the whole
28007 predefined macro block using DW_MACRO_import. */
28008 if (macinfo_table->is_empty () && lineno <= 1)
28009 {
28010 e.code = 0;
28011 e.lineno = 0;
28012 e.info = NULL;
28013 vec_safe_push (macinfo_table, e);
28014 }
28015 e.code = DW_MACINFO_undef;
28016 e.lineno = lineno;
28017 e.info = ggc_strdup (buffer);
28018 vec_safe_push (macinfo_table, e);
28019 }
28020 }
28021
28022 /* Helpers to manipulate the hash table of macinfo entries, used to
 share define/undef blocks between compilation units. */
28023
28024 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28025 {
28026 static inline hashval_t hash (const macinfo_entry *);
28027 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28028 };
28029
28030 inline hashval_t
28031 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28032 {
28033 return htab_hash_string (entry->info);
28034 }
28035
28036 inline bool
28037 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28038 const macinfo_entry *entry2)
28039 {
28040 return !strcmp (entry1->info, entry2->info);
28041 }
28042
28043 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28044
28045 /* Output a single .debug_macinfo entry. */
28046
28047 static void
28048 output_macinfo_op (macinfo_entry *ref)
28049 {
28050 int file_num;
28051 size_t len;
28052 struct indirect_string_node *node;
28053 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28054 struct dwarf_file_data *fd;
28055
28056 switch (ref->code)
28057 {
28058 case DW_MACINFO_start_file:
28059 fd = lookup_filename (ref->info);
28060 file_num = maybe_emit_file (fd);
28061 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28062 dw2_asm_output_data_uleb128 (ref->lineno,
28063 "Included from line number %lu",
28064 (unsigned long) ref->lineno);
28065 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28066 break;
28067 case DW_MACINFO_end_file:
28068 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28069 break;
28070 case DW_MACINFO_define:
28071 case DW_MACINFO_undef:
28072 len = strlen (ref->info) + 1;
28073 if (!dwarf_strict
28074 && len > DWARF_OFFSET_SIZE
28075 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28076 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28077 {
28078 ref->code = ref->code == DW_MACINFO_define
28079 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28080 output_macinfo_op (ref);
28081 return;
28082 }
28083 dw2_asm_output_data (1, ref->code,
28084 ref->code == DW_MACINFO_define
28085 ? "Define macro" : "Undefine macro");
28086 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28087 (unsigned long) ref->lineno);
28088 dw2_asm_output_nstring (ref->info, -1, "The macro");
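 /* For example (illustrative): "#define FOO 1" seen on line 3 is
 emitted as the byte DW_MACINFO_define (0x01), the uleb128 line
 number 3, and the NUL-terminated string "FOO 1". */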
28089 break;
28090 case DW_MACRO_define_strp:
28091 case DW_MACRO_undef_strp:
28092 node = find_AT_string (ref->info);
28093 gcc_assert (node
28094 && (node->form == DW_FORM_strp
28095 || node->form == dwarf_form (DW_FORM_strx)));
28096 dw2_asm_output_data (1, ref->code,
28097 ref->code == DW_MACRO_define_strp
28098 ? "Define macro strp"
28099 : "Undefine macro strp");
28100 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28101 (unsigned long) ref->lineno);
28102 if (node->form == DW_FORM_strp)
28103 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28104 debug_str_section, "The macro: \"%s\"",
28105 ref->info);
28106 else
28107 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28108 ref->info);
28109 break;
28110 case DW_MACRO_import:
28111 dw2_asm_output_data (1, ref->code, "Import");
28112 ASM_GENERATE_INTERNAL_LABEL (label,
28113 DEBUG_MACRO_SECTION_LABEL,
28114 ref->lineno + macinfo_label_base);
28115 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28116 break;
28117 default:
28118 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28119 ASM_COMMENT_START, (unsigned long) ref->code);
28120 break;
28121 }
28122 }
28123
28124 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28125 other compilation units' .debug_macinfo sections. IDX is the index
28126 of the first define/undef op. Return the number of ops that should
28127 be emitted in a comdat .debug_macinfo section and emit
28128 a DW_MACRO_import entry referencing it.
28129 If the define/undef entry should be emitted normally, return 0. */
28130
28131 static unsigned
28132 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28133 macinfo_hash_type **macinfo_htab)
28134 {
28135 macinfo_entry *first, *second, *cur, *inc;
28136 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28137 unsigned char checksum[16];
28138 struct md5_ctx ctx;
28139 char *grp_name, *tail;
28140 const char *base;
28141 unsigned int i, count, encoded_filename_len, linebuf_len;
28142 macinfo_entry **slot;
28143
28144 first = &(*macinfo_table)[idx];
28145 second = &(*macinfo_table)[idx + 1];
28146
28147 /* Optimize only if there are at least two consecutive define/undef ops,
28148 and either all of them are before the first DW_MACINFO_start_file
28149 with lineno {0,1} (i.e. predefined macro block), or all of them are
28150 in some included header file. */
28151 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28152 return 0;
28153 if (vec_safe_is_empty (files))
28154 {
28155 if (first->lineno > 1 || second->lineno > 1)
28156 return 0;
28157 }
28158 else if (first->lineno == 0)
28159 return 0;
28160
28161 /* Find the last define/undef entry that can be grouped together
28162 with first, and at the same time compute the MD5 checksum of their
28163 codes, line numbers and strings. */
28164 md5_init_ctx (&ctx);
28165 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28166 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28167 break;
28168 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28169 break;
28170 else
28171 {
28172 unsigned char code = cur->code;
28173 md5_process_bytes (&code, 1, &ctx);
28174 checksum_uleb128 (cur->lineno, &ctx);
28175 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28176 }
28177 md5_finish_ctx (&ctx, checksum);
28178 count = i - idx;
28179
28180 /* From the containing include filename (if any) pick up just
28181 usable characters from its basename. */
28182 if (vec_safe_is_empty (files))
28183 base = "";
28184 else
28185 base = lbasename (files->last ().info);
28186 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28187 if (ISIDNUM (base[i]) || base[i] == '.')
28188 encoded_filename_len++;
28189 /* Count the '.' separator appended at the end. */
28190 if (encoded_filename_len)
28191 encoded_filename_len++;
28192
28193 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28194 linebuf_len = strlen (linebuf);
28195
28196 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
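 /* For instance (illustrative), a define/undef run from an included
 header foo.h whose first entry is at line 10, with 4-byte DWARF
 offsets, yields a name of the form
 "wm4.foo.h.10.0123456789abcdef0123456789abcdef"
 where the trailing 32 hex digits are the MD5 checksum computed
 above. */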
28197 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28198 + 16 * 2 + 1);
28199 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28200 tail = grp_name + 4;
28201 if (encoded_filename_len)
28202 {
28203 for (i = 0; base[i]; i++)
28204 if (ISIDNUM (base[i]) || base[i] == '.')
28205 *tail++ = base[i];
28206 *tail++ = '.';
28207 }
28208 memcpy (tail, linebuf, linebuf_len);
28209 tail += linebuf_len;
28210 *tail++ = '.';
28211 for (i = 0; i < 16; i++)
28212 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28213
28214 /* Construct a macinfo_entry for DW_MACRO_import
28215 in the empty vector entry before the first define/undef. */
28216 inc = &(*macinfo_table)[idx - 1];
28217 inc->code = DW_MACRO_import;
28218 inc->lineno = 0;
28219 inc->info = ggc_strdup (grp_name);
28220 if (!*macinfo_htab)
28221 *macinfo_htab = new macinfo_hash_type (10);
28222 /* Avoid emitting duplicates. */
28223 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28224 if (*slot != NULL)
28225 {
28226 inc->code = 0;
28227 inc->info = NULL;
28228 /* If such an entry has been used before, just emit
28229 a DW_MACRO_import op. */
28230 inc = *slot;
28231 output_macinfo_op (inc);
28232 /* And clear all macinfo_entry in the range to avoid emitting them
28233 in the second pass. */
28234 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28235 {
28236 cur->code = 0;
28237 cur->info = NULL;
28238 }
28239 }
28240 else
28241 {
28242 *slot = inc;
28243 inc->lineno = (*macinfo_htab)->elements ();
28244 output_macinfo_op (inc);
28245 }
28246 return count;
28247 }
28248
28249 /* Save any strings needed by the macinfo table in the debug str
28250 table. All strings must be collected into the table by the time
28251 index_string is called. */
28252
28253 static void
28254 save_macinfo_strings (void)
28255 {
28256 unsigned len;
28257 unsigned i;
28258 macinfo_entry *ref;
28259
28260 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28261 {
28262 switch (ref->code)
28263 {
28264 /* Match the logic in output_macinfo_op to decide on
28265 indirect strings. */
28266 case DW_MACINFO_define:
28267 case DW_MACINFO_undef:
28268 len = strlen (ref->info) + 1;
28269 if (!dwarf_strict
28270 && len > DWARF_OFFSET_SIZE
28271 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28272 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28273 set_indirect_string (find_AT_string (ref->info));
28274 break;
28275 case DW_MACRO_define_strp:
28276 case DW_MACRO_undef_strp:
28277 set_indirect_string (find_AT_string (ref->info));
28278 break;
28279 default:
28280 break;
28281 }
28282 }
28283 }
28284
28285 /* Output macinfo section(s). */
28286
28287 static void
28288 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28289 {
28290 unsigned i;
28291 unsigned long length = vec_safe_length (macinfo_table);
28292 macinfo_entry *ref;
28293 vec<macinfo_entry, va_gc> *files = NULL;
28294 macinfo_hash_type *macinfo_htab = NULL;
28295 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28296
28297 if (! length)
28298 return;
28299
28300 /* output_macinfo* uses these interchangeably. */
28301 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28302 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28303 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28304 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28305
28306 /* AIX Assembler inserts the length, so adjust the reference to match the
28307 offset expected by debuggers. */
28308 strcpy (dl_section_ref, debug_line_label);
28309 if (XCOFF_DEBUGGING_INFO)
28310 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28311
28312 /* For .debug_macro emit the section header. */
28313 if (!dwarf_strict || dwarf_version >= 5)
28314 {
28315 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28316 "DWARF macro version number");
28317 if (DWARF_OFFSET_SIZE == 8)
28318 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28319 else
28320 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28321 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28322 debug_line_section, NULL);
28323 }
28324
28325 /* In the first loop we emit the primary .debug_macinfo section
28326 and clear each macinfo_entry after it has been emitted.
28327 If a longer range of define/undef ops can be optimized using
28328 DW_MACRO_import, the DW_MACRO_import op is emitted into the vector
28329 entry just before the first define/undef in the range, while the
28330 range of define/undef ops itself is kept, unemitted, for later. */
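 /* Illustrative sketch of the flow: for a predefined-macro block such
 as [ 0 (dummy), define "__STDC__ 1", define ..., ... ], the dummy
 slot becomes a DW_MACRO_import "wm4.<line>.<md5>" entry that is
 emitted here and kept; the defines themselves are kept unemitted
 and are output by the second loop below into a comdat
 .debug_macinfo section whose group key is that "wm4...." name. */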
28331 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28332 {
28333 switch (ref->code)
28334 {
28335 case DW_MACINFO_start_file:
28336 vec_safe_push (files, *ref);
28337 break;
28338 case DW_MACINFO_end_file:
28339 if (!vec_safe_is_empty (files))
28340 files->pop ();
28341 break;
28342 case DW_MACINFO_define:
28343 case DW_MACINFO_undef:
28344 if ((!dwarf_strict || dwarf_version >= 5)
28345 && HAVE_COMDAT_GROUP
28346 && vec_safe_length (files) != 1
28347 && i > 0
28348 && i + 1 < length
28349 && (*macinfo_table)[i - 1].code == 0)
28350 {
28351 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28352 if (count)
28353 {
28354 i += count - 1;
28355 continue;
28356 }
28357 }
28358 break;
28359 case 0:
28360 /* A dummy entry may be inserted at the beginning to be able
28361 to optimize the whole block of predefined macros. */
28362 if (i == 0)
28363 continue;
28364 default:
28365 break;
28366 }
28367 output_macinfo_op (ref);
28368 ref->info = NULL;
28369 ref->code = 0;
28370 }
28371
28372 if (!macinfo_htab)
28373 return;
28374
28375 /* Save the number of transparent includes so we can adjust the
28376 label number for the fat LTO object DWARF. */
28377 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28378
28379 delete macinfo_htab;
28380 macinfo_htab = NULL;
28381
28382 /* If any DW_MACRO_import entries were used, then at each of them
28383 terminate the current chain, switch to a new comdat .debug_macinfo
28384 section and emit the kept define/undef entries within it. */
28385 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28386 switch (ref->code)
28387 {
28388 case 0:
28389 continue;
28390 case DW_MACRO_import:
28391 {
28392 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28393 tree comdat_key = get_identifier (ref->info);
28394 /* Terminate the previous .debug_macinfo section. */
28395 dw2_asm_output_data (1, 0, "End compilation unit");
28396 targetm.asm_out.named_section (debug_macinfo_section_name,
28397 SECTION_DEBUG
28398 | SECTION_LINKONCE
28399 | (early_lto_debug
28400 ? SECTION_EXCLUDE : 0),
28401 comdat_key);
28402 ASM_GENERATE_INTERNAL_LABEL (label,
28403 DEBUG_MACRO_SECTION_LABEL,
28404 ref->lineno + macinfo_label_base);
28405 ASM_OUTPUT_LABEL (asm_out_file, label);
28406 ref->code = 0;
28407 ref->info = NULL;
28408 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28409 "DWARF macro version number");
28410 if (DWARF_OFFSET_SIZE == 8)
28411 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28412 else
28413 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28414 }
28415 break;
28416 case DW_MACINFO_define:
28417 case DW_MACINFO_undef:
28418 output_macinfo_op (ref);
28419 ref->code = 0;
28420 ref->info = NULL;
28421 break;
28422 default:
28423 gcc_unreachable ();
28424 }
28425
28426 macinfo_label_base += macinfo_label_base_adj;
28427 }
28428
28429 /* Initialize the various sections and labels for dwarf output; if
28430 EARLY_LTO_DEBUG is true, use the early LTO debug variants. Returns
28431 the generation (zero-based count of times the function was called). */
28432
28433 static unsigned
28434 init_sections_and_labels (bool early_lto_debug)
28435 {
28436 /* As we may get called multiple times have a generation count for
28437 labels. */
28438 static unsigned generation = 0;
28439
28440 if (early_lto_debug)
28441 {
28442 if (!dwarf_split_debug_info)
28443 {
28444 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28445 SECTION_DEBUG | SECTION_EXCLUDE,
28446 NULL);
28447 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28448 SECTION_DEBUG | SECTION_EXCLUDE,
28449 NULL);
28450 debug_macinfo_section_name
28451 = ((dwarf_strict && dwarf_version < 5)
28452 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28453 debug_macinfo_section = get_section (debug_macinfo_section_name,
28454 SECTION_DEBUG
28455 | SECTION_EXCLUDE, NULL);
28456 }
28457 else
28458 {
28459 /* ??? Which of the following do we need early? */
28460 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28461 SECTION_DEBUG | SECTION_EXCLUDE,
28462 NULL);
28463 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28464 SECTION_DEBUG | SECTION_EXCLUDE,
28465 NULL);
28466 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28467 SECTION_DEBUG
28468 | SECTION_EXCLUDE, NULL);
28469 debug_skeleton_abbrev_section
28470 = get_section (DEBUG_LTO_ABBREV_SECTION,
28471 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28472 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28473 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28474 generation);
28475
28476 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28477 stay in the main .o, but the skeleton_line goes into the split
28478 off dwo. */
28479 debug_skeleton_line_section
28480 = get_section (DEBUG_LTO_LINE_SECTION,
28481 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28482 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28483 DEBUG_SKELETON_LINE_SECTION_LABEL,
28484 generation);
28485 debug_str_offsets_section
28486 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28487 SECTION_DEBUG | SECTION_EXCLUDE,
28488 NULL);
28489 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28490 DEBUG_SKELETON_INFO_SECTION_LABEL,
28491 generation);
28492 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28493 DEBUG_STR_DWO_SECTION_FLAGS,
28494 NULL);
28495 debug_macinfo_section_name
28496 = ((dwarf_strict && dwarf_version < 5)
28497 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28498 debug_macinfo_section = get_section (debug_macinfo_section_name,
28499 SECTION_DEBUG | SECTION_EXCLUDE,
28500 NULL);
28501 }
28502 /* For macro info and the file table we have to refer to a
28503 debug_line section. */
28504 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28505 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28506 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28507 DEBUG_LINE_SECTION_LABEL, generation);
28508
28509 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28510 DEBUG_STR_SECTION_FLAGS
28511 | SECTION_EXCLUDE, NULL);
28512 if (!dwarf_split_debug_info && !dwarf2out_as_loc_support)
28513 debug_line_str_section
28514 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28515 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28516 }
28517 else
28518 {
28519 if (!dwarf_split_debug_info)
28520 {
28521 debug_info_section = get_section (DEBUG_INFO_SECTION,
28522 SECTION_DEBUG, NULL);
28523 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28524 SECTION_DEBUG, NULL);
28525 debug_loc_section = get_section (dwarf_version >= 5
28526 ? DEBUG_LOCLISTS_SECTION
28527 : DEBUG_LOC_SECTION,
28528 SECTION_DEBUG, NULL);
28529 debug_macinfo_section_name
28530 = ((dwarf_strict && dwarf_version < 5)
28531 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28532 debug_macinfo_section = get_section (debug_macinfo_section_name,
28533 SECTION_DEBUG, NULL);
28534 }
28535 else
28536 {
28537 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28538 SECTION_DEBUG | SECTION_EXCLUDE,
28539 NULL);
28540 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28541 SECTION_DEBUG | SECTION_EXCLUDE,
28542 NULL);
28543 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28544 SECTION_DEBUG, NULL);
28545 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28546 SECTION_DEBUG, NULL);
28547 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28548 SECTION_DEBUG, NULL);
28549 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28550 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28551 generation);
28552
28553 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28554 stay in the main .o, but the skeleton_line goes into the
28555 split off dwo. */
28556 debug_skeleton_line_section
28557 = get_section (DEBUG_DWO_LINE_SECTION,
28558 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28559 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28560 DEBUG_SKELETON_LINE_SECTION_LABEL,
28561 generation);
28562 debug_str_offsets_section
28563 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28564 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28565 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28566 DEBUG_SKELETON_INFO_SECTION_LABEL,
28567 generation);
28568 debug_loc_section = get_section (dwarf_version >= 5
28569 ? DEBUG_DWO_LOCLISTS_SECTION
28570 : DEBUG_DWO_LOC_SECTION,
28571 SECTION_DEBUG | SECTION_EXCLUDE,
28572 NULL);
28573 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28574 DEBUG_STR_DWO_SECTION_FLAGS,
28575 NULL);
28576 debug_macinfo_section_name
28577 = ((dwarf_strict && dwarf_version < 5)
28578 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28579 debug_macinfo_section = get_section (debug_macinfo_section_name,
28580 SECTION_DEBUG | SECTION_EXCLUDE,
28581 NULL);
28582 }
28583 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28584 SECTION_DEBUG, NULL);
28585 debug_line_section = get_section (DEBUG_LINE_SECTION,
28586 SECTION_DEBUG, NULL);
28587 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28588 SECTION_DEBUG, NULL);
28589 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28590 SECTION_DEBUG, NULL);
28591 debug_str_section = get_section (DEBUG_STR_SECTION,
28592 DEBUG_STR_SECTION_FLAGS, NULL);
28593 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28594 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28595 DEBUG_STR_SECTION_FLAGS, NULL);
28596
28597 debug_ranges_section = get_section (dwarf_version >= 5
28598 ? DEBUG_RNGLISTS_SECTION
28599 : DEBUG_RANGES_SECTION,
28600 SECTION_DEBUG, NULL);
28601 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28602 SECTION_DEBUG, NULL);
28603 }
28604
28605 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28606 DEBUG_ABBREV_SECTION_LABEL, generation);
28607 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28608 DEBUG_INFO_SECTION_LABEL, generation);
28609 info_section_emitted = false;
28610 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28611 DEBUG_LINE_SECTION_LABEL, generation);
28612 /* There are up to 4 unique ranges labels per generation.
28613 See also output_rnglists. */
28614 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28615 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28616 if (dwarf_version >= 5 && dwarf_split_debug_info)
28617 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28618 DEBUG_RANGES_SECTION_LABEL,
28619 1 + generation * 4);
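 /* So generation 0 uses ranges label numbers 0-3 and generation 1
 uses 4-7; the base label above takes number 1 + generation * 4. */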
28620 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28621 DEBUG_ADDR_SECTION_LABEL, generation);
28622 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28623 (dwarf_strict && dwarf_version < 5)
28624 ? DEBUG_MACINFO_SECTION_LABEL
28625 : DEBUG_MACRO_SECTION_LABEL, generation);
28626 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28627 generation);
28628
28629 ++generation;
28630 return generation - 1;
28631 }
28632
28633 /* Set up for Dwarf output at the start of compilation. */
28634
28635 static void
28636 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28637 {
28638 /* Allocate the file_table. */
28639 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28640
28641 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28642 /* Allocate the decl_die_table. */
28643 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28644
28645 /* Allocate the decl_loc_table. */
28646 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28647
28648 /* Allocate the cached_dw_loc_list_table. */
28649 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28650
28651 /* Allocate the initial hunk of the decl_scope_table. */
28652 vec_alloc (decl_scope_table, 256);
28653
28654 /* Allocate the initial hunk of the abbrev_die_table. */
28655 vec_alloc (abbrev_die_table, 256);
28656 /* Zero-th entry is allocated, but unused. */
28657 abbrev_die_table->quick_push (NULL);
28658
28659 /* Allocate the dwarf_proc_stack_usage_map. */
28660 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28661
28662 /* Allocate the pubtypes and pubnames vectors. */
28663 vec_alloc (pubname_table, 32);
28664 vec_alloc (pubtype_table, 32);
28665
28666 vec_alloc (incomplete_types, 64);
28667
28668 vec_alloc (used_rtx_array, 32);
28669
28670 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28671 vec_alloc (macinfo_table, 64);
28672 #endif
28673
28674 /* If front-ends already registered a main translation unit but we were not
28675 ready to perform the association, do this now. */
28676 if (main_translation_unit != NULL_TREE)
28677 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28678 }
28679
28680 /* Called before compile () starts outputting functions, variables
28681 and toplevel asms into assembly. */
28682
28683 static void
28684 dwarf2out_assembly_start (void)
28685 {
28686 if (text_section_line_info)
28687 return;
28688
28689 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28690 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28691 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28692 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28693 COLD_TEXT_SECTION_LABEL, 0);
28694 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28695
28696 switch_to_section (text_section);
28697 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28698 #endif
28699
28700 /* Make sure the line number table for .text always exists. */
28701 text_section_line_info = new_line_info_table ();
28702 text_section_line_info->end_label = text_end_label;
28703
28704 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28705 cur_line_info_table = text_section_line_info;
28706 #endif
28707
28708 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28709 && dwarf2out_do_cfi_asm ()
28710 && !dwarf2out_do_eh_frame ())
28711 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28712 }
28713
28714 /* A helper function for dwarf2out_finish called through
28715 htab_traverse. Assign a string its index. All strings must be
28716 collected into the table by the time index_string is called,
28717 because the indexing code relies on htab_traverse to traverse nodes
28718 in the same order for each run. */
28719
28720 int
28721 index_string (indirect_string_node **h, unsigned int *index)
28722 {
28723 indirect_string_node *node = *h;
28724
28725 find_string_form (node);
28726 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28727 {
28728 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28729 node->index = *index;
28730 *index += 1;
28731 }
28732 return 1;
28733 }
28734
28735 /* A helper function for output_indirect_strings called through
28736 htab_traverse. Output the offset to a string and update the
28737 current offset. */
28738
28739 int
28740 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28741 {
28742 indirect_string_node *node = *h;
28743
28744 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28745 {
28746 /* Assert that this node has been assigned an index. */
28747 gcc_assert (node->index != NO_INDEX_ASSIGNED
28748 && node->index != NOT_INDEXED);
28749 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28750 "indexed string 0x%x: %s", node->index, node->str);
28751 *offset += strlen (node->str) + 1;
28752 }
28753 return 1;
28754 }
28755
28756 /* A helper function for dwarf2out_finish called through
28757 htab_traverse. Output the indexed string. */
28758
28759 int
28760 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28761 {
28762 struct indirect_string_node *node = *h;
28763
28764 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28765 {
28766 /* Assert that the strings are output in the same order as their
28767 indexes were assigned. */
28768 gcc_assert (*cur_idx == node->index);
28769 assemble_string (node->str, strlen (node->str) + 1);
28770 *cur_idx += 1;
28771 }
28772 return 1;
28773 }
28774
28775 /* A helper function for output_indirect_strings. Counts the number
28776 of indexed string offsets. Must match the logic of the functions
28777 output_index_string[_offset] above. */
28778 int
28779 count_index_strings (indirect_string_node **h, unsigned int *last_idx)
28780 {
28781 struct indirect_string_node *node = *h;
28782
28783 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28784 *last_idx += 1;
28785 return 1;
28786 }
28787
28788 /* A helper function for dwarf2out_finish called through
28789 htab_traverse. Emit one queued .debug_str string. */
28790
28791 int
28792 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28793 {
28794 struct indirect_string_node *node = *h;
28795
28796 node->form = find_string_form (node);
28797 if (node->form == form && node->refcount > 0)
28798 {
28799 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28800 assemble_string (node->str, strlen (node->str) + 1);
28801 }
28802
28803 return 1;
28804 }
28805
28806 /* Output the indexed string table. */
28807
28808 static void
28809 output_indirect_strings (void)
28810 {
28811 switch_to_section (debug_str_section);
28812 if (!dwarf_split_debug_info)
28813 debug_str_hash->traverse<enum dwarf_form,
28814 output_indirect_string> (DW_FORM_strp);
28815 else
28816 {
28817 unsigned int offset = 0;
28818 unsigned int cur_idx = 0;
28819
28820 if (skeleton_debug_str_hash)
28821 skeleton_debug_str_hash->traverse<enum dwarf_form,
28822 output_indirect_string> (DW_FORM_strp);
28823
28824 switch_to_section (debug_str_offsets_section);
28825 /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
28826 header. Note that we don't need to generate a label to the
28827 actual index table following the header here, because this is
28828 for the split dwarf case only. In a .dwo file there is only
28829 one string offsets table (and one debug info section). But
28830 if we were to start using string offset tables for the main (or
28831 skeleton) unit, then we would have to add a DW_AT_str_offsets_base
28832 pointing to the actual index after the header. Split dwarf
28833 units will never have a string offsets base attribute. When
28834 a split unit is moved into a .dwp file the string offsets can
28835 be found through the .debug_cu_index section table. */
28836 if (dwarf_version >= 5)
28837 {
28838 unsigned int last_idx = 0;
28839 unsigned long str_offsets_length;
28840
28841 debug_str_hash->traverse_noresize
28842 <unsigned int *, count_index_strings> (&last_idx);
28843 str_offsets_length = last_idx * DWARF_OFFSET_SIZE + 4;
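 /* E.g. with 32-bit DWARF (DWARF_OFFSET_SIZE == 4) and 10 indexed
 strings this is 10 * 4 + 4 == 44: the 2-byte version, the 2-byte
 padding and the 40-byte offset array, excluding the initial
 length field itself. */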
28844 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
28845 dw2_asm_output_data (4, 0xffffffff,
28846 "Escape value for 64-bit DWARF extension");
28847 dw2_asm_output_data (DWARF_OFFSET_SIZE, str_offsets_length,
28848 "Length of string offsets unit");
28849 dw2_asm_output_data (2, 5, "DWARF string offsets version");
28850 dw2_asm_output_data (2, 0, "Header zero padding");
28851 }
28852 debug_str_hash->traverse_noresize
28853 <unsigned int *, output_index_string_offset> (&offset);
28854 switch_to_section (debug_str_dwo_section);
28855 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
28856 (&cur_idx);
28857 }
28858 }
28859
28860 /* Callback for htab_traverse to assign an index to an entry in the
28861 table, and to write that entry to the .debug_addr section. */
28862
28863 int
28864 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
28865 {
28866 addr_table_entry *entry = *slot;
28867
28868 if (entry->refcount == 0)
28869 {
28870 gcc_assert (entry->index == NO_INDEX_ASSIGNED
28871 || entry->index == NOT_INDEXED);
28872 return 1;
28873 }
28874
28875 gcc_assert (entry->index == *cur_index);
28876 (*cur_index)++;
28877
28878 switch (entry->kind)
28879 {
28880 case ate_kind_rtx:
28881 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
28882 "0x%x", entry->index);
28883 break;
28884 case ate_kind_rtx_dtprel:
28885 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
28886 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
28887 DWARF2_ADDR_SIZE,
28888 entry->addr.rtl);
28889 fputc ('\n', asm_out_file);
28890 break;
28891 case ate_kind_label:
28892 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
28893 "0x%x", entry->index);
28894 break;
28895 default:
28896 gcc_unreachable ();
28897 }
28898 return 1;
28899 }
28900
28901 /* A helper function for dwarf2out_finish. Counts the number
28902 of indexed addresses. Must match the logic of the function
28903 output_addr_table_entry above. */
28904 int
28905 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
28906 {
28907 addr_table_entry *entry = *slot;
28908
28909 if (entry->refcount > 0)
28910 *last_idx += 1;
28911 return 1;
28912 }
28913
28914 /* Produce the .debug_addr section. */
28915
28916 static void
28917 output_addr_table (void)
28918 {
28919 unsigned int index = 0;
28920 if (addr_index_table == NULL || addr_index_table->size () == 0)
28921 return;
28922
28923 switch_to_section (debug_addr_section);
28924 addr_index_table
28925 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
28926 }
28927
28928 #if ENABLE_ASSERT_CHECKING
28929 /* Verify that all marks are clear. */
28930
28931 static void
28932 verify_marks_clear (dw_die_ref die)
28933 {
28934 dw_die_ref c;
28935
28936 gcc_assert (! die->die_mark);
28937 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
28938 }
28939 #endif /* ENABLE_ASSERT_CHECKING */
28940
28941 /* Clear the marks for a die and its children.
28942 Be cool if the mark isn't set. */
28943
28944 static void
28945 prune_unmark_dies (dw_die_ref die)
28946 {
28947 dw_die_ref c;
28948
28949 if (die->die_mark)
28950 die->die_mark = 0;
28951 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
28952 }
28953
28954 /* Given LOC that is referenced by a DIE we're marking as used, find all
28955 DWARF procedures it references and mark them as used as well. */
28956
28957 static void
28958 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
28959 {
28960 for (; loc != NULL; loc = loc->dw_loc_next)
28961 switch (loc->dw_loc_opc)
28962 {
28963 case DW_OP_implicit_pointer:
28964 case DW_OP_convert:
28965 case DW_OP_reinterpret:
28966 case DW_OP_GNU_implicit_pointer:
28967 case DW_OP_GNU_convert:
28968 case DW_OP_GNU_reinterpret:
28969 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
28970 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28971 break;
28972 case DW_OP_GNU_variable_value:
28973 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
28974 {
28975 dw_die_ref ref
28976 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
28977 if (ref == NULL)
28978 break;
28979 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
28980 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
28981 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
28982 }
28983 /* FALLTHRU */
28984 case DW_OP_call2:
28985 case DW_OP_call4:
28986 case DW_OP_call_ref:
28987 case DW_OP_const_type:
28988 case DW_OP_GNU_const_type:
28989 case DW_OP_GNU_parameter_ref:
28990 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
28991 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
28992 break;
28993 case DW_OP_regval_type:
28994 case DW_OP_deref_type:
28995 case DW_OP_GNU_regval_type:
28996 case DW_OP_GNU_deref_type:
28997 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
28998 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
28999 break;
29000 case DW_OP_entry_value:
29001 case DW_OP_GNU_entry_value:
29002 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
29003 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
29004 break;
29005 default:
29006 break;
29007 }
29008 }
29009
29010 /* Given DIE that we're marking as used, find any other dies
29011 it references as attributes and mark them as used. */
29012
29013 static void
29014 prune_unused_types_walk_attribs (dw_die_ref die)
29015 {
29016 dw_attr_node *a;
29017 unsigned ix;
29018
29019 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29020 {
29021 switch (AT_class (a))
29022 {
29023 /* Make sure DWARF procedures referenced by location descriptions will
29024 get emitted. */
29025 case dw_val_class_loc:
29026 prune_unused_types_walk_loc_descr (AT_loc (a));
29027 break;
29028 case dw_val_class_loc_list:
29029 for (dw_loc_list_ref list = AT_loc_list (a);
29030 list != NULL;
29031 list = list->dw_loc_next)
29032 prune_unused_types_walk_loc_descr (list->expr);
29033 break;
29034
29035 case dw_val_class_view_list:
29036 /* This points to a loc_list in another attribute, so it's
29037 already covered. */
29038 break;
29039
29040 case dw_val_class_die_ref:
29041 /* A reference to another DIE.
29042 Make sure that it will get emitted.
29043 If it was broken out into a comdat group, don't follow it. */
29044 if (! AT_ref (a)->comdat_type_p
29045 || a->dw_attr == DW_AT_specification)
29046 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29047 break;
29048
29049 case dw_val_class_str:
29050 /* Set the string's refcount to 0 so that prune_unused_types_mark
29051 accounts properly for it. */
29052 a->dw_attr_val.v.val_str->refcount = 0;
29053 break;
29054
29055 default:
29056 break;
29057 }
29058 }
29059 }
29060
29061 /* Mark the children DIEs of DIE that describe its generic parameters and arguments. */
29062
29063 static void
29064 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29065 {
29066 dw_die_ref c;
29067
29068 if (die == NULL || die->die_child == NULL)
29069 return;
29070 c = die->die_child;
29071 do
29072 {
29073 if (is_template_parameter (c))
29074 prune_unused_types_mark (c, 1);
29075 c = c->die_sib;
29076 } while (c && c != die->die_child);
29077 }
29078
29079 /* Mark DIE as being used. If DOKIDS is true, then walk down
29080 to DIE's children. */
29081
29082 static void
29083 prune_unused_types_mark (dw_die_ref die, int dokids)
29084 {
29085 dw_die_ref c;
29086
29087 if (die->die_mark == 0)
29088 {
29089 /* We haven't done this node yet. Mark it as used. */
29090 die->die_mark = 1;
29091 /* If this is the DIE of a generic type instantiation,
29092 mark the children DIEs that describe its generic parms and
29093 args. */
29094 prune_unused_types_mark_generic_parms_dies (die);
29095
29096 /* We also have to mark its parents as used.
29097 (But we don't want to mark our parent's kids due to this,
29098 unless it is a class.) */
29099 if (die->die_parent)
29100 prune_unused_types_mark (die->die_parent,
29101 class_scope_p (die->die_parent));
29102
29103 /* Mark any referenced nodes. */
29104 prune_unused_types_walk_attribs (die);
29105
29106 /* If this node is a specification,
29107 also mark the definition, if it exists. */
29108 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29109 prune_unused_types_mark (die->die_definition, 1);
29110 }
29111
29112 if (dokids && die->die_mark != 2)
29113 {
29114 /* We need to walk the children, but haven't done so yet.
29115 Remember that we've walked the kids. */
29116 die->die_mark = 2;
29117
29118 /* If this is an array type, we need to make sure our
29119 kids get marked, even if they're types. If we're
29120 breaking out types into comdat sections, do this
29121 for all type definitions. */
29122 if (die->die_tag == DW_TAG_array_type
29123 || (use_debug_types
29124 && is_type_die (die) && ! is_declaration_die (die)))
29125 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29126 else
29127 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29128 }
29129 }
29130
29131 /* For local classes, check whether any static member functions were
29132    emitted and, if so, mark them. */
29133
29134 static void
29135 prune_unused_types_walk_local_classes (dw_die_ref die)
29136 {
29137 dw_die_ref c;
29138
29139 if (die->die_mark == 2)
29140 return;
29141
29142 switch (die->die_tag)
29143 {
29144 case DW_TAG_structure_type:
29145 case DW_TAG_union_type:
29146 case DW_TAG_class_type:
29147 break;
29148
29149 case DW_TAG_subprogram:
29150 if (!get_AT_flag (die, DW_AT_declaration)
29151 || die->die_definition != NULL)
29152 prune_unused_types_mark (die, 1);
29153 return;
29154
29155 default:
29156 return;
29157 }
29158
29159 /* Mark children. */
29160 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29161 }
29162
29163 /* Walk the tree DIE and mark types that we actually use. */
29164
29165 static void
29166 prune_unused_types_walk (dw_die_ref die)
29167 {
29168 dw_die_ref c;
29169
29170 /* Don't do anything if this node is already marked and
29171 children have been marked as well. */
29172 if (die->die_mark == 2)
29173 return;
29174
29175 switch (die->die_tag)
29176 {
29177 case DW_TAG_structure_type:
29178 case DW_TAG_union_type:
29179 case DW_TAG_class_type:
29180 if (die->die_perennial_p)
29181 break;
29182
29183 for (c = die->die_parent; c; c = c->die_parent)
29184 if (c->die_tag == DW_TAG_subprogram)
29185 break;
29186
29187 /* Finding used static member functions inside of classes
29188 is needed just for local classes, because for other classes
29189 static member function DIEs with DW_AT_specification
29190 are emitted outside of the DW_TAG_*_type. If we ever change
29191 it, we'd need to call this even for non-local classes. */
29192 if (c)
29193 prune_unused_types_walk_local_classes (die);
29194
29195 /* It's a type node --- don't mark it. */
29196 return;
29197
29198 case DW_TAG_const_type:
29199 case DW_TAG_packed_type:
29200 case DW_TAG_pointer_type:
29201 case DW_TAG_reference_type:
29202 case DW_TAG_rvalue_reference_type:
29203 case DW_TAG_volatile_type:
29204 case DW_TAG_typedef:
29205 case DW_TAG_array_type:
29206 case DW_TAG_interface_type:
29207 case DW_TAG_friend:
29208 case DW_TAG_enumeration_type:
29209 case DW_TAG_subroutine_type:
29210 case DW_TAG_string_type:
29211 case DW_TAG_set_type:
29212 case DW_TAG_subrange_type:
29213 case DW_TAG_ptr_to_member_type:
29214 case DW_TAG_file_type:
29215 /* Type nodes are useful only when other DIEs reference them --- don't
29216 mark them. */
29217 /* FALLTHROUGH */
29218
29219 case DW_TAG_dwarf_procedure:
29220 /* Likewise for DWARF procedures. */
29221
29222 if (die->die_perennial_p)
29223 break;
29224
29225 return;
29226
29227 default:
29228 /* Mark everything else. */
29229 break;
29230 }
29231
29232 if (die->die_mark == 0)
29233 {
29234 die->die_mark = 1;
29235
29236 /* Now, mark any dies referenced from here. */
29237 prune_unused_types_walk_attribs (die);
29238 }
29239
29240 die->die_mark = 2;
29241
29242 /* Mark children. */
29243 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29244 }
29245
29246 /* Increment the string counts on strings referred to from DIE's
29247 attributes. */
29248
29249 static void
29250 prune_unused_types_update_strings (dw_die_ref die)
29251 {
29252 dw_attr_node *a;
29253 unsigned ix;
29254
29255 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29256 if (AT_class (a) == dw_val_class_str)
29257 {
29258 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29259 s->refcount++;
29260 /* Avoid unnecessarily putting strings that are used less than
29261 twice in the hash table. */
29262 if (s->refcount
29263 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29264 {
29265 indirect_string_node **slot
29266 = debug_str_hash->find_slot_with_hash (s->str,
29267 htab_hash_string (s->str),
29268 INSERT);
29269 gcc_assert (*slot == NULL);
29270 *slot = s;
29271 }
29272 }
29273 }
29274
29275 /* Mark DIE and its children as removed. */
29276
29277 static void
29278 mark_removed (dw_die_ref die)
29279 {
29280 dw_die_ref c;
29281 die->removed = true;
29282 FOR_EACH_CHILD (die, c, mark_removed (c));
29283 }
29284
29285 /* Remove from the tree DIE any dies that aren't marked. */
29286
29287 static void
29288 prune_unused_types_prune (dw_die_ref die)
29289 {
29290 dw_die_ref c;
29291
29292 gcc_assert (die->die_mark);
29293 prune_unused_types_update_strings (die);
29294
29295 if (! die->die_child)
29296 return;
29297
29298 c = die->die_child;
29299 do {
29300 dw_die_ref prev = c, next;
29301 for (c = c->die_sib; ! c->die_mark; c = next)
29302 if (c == die->die_child)
29303 {
29304 /* No marked children between 'prev' and the end of the list. */
29305 if (prev == c)
29306 /* No marked children at all. */
29307 die->die_child = NULL;
29308 else
29309 {
29310 prev->die_sib = c->die_sib;
29311 die->die_child = prev;
29312 }
29313 c->die_sib = NULL;
29314 mark_removed (c);
29315 return;
29316 }
29317 else
29318 {
29319 next = c->die_sib;
29320 c->die_sib = NULL;
29321 mark_removed (c);
29322 }
29323
29324 if (c != prev->die_sib)
29325 prev->die_sib = c;
29326 prune_unused_types_prune (c);
29327 } while (c != die->die_child);
29328 }
29329
29330 /* Remove dies representing declarations that we never use. */
29331
29332 static void
29333 prune_unused_types (void)
29334 {
29335 unsigned int i;
29336 limbo_die_node *node;
29337 comdat_type_node *ctnode;
29338 pubname_entry *pub;
29339 dw_die_ref base_type;
29340
29341 #if ENABLE_ASSERT_CHECKING
29342 /* All the marks should already be clear. */
29343 verify_marks_clear (comp_unit_die ());
29344 for (node = limbo_die_list; node; node = node->next)
29345 verify_marks_clear (node->die);
29346 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29347 verify_marks_clear (ctnode->root_die);
29348 #endif /* ENABLE_ASSERT_CHECKING */
29349
29350 /* Mark types that are used in global variables. */
29351 premark_types_used_by_global_vars ();
29352
29353 /* Set the mark on nodes that are actually used. */
29354 prune_unused_types_walk (comp_unit_die ());
29355 for (node = limbo_die_list; node; node = node->next)
29356 prune_unused_types_walk (node->die);
29357 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29358 {
29359 prune_unused_types_walk (ctnode->root_die);
29360 prune_unused_types_mark (ctnode->type_die, 1);
29361 }
29362
29363 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29364 are unusual in that they are pubnames that are the children of pubtypes.
29365 They should only be marked via their parent DW_TAG_enumeration_type die,
29366 not as roots in themselves. */
29367 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29368 if (pub->die->die_tag != DW_TAG_enumerator)
29369 prune_unused_types_mark (pub->die, 1);
29370 for (i = 0; base_types.iterate (i, &base_type); i++)
29371 prune_unused_types_mark (base_type, 1);
29372
29373 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29374 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29375 callees). */
29376 cgraph_node *cnode;
29377 FOR_EACH_FUNCTION (cnode)
29378 if (cnode->referred_to_p (false))
29379 {
29380 dw_die_ref die = lookup_decl_die (cnode->decl);
29381 if (die == NULL || die->die_mark)
29382 continue;
29383 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29384 if (e->caller != cnode
29385 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29386 {
29387 prune_unused_types_mark (die, 1);
29388 break;
29389 }
29390 }
29391
29392 if (debug_str_hash)
29393 debug_str_hash->empty ();
29394 if (skeleton_debug_str_hash)
29395 skeleton_debug_str_hash->empty ();
29396 prune_unused_types_prune (comp_unit_die ());
29397 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29398 {
29399 node = *pnode;
29400 if (!node->die->die_mark)
29401 *pnode = node->next;
29402 else
29403 {
29404 prune_unused_types_prune (node->die);
29405 pnode = &node->next;
29406 }
29407 }
29408 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29409 prune_unused_types_prune (ctnode->root_die);
29410
29411 /* Leave the marks clear. */
29412 prune_unmark_dies (comp_unit_die ());
29413 for (node = limbo_die_list; node; node = node->next)
29414 prune_unmark_dies (node->die);
29415 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29416 prune_unmark_dies (ctnode->root_die);
29417 }
29418
29419 /* Helpers to manipulate hash table of comdat type units. */
29420
29421 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29422 {
29423 static inline hashval_t hash (const comdat_type_node *);
29424 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29425 };
29426
29427 inline hashval_t
29428 comdat_type_hasher::hash (const comdat_type_node *type_node)
29429 {
29430 hashval_t h;
29431 memcpy (&h, type_node->signature, sizeof (h));
29432 return h;
29433 }
29434
29435 inline bool
29436 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29437 const comdat_type_node *type_node_2)
29438 {
29439 return (! memcmp (type_node_1->signature, type_node_2->signature,
29440 DWARF_TYPE_SIGNATURE_SIZE));
29441 }
29442
29443 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to DIE
29444    to the location where it would have been added had we known its
29445    DECL_ASSEMBLER_NAME when we added the other attributes. This will
29446    probably improve the compactness of the debug info by removing
29447    equivalent abbrevs, and hide any differences caused by deferring
29448    the computation of the assembler name, e.g. when triggered by PCH. */
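/* Illustrative sketch (hypothetical attribute order, not taken from real
   output): if DIE's attributes are
     DW_AT_name, DW_AT_decl_file, DW_AT_decl_line, DW_AT_type, DW_AT_linkage_name
   with DW_AT_linkage_name just appended, the loop below scans backwards until
   it finds DW_AT_decl_line/DW_AT_decl_column/DW_AT_name and reinserts the
   linkage name right after it, giving
     DW_AT_name, DW_AT_decl_file, DW_AT_decl_line, DW_AT_linkage_name, DW_AT_type.  */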
29449
29450 static inline void
29451 move_linkage_attr (dw_die_ref die)
29452 {
29453 unsigned ix = vec_safe_length (die->die_attr);
29454 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29455
29456 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29457 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29458
29459 while (--ix > 0)
29460 {
29461 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29462
29463 if (prev->dw_attr == DW_AT_decl_line
29464 || prev->dw_attr == DW_AT_decl_column
29465 || prev->dw_attr == DW_AT_name)
29466 break;
29467 }
29468
29469 if (ix != vec_safe_length (die->die_attr) - 1)
29470 {
29471 die->die_attr->pop ();
29472 die->die_attr->quick_insert (ix, linkage);
29473 }
29474 }
29475
29476 /* Helper function for resolve_addr; mark DW_TAG_base_type nodes
29477    referenced from typed stack ops and count how often they are used. */
29478
29479 static void
29480 mark_base_types (dw_loc_descr_ref loc)
29481 {
29482 dw_die_ref base_type = NULL;
29483
29484 for (; loc; loc = loc->dw_loc_next)
29485 {
29486 switch (loc->dw_loc_opc)
29487 {
29488 case DW_OP_regval_type:
29489 case DW_OP_deref_type:
29490 case DW_OP_GNU_regval_type:
29491 case DW_OP_GNU_deref_type:
29492 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29493 break;
29494 case DW_OP_convert:
29495 case DW_OP_reinterpret:
29496 case DW_OP_GNU_convert:
29497 case DW_OP_GNU_reinterpret:
29498 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29499 continue;
29500 /* FALLTHRU */
29501 case DW_OP_const_type:
29502 case DW_OP_GNU_const_type:
29503 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29504 break;
29505 case DW_OP_entry_value:
29506 case DW_OP_GNU_entry_value:
29507 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29508 continue;
29509 default:
29510 continue;
29511 }
29512 gcc_assert (base_type->die_parent == comp_unit_die ());
29513 if (base_type->die_mark)
29514 base_type->die_mark++;
29515 else
29516 {
29517 base_types.safe_push (base_type);
29518 base_type->die_mark = 1;
29519 }
29520 }
29521 }
29522
29523 /* Comparison function for sorting marked base types. */
29524
29525 static int
29526 base_type_cmp (const void *x, const void *y)
29527 {
29528 dw_die_ref dx = *(const dw_die_ref *) x;
29529 dw_die_ref dy = *(const dw_die_ref *) y;
29530 unsigned int byte_size1, byte_size2;
29531 unsigned int encoding1, encoding2;
29532 unsigned int align1, align2;
29533 if (dx->die_mark > dy->die_mark)
29534 return -1;
29535 if (dx->die_mark < dy->die_mark)
29536 return 1;
29537 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29538 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29539 if (byte_size1 < byte_size2)
29540 return 1;
29541 if (byte_size1 > byte_size2)
29542 return -1;
29543 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29544 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29545 if (encoding1 < encoding2)
29546 return 1;
29547 if (encoding1 > encoding2)
29548 return -1;
29549 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29550 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29551 if (align1 < align2)
29552 return 1;
29553 if (align1 > align2)
29554 return -1;
29555 return 0;
29556 }
29557
29558 /* Move base types marked by mark_base_types as early as possible
29559    in the CU, sorted by decreasing usage count, both to make the
29560    uleb128 references as small as possible and to make sure they
29561    will have die_offset already computed by calc_die_sizes when
29562    the sizes of typed stack loc ops are computed. */
29563
29564 static void
29565 move_marked_base_types (void)
29566 {
29567 unsigned int i;
29568 dw_die_ref base_type, die, c;
29569
29570 if (base_types.is_empty ())
29571 return;
29572
29573 /* Sort by decreasing usage count, they will be added again in that
29574 order later on. */
29575 base_types.qsort (base_type_cmp);
29576 die = comp_unit_die ();
29577 c = die->die_child;
29578 do
29579 {
29580 dw_die_ref prev = c;
29581 c = c->die_sib;
29582 while (c->die_mark)
29583 {
29584 remove_child_with_prev (c, prev);
29585 /* As base types got marked, there must be at least
29586 one node other than DW_TAG_base_type. */
29587 gcc_assert (die->die_child != NULL);
29588 c = prev->die_sib;
29589 }
29590 }
29591 while (c != die->die_child);
29592 gcc_assert (die->die_child);
29593 c = die->die_child;
29594 for (i = 0; base_types.iterate (i, &base_type); i++)
29595 {
29596 base_type->die_mark = 0;
29597 base_type->die_sib = c->die_sib;
29598 c->die_sib = base_type;
29599 c = base_type;
29600 }
29601 }
29602
29603 /* Helper function for resolve_addr; attempt to resolve
29604    one CONST_STRING and return true if successful. Similarly verify
29605    that SYMBOL_REFs refer to variables emitted in the current CU. */
29606
29607 static bool
29608 resolve_one_addr (rtx *addr)
29609 {
29610 rtx rtl = *addr;
29611
29612 if (GET_CODE (rtl) == CONST_STRING)
29613 {
29614 size_t len = strlen (XSTR (rtl, 0)) + 1;
29615 tree t = build_string (len, XSTR (rtl, 0));
29616 tree tlen = size_int (len - 1);
29617 TREE_TYPE (t)
29618 = build_array_type (char_type_node, build_index_type (tlen));
29619 rtl = lookup_constant_def (t);
29620 if (!rtl || !MEM_P (rtl))
29621 return false;
29622 rtl = XEXP (rtl, 0);
29623 if (GET_CODE (rtl) == SYMBOL_REF
29624 && SYMBOL_REF_DECL (rtl)
29625 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29626 return false;
29627 vec_safe_push (used_rtx_array, rtl);
29628 *addr = rtl;
29629 return true;
29630 }
29631
29632 if (GET_CODE (rtl) == SYMBOL_REF
29633 && SYMBOL_REF_DECL (rtl))
29634 {
29635 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29636 {
29637 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29638 return false;
29639 }
29640 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29641 return false;
29642 }
29643
29644 if (GET_CODE (rtl) == CONST)
29645 {
29646 subrtx_ptr_iterator::array_type array;
29647 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29648 if (!resolve_one_addr (*iter))
29649 return false;
29650 }
29651
29652 return true;
29653 }
29654
29655 /* For STRING_CST T, return the SYMBOL_REF of its constant pool entry,
29656    if possible, and, if the string hasn't been seen yet, create a
29657    DW_TAG_dwarf_procedure that can be referenced from DW_OP_implicit_pointer. */
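/* A minimal illustration (hypothetical string): for a STRING_CST "abc" of
   length 4 (including the terminating NUL), the DIE created below is a
   DW_TAG_dwarf_procedure whose DW_AT_location is a single
   DW_OP_implicit_value <4> holding the string bytes, keyed to the constant
   pool decl so later lookups reuse it.  */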
29658
29659 static rtx
29660 string_cst_pool_decl (tree t)
29661 {
29662 rtx rtl = output_constant_def (t, 1);
29663 unsigned char *array;
29664 dw_loc_descr_ref l;
29665 tree decl;
29666 size_t len;
29667 dw_die_ref ref;
29668
29669 if (!rtl || !MEM_P (rtl))
29670 return NULL_RTX;
29671 rtl = XEXP (rtl, 0);
29672 if (GET_CODE (rtl) != SYMBOL_REF
29673 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29674 return NULL_RTX;
29675
29676 decl = SYMBOL_REF_DECL (rtl);
29677 if (!lookup_decl_die (decl))
29678 {
29679 len = TREE_STRING_LENGTH (t);
29680 vec_safe_push (used_rtx_array, rtl);
29681 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29682 array = ggc_vec_alloc<unsigned char> (len);
29683 memcpy (array, TREE_STRING_POINTER (t), len);
29684 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29685 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29686 l->dw_loc_oprnd2.v.val_vec.length = len;
29687 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29688 l->dw_loc_oprnd2.v.val_vec.array = array;
29689 add_AT_loc (ref, DW_AT_location, l);
29690 equate_decl_number_to_die (decl, ref);
29691 }
29692 return rtl;
29693 }
29694
29695 /* Helper function of resolve_addr_in_expr. LOC is
29696    a DW_OP_addr followed by DW_OP_stack_value, either at the start
29697    of the exprloc or after DW_OP_{,bit_}piece, whose val_addr can't be
29698    resolved. Replace the pair (both DW_OP_addr and DW_OP_stack_value)
29699    with DW_OP_implicit_pointer if possible and return true;
29700    if unsuccessful, return false. */
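/* For example (illustrative), a location expression of
     DW_OP_addr <sym-of-var> DW_OP_stack_value
   whose symbol was never emitted may become
     DW_OP_implicit_pointer <DIE-of-var> <offset>
   provided the referenced variable's DIE carries DW_AT_location or
   DW_AT_const_value.  */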
29701
29702 static bool
29703 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29704 {
29705 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29706 HOST_WIDE_INT offset = 0;
29707 dw_die_ref ref = NULL;
29708 tree decl;
29709
29710 if (GET_CODE (rtl) == CONST
29711 && GET_CODE (XEXP (rtl, 0)) == PLUS
29712 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29713 {
29714 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29715 rtl = XEXP (XEXP (rtl, 0), 0);
29716 }
29717 if (GET_CODE (rtl) == CONST_STRING)
29718 {
29719 size_t len = strlen (XSTR (rtl, 0)) + 1;
29720 tree t = build_string (len, XSTR (rtl, 0));
29721 tree tlen = size_int (len - 1);
29722
29723 TREE_TYPE (t)
29724 = build_array_type (char_type_node, build_index_type (tlen));
29725 rtl = string_cst_pool_decl (t);
29726 if (!rtl)
29727 return false;
29728 }
29729 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29730 {
29731 decl = SYMBOL_REF_DECL (rtl);
29732 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29733 {
29734 ref = lookup_decl_die (decl);
29735 if (ref && (get_AT (ref, DW_AT_location)
29736 || get_AT (ref, DW_AT_const_value)))
29737 {
29738 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29739 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29740 loc->dw_loc_oprnd1.val_entry = NULL;
29741 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29742 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29743 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29744 loc->dw_loc_oprnd2.v.val_int = offset;
29745 return true;
29746 }
29747 }
29748 }
29749 return false;
29750 }
29751
29752 /* Helper function for resolve_addr; handle one location
29753    expression and return false if at least one CONST_STRING or
29754    SYMBOL_REF in it couldn't be resolved. */
29755
29756 static bool
29757 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29758 {
29759 dw_loc_descr_ref keep = NULL;
29760 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29761 switch (loc->dw_loc_opc)
29762 {
29763 case DW_OP_addr:
29764 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29765 {
29766 if ((prev == NULL
29767 || prev->dw_loc_opc == DW_OP_piece
29768 || prev->dw_loc_opc == DW_OP_bit_piece)
29769 && loc->dw_loc_next
29770 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29771 && (!dwarf_strict || dwarf_version >= 5)
29772 && optimize_one_addr_into_implicit_ptr (loc))
29773 break;
29774 return false;
29775 }
29776 break;
29777 case DW_OP_GNU_addr_index:
29778 case DW_OP_addrx:
29779 case DW_OP_GNU_const_index:
29780 case DW_OP_constx:
29781 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
29782 || loc->dw_loc_opc == DW_OP_addrx)
29783 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
29784 || loc->dw_loc_opc == DW_OP_constx)
29785 && loc->dtprel))
29786 {
29787 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29788 if (!resolve_one_addr (&rtl))
29789 return false;
29790 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29791 loc->dw_loc_oprnd1.val_entry
29792 = add_addr_table_entry (rtl, ate_kind_rtx);
29793 }
29794 break;
29795 case DW_OP_const4u:
29796 case DW_OP_const8u:
29797 if (loc->dtprel
29798 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29799 return false;
29800 break;
29801 case DW_OP_plus_uconst:
29802 if (size_of_loc_descr (loc)
29803 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
29804 + 1
29805 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
29806 {
29807 dw_loc_descr_ref repl
29808 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
29809 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
29810 add_loc_descr (&repl, loc->dw_loc_next);
29811 *loc = *repl;
29812 }
29813 break;
29814 case DW_OP_implicit_value:
29815 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
29816 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
29817 return false;
29818 break;
29819 case DW_OP_implicit_pointer:
29820 case DW_OP_GNU_implicit_pointer:
29821 case DW_OP_GNU_parameter_ref:
29822 case DW_OP_GNU_variable_value:
29823 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29824 {
29825 dw_die_ref ref
29826 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29827 if (ref == NULL)
29828 return false;
29829 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29830 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29831 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29832 }
29833 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
29834 {
29835 if (prev == NULL
29836 && loc->dw_loc_next == NULL
29837 && AT_class (a) == dw_val_class_loc)
29838 switch (a->dw_attr)
29839 {
29840 /* Following attributes allow both exprloc and reference,
29841 so if the whole expression is DW_OP_GNU_variable_value
29842 alone we could transform it into reference. */
29843 case DW_AT_byte_size:
29844 case DW_AT_bit_size:
29845 case DW_AT_lower_bound:
29846 case DW_AT_upper_bound:
29847 case DW_AT_bit_stride:
29848 case DW_AT_count:
29849 case DW_AT_allocated:
29850 case DW_AT_associated:
29851 case DW_AT_byte_stride:
29852 a->dw_attr_val.val_class = dw_val_class_die_ref;
29853 a->dw_attr_val.val_entry = NULL;
29854 a->dw_attr_val.v.val_die_ref.die
29855 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29856 a->dw_attr_val.v.val_die_ref.external = 0;
29857 return true;
29858 default:
29859 break;
29860 }
29861 if (dwarf_strict)
29862 return false;
29863 }
29864 break;
29865 case DW_OP_const_type:
29866 case DW_OP_regval_type:
29867 case DW_OP_deref_type:
29868 case DW_OP_convert:
29869 case DW_OP_reinterpret:
29870 case DW_OP_GNU_const_type:
29871 case DW_OP_GNU_regval_type:
29872 case DW_OP_GNU_deref_type:
29873 case DW_OP_GNU_convert:
29874 case DW_OP_GNU_reinterpret:
29875 while (loc->dw_loc_next
29876 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
29877 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
29878 {
29879 dw_die_ref base1, base2;
29880 unsigned enc1, enc2, size1, size2;
29881 if (loc->dw_loc_opc == DW_OP_regval_type
29882 || loc->dw_loc_opc == DW_OP_deref_type
29883 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29884 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29885 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
29886 else if (loc->dw_loc_oprnd1.val_class
29887 == dw_val_class_unsigned_const)
29888 break;
29889 else
29890 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
29891 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
29892 == dw_val_class_unsigned_const)
29893 break;
29894 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
29895 gcc_assert (base1->die_tag == DW_TAG_base_type
29896 && base2->die_tag == DW_TAG_base_type);
29897 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
29898 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
29899 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
29900 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
29901 if (size1 == size2
29902 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
29903 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
29904 && loc != keep)
29905 || enc1 == enc2))
29906 {
29907 /* Optimize away next DW_OP_convert after
29908 adjusting LOC's base type die reference. */
29909 if (loc->dw_loc_opc == DW_OP_regval_type
29910 || loc->dw_loc_opc == DW_OP_deref_type
29911 || loc->dw_loc_opc == DW_OP_GNU_regval_type
29912 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
29913 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
29914 else
29915 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
29916 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29917 continue;
29918 }
29919 /* Don't change integer DW_OP_convert after e.g. floating
29920 point typed stack entry. */
29921 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
29922 keep = loc->dw_loc_next;
29923 break;
29924 }
29925 break;
29926 default:
29927 break;
29928 }
29929 return true;
29930 }
29931
29932 /* Helper function of resolve_addr. DIE had a DW_AT_location of
29933    DW_OP_addr alone, which referred to DECL in its operand,
29934    and the DW_OP_addr couldn't be resolved. resolve_addr has already
29935    removed the DW_AT_location attribute. This function attempts to
29936    add to DIE a new DW_AT_location attribute with DW_OP_implicit_pointer,
29937    or a DW_AT_const_value attribute, if possible. */
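/* A minimal sketch of the case handled here, with hypothetical names:

     static int referenced_var = 42;
     static int *optimized_away_ptr = &referenced_var;

   If optimized_away_ptr ends up with no storage of its own, its DIE can
   still describe the value via DW_OP_implicit_pointer
   <DIE-of-referenced_var> <0>, or, when the initializer is a usable
   constant, via DW_AT_const_value.  */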
29938
29939 static void
29940 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
29941 {
29942 if (!VAR_P (decl)
29943 || lookup_decl_die (decl) != die
29944 || DECL_EXTERNAL (decl)
29945 || !TREE_STATIC (decl)
29946 || DECL_INITIAL (decl) == NULL_TREE
29947 || DECL_P (DECL_INITIAL (decl))
29948 || get_AT (die, DW_AT_const_value))
29949 return;
29950
29951 tree init = DECL_INITIAL (decl);
29952 HOST_WIDE_INT offset = 0;
29953 /* For variables that have been optimized away and thus
29954 don't have a memory location, see if we can emit
29955 DW_AT_const_value instead. */
29956 if (tree_add_const_value_attribute (die, init))
29957 return;
29958 if (dwarf_strict && dwarf_version < 5)
29959 return;
29960 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
29961 and ADDR_EXPR refers to a decl that has DW_AT_location or
29962 DW_AT_const_value (but isn't addressable, otherwise
29963 resolving the original DW_OP_addr wouldn't fail), see if
29964 we can add DW_OP_implicit_pointer. */
29965 STRIP_NOPS (init);
29966 if (TREE_CODE (init) == POINTER_PLUS_EXPR
29967 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
29968 {
29969 offset = tree_to_shwi (TREE_OPERAND (init, 1));
29970 init = TREE_OPERAND (init, 0);
29971 STRIP_NOPS (init);
29972 }
29973 if (TREE_CODE (init) != ADDR_EXPR)
29974 return;
29975 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
29976 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
29977 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
29978 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
29979 && TREE_OPERAND (init, 0) != decl))
29980 {
29981 dw_die_ref ref;
29982 dw_loc_descr_ref l;
29983
29984 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
29985 {
29986 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
29987 if (!rtl)
29988 return;
29989 decl = SYMBOL_REF_DECL (rtl);
29990 }
29991 else
29992 decl = TREE_OPERAND (init, 0);
29993 ref = lookup_decl_die (decl);
29994 if (ref == NULL
29995 || (!get_AT (ref, DW_AT_location)
29996 && !get_AT (ref, DW_AT_const_value)))
29997 return;
29998 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
29999 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30000 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
30001 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30002 add_AT_loc (die, DW_AT_location, l);
30003 }
30004 }
30005
30006 /* Return NULL if L is a valid DWARF expression, or the first op
30007    that is not a valid DWARF expression otherwise. */
30008
30009 static dw_loc_descr_ref
30010 non_dwarf_expression (dw_loc_descr_ref l)
30011 {
30012 while (l)
30013 {
30014 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30015 return l;
30016 switch (l->dw_loc_opc)
30017 {
30018 case DW_OP_regx:
30019 case DW_OP_implicit_value:
30020 case DW_OP_stack_value:
30021 case DW_OP_implicit_pointer:
30022 case DW_OP_GNU_implicit_pointer:
30023 case DW_OP_GNU_parameter_ref:
30024 case DW_OP_piece:
30025 case DW_OP_bit_piece:
30026 return l;
30027 default:
30028 break;
30029 }
30030 l = l->dw_loc_next;
30031 }
30032 return NULL;
30033 }
30034
30035 /* Return an adjusted copy of EXPR:
30036    If it is an empty DWARF expression, return it.
30037    If it is a valid non-empty DWARF expression,
30038    return a copy of EXPR with DW_OP_deref appended to it.
30039    If it is a DWARF expression followed by DW_OP_reg{N,x}, return a
30040    copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
30041    If it is a DWARF expression followed by DW_OP_stack_value, return a
30042    copy of the DWARF expression with nothing appended.
30043    Otherwise, return NULL. */
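/* For example (illustrative): DW_OP_fbreg <-16> becomes
   DW_OP_fbreg <-16> DW_OP_deref; a lone DW_OP_reg3 becomes
   DW_OP_breg3 <0>; and a trailing DW_OP_stack_value is simply
   dropped from the copy.  */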
30044
30045 static dw_loc_descr_ref
30046 copy_deref_exprloc (dw_loc_descr_ref expr)
30047 {
30048 dw_loc_descr_ref tail = NULL;
30049
30050 if (expr == NULL)
30051 return NULL;
30052
30053 dw_loc_descr_ref l = non_dwarf_expression (expr);
30054 if (l && l->dw_loc_next)
30055 return NULL;
30056
30057 if (l)
30058 {
30059 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30060 tail = new_loc_descr ((enum dwarf_location_atom)
30061 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30062 0, 0);
30063 else
30064 switch (l->dw_loc_opc)
30065 {
30066 case DW_OP_regx:
30067 tail = new_loc_descr (DW_OP_bregx,
30068 l->dw_loc_oprnd1.v.val_unsigned, 0);
30069 break;
30070 case DW_OP_stack_value:
30071 break;
30072 default:
30073 return NULL;
30074 }
30075 }
30076 else
30077 tail = new_loc_descr (DW_OP_deref, 0, 0);
30078
30079 dw_loc_descr_ref ret = NULL, *p = &ret;
30080 while (expr != l)
30081 {
30082 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30083 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30084 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30085 p = &(*p)->dw_loc_next;
30086 expr = expr->dw_loc_next;
30087 }
30088 *p = tail;
30089 return ret;
30090 }
30091
30092 /* For a DW_AT_string_length attribute whose DW_OP_GNU_variable_value
30093    references a variable or argument, adjust it if needed and return:
30094    -1 if the DW_AT_string_length attribute (and, if present, the
30095       DW_AT_{string_length_,}byte_size attribute) should be removed,
30096     0 to keep the attribute, perhaps with minor modifications (no rescan),
30097     1 if the attribute has been successfully adjusted. */
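/* For example (illustrative), a DW_AT_string_length of
     DW_OP_GNU_variable_value <DIE-of-length-var> DW_OP_stack_value
   whose referenced DIE has a plain DWARF location can be rewritten as
   DW_OP_call4 <DIE-of-length-var>, and a lone DW_OP_GNU_variable_value
   as DW_OP_call4 DW_OP_deref; with DWARF 5 the former may instead be
   replaced by a direct DIE reference.  */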
30098
30099 static int
30100 optimize_string_length (dw_attr_node *a)
30101 {
30102 dw_loc_descr_ref l = AT_loc (a), lv;
30103 dw_die_ref die;
30104 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30105 {
30106 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30107 die = lookup_decl_die (decl);
30108 if (die)
30109 {
30110 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30111 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30112 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30113 }
30114 else
30115 return -1;
30116 }
30117 else
30118 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30119
30120 /* DWARF5 allows reference class, so we can then reference the DIE.
30121 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30122 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30123 {
30124 a->dw_attr_val.val_class = dw_val_class_die_ref;
30125 a->dw_attr_val.val_entry = NULL;
30126 a->dw_attr_val.v.val_die_ref.die = die;
30127 a->dw_attr_val.v.val_die_ref.external = 0;
30128 return 0;
30129 }
30130
30131 dw_attr_node *av = get_AT (die, DW_AT_location);
30132 dw_loc_list_ref d;
30133 bool non_dwarf_expr = false;
30134
30135 if (av == NULL)
30136 return dwarf_strict ? -1 : 0;
30137 switch (AT_class (av))
30138 {
30139 case dw_val_class_loc_list:
30140 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30141 if (d->expr && non_dwarf_expression (d->expr))
30142 non_dwarf_expr = true;
30143 break;
30144 case dw_val_class_view_list:
30145 gcc_unreachable ();
30146 case dw_val_class_loc:
30147 lv = AT_loc (av);
30148 if (lv == NULL)
30149 return dwarf_strict ? -1 : 0;
30150 if (non_dwarf_expression (lv))
30151 non_dwarf_expr = true;
30152 break;
30153 default:
30154 return dwarf_strict ? -1 : 0;
30155 }
30156
30157 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30158 into DW_OP_call4 or DW_OP_GNU_variable_value into
30159 DW_OP_call4 DW_OP_deref, do so. */
30160 if (!non_dwarf_expr
30161 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30162 {
30163 l->dw_loc_opc = DW_OP_call4;
30164 if (l->dw_loc_next)
30165 l->dw_loc_next = NULL;
30166 else
30167 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30168 return 0;
30169 }
30170
30171 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30172 copy over the DW_AT_location attribute from die to a. */
30173 if (l->dw_loc_next != NULL)
30174 {
30175 a->dw_attr_val = av->dw_attr_val;
30176 return 1;
30177 }
30178
30179 dw_loc_list_ref list, *p;
30180 switch (AT_class (av))
30181 {
30182 case dw_val_class_loc_list:
30183 p = &list;
30184 list = NULL;
30185 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30186 {
30187 lv = copy_deref_exprloc (d->expr);
30188 if (lv)
30189 {
30190 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30191 p = &(*p)->dw_loc_next;
30192 }
30193 else if (!dwarf_strict && d->expr)
30194 return 0;
30195 }
30196 if (list == NULL)
30197 return dwarf_strict ? -1 : 0;
30198 a->dw_attr_val.val_class = dw_val_class_loc_list;
30199 gen_llsym (list);
30200 *AT_loc_list_ptr (a) = list;
30201 return 1;
30202 case dw_val_class_loc:
30203 lv = copy_deref_exprloc (AT_loc (av));
30204 if (lv == NULL)
30205 return dwarf_strict ? -1 : 0;
30206 a->dw_attr_val.v.val_loc = lv;
30207 return 1;
30208 default:
30209 gcc_unreachable ();
30210 }
30211 }
30212
30213 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30214    an address in the .rodata section if the string literal is emitted
30215    there; if it isn't found in .rodata, remove the containing location
30216    list or replace DW_AT_const_value with DW_AT_location and an empty
30217    location expression. Similarly for SYMBOL_REFs, keep only those
30218    that refer to something that has been emitted in the current CU. */
30219
30220 static void
30221 resolve_addr (dw_die_ref die)
30222 {
30223 dw_die_ref c;
30224 dw_attr_node *a;
30225 dw_loc_list_ref *curr, *start, loc;
30226 unsigned ix;
30227 bool remove_AT_byte_size = false;
30228
30229 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30230 switch (AT_class (a))
30231 {
30232 case dw_val_class_loc_list:
30233 start = curr = AT_loc_list_ptr (a);
30234 loc = *curr;
30235 gcc_assert (loc);
30236 /* The same list can be referenced more than once. See if we have
30237 already recorded the result from a previous pass. */
30238 if (loc->replaced)
30239 *curr = loc->dw_loc_next;
30240 else if (!loc->resolved_addr)
30241 {
30242 /* As things stand, we do not expect or allow one die to
30243 reference a suffix of another die's location list chain.
30244 References must be identical or completely separate.
30245 There is therefore no need to cache the result of this
30246 pass on any list other than the first; doing so
30247 would lead to unnecessary writes. */
30248 while (*curr)
30249 {
30250 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30251 if (!resolve_addr_in_expr (a, (*curr)->expr))
30252 {
30253 dw_loc_list_ref next = (*curr)->dw_loc_next;
30254 dw_loc_descr_ref l = (*curr)->expr;
30255
30256 if (next && (*curr)->ll_symbol)
30257 {
30258 gcc_assert (!next->ll_symbol);
30259 next->ll_symbol = (*curr)->ll_symbol;
30260 next->vl_symbol = (*curr)->vl_symbol;
30261 }
30262 if (dwarf_split_debug_info)
30263 remove_loc_list_addr_table_entries (l);
30264 *curr = next;
30265 }
30266 else
30267 {
30268 mark_base_types ((*curr)->expr);
30269 curr = &(*curr)->dw_loc_next;
30270 }
30271 }
30272 if (loc == *start)
30273 loc->resolved_addr = 1;
30274 else
30275 {
30276 loc->replaced = 1;
30277 loc->dw_loc_next = *start;
30278 }
30279 }
30280 if (!*start)
30281 {
30282 remove_AT (die, a->dw_attr);
30283 ix--;
30284 }
30285 break;
30286 case dw_val_class_view_list:
30287 {
30288 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30289 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30290 dw_val_node *llnode
30291 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30292 /* If we no longer have a loclist, or it no longer needs
30293 views, drop this attribute. */
30294 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30295 {
30296 remove_AT (die, a->dw_attr);
30297 ix--;
30298 }
30299 break;
30300 }
30301 case dw_val_class_loc:
30302 {
30303 dw_loc_descr_ref l = AT_loc (a);
30304 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30305 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30306 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30307 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30308 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30309 with DW_FORM_ref referencing the same DIE as
30310 DW_OP_GNU_variable_value used to reference. */
30311 if (a->dw_attr == DW_AT_string_length
30312 && l
30313 && l->dw_loc_opc == DW_OP_GNU_variable_value
30314 && (l->dw_loc_next == NULL
30315 || (l->dw_loc_next->dw_loc_next == NULL
30316 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30317 {
30318 switch (optimize_string_length (a))
30319 {
30320 case -1:
30321 remove_AT (die, a->dw_attr);
30322 ix--;
30323 /* If we drop DW_AT_string_length, we need to drop also
30324 DW_AT_{string_length_,}byte_size. */
30325 remove_AT_byte_size = true;
30326 continue;
30327 default:
30328 break;
30329 case 1:
30330 /* Even if we keep the optimized DW_AT_string_length,
30331 it might have changed AT_class, so process it again. */
30332 ix--;
30333 continue;
30334 }
30335 }
30336 /* For -gdwarf-2 don't attempt to optimize
30337 DW_AT_data_member_location containing
30338 DW_OP_plus_uconst - older consumers might
30339 rely on it being that op instead of a more complex,
30340 but shorter, location description. */
30341 if ((dwarf_version > 2
30342 || a->dw_attr != DW_AT_data_member_location
30343 || l == NULL
30344 || l->dw_loc_opc != DW_OP_plus_uconst
30345 || l->dw_loc_next != NULL)
30346 && !resolve_addr_in_expr (a, l))
30347 {
30348 if (dwarf_split_debug_info)
30349 remove_loc_list_addr_table_entries (l);
30350 if (l != NULL
30351 && l->dw_loc_next == NULL
30352 && l->dw_loc_opc == DW_OP_addr
30353 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30354 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30355 && a->dw_attr == DW_AT_location)
30356 {
30357 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30358 remove_AT (die, a->dw_attr);
30359 ix--;
30360 optimize_location_into_implicit_ptr (die, decl);
30361 break;
30362 }
30363 if (a->dw_attr == DW_AT_string_length)
30364 /* If we drop DW_AT_string_length, we need to drop also
30365 DW_AT_{string_length_,}byte_size. */
30366 remove_AT_byte_size = true;
30367 remove_AT (die, a->dw_attr);
30368 ix--;
30369 }
30370 else
30371 mark_base_types (l);
30372 }
30373 break;
30374 case dw_val_class_addr:
30375 if (a->dw_attr == DW_AT_const_value
30376 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30377 {
30378 if (AT_index (a) != NOT_INDEXED)
30379 remove_addr_table_entry (a->dw_attr_val.val_entry);
30380 remove_AT (die, a->dw_attr);
30381 ix--;
30382 }
30383 if ((die->die_tag == DW_TAG_call_site
30384 && a->dw_attr == DW_AT_call_origin)
30385 || (die->die_tag == DW_TAG_GNU_call_site
30386 && a->dw_attr == DW_AT_abstract_origin))
30387 {
30388 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30389 dw_die_ref tdie = lookup_decl_die (tdecl);
30390 dw_die_ref cdie;
30391 if (tdie == NULL
30392 && DECL_EXTERNAL (tdecl)
30393 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30394 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30395 {
30396 dw_die_ref pdie = cdie;
30397 /* Make sure we don't add these DIEs into type units.
30398 We could emit skeleton DIEs for context (namespaces,
30399 outer structs/classes) and a skeleton DIE for the
30400 innermost context with DW_AT_signature pointing to the
30401 type unit. See PR78835. */
30402 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30403 pdie = pdie->die_parent;
30404 if (pdie == NULL)
30405 {
30406 /* Creating a full DIE for tdecl is overly expensive and,
30407    when in the LTO phase, at this point even wrong, as it can
30408    end up generating new type DIEs we didn't output, and
30409    optimize_external_refs would then crash.  */
30410 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30411 add_AT_flag (tdie, DW_AT_external, 1);
30412 add_AT_flag (tdie, DW_AT_declaration, 1);
30413 add_linkage_attr (tdie, tdecl);
30414 add_name_and_src_coords_attributes (tdie, tdecl, true);
30415 equate_decl_number_to_die (tdecl, tdie);
30416 }
30417 }
30418 if (tdie)
30419 {
30420 a->dw_attr_val.val_class = dw_val_class_die_ref;
30421 a->dw_attr_val.v.val_die_ref.die = tdie;
30422 a->dw_attr_val.v.val_die_ref.external = 0;
30423 }
30424 else
30425 {
30426 if (AT_index (a) != NOT_INDEXED)
30427 remove_addr_table_entry (a->dw_attr_val.val_entry);
30428 remove_AT (die, a->dw_attr);
30429 ix--;
30430 }
30431 }
30432 break;
30433 default:
30434 break;
30435 }
30436
30437 if (remove_AT_byte_size)
30438 remove_AT (die, dwarf_version >= 5
30439 ? DW_AT_string_length_byte_size
30440 : DW_AT_byte_size);
30441
30442 FOR_EACH_CHILD (die, c, resolve_addr (c));
30443 }
30444 \f
30445 /* Helper routines for optimize_location_lists.
30446    This pass tries to share identical location lists in the
30447    .debug_loc section. */
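/* Sketch of the idea (hypothetical example): if two variables end up with
   byte-for-byte identical location lists, hashing each list and looking it
   up in a shared hash table lets both DW_AT_location attributes point at
   the same .debug_loc list instead of emitting it twice.  */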
30448
30449 /* Iteratively hash operands of LOC opcode into HSTATE. */
30450
30451 static void
30452 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30453 {
30454 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30455 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30456
30457 switch (loc->dw_loc_opc)
30458 {
30459 case DW_OP_const4u:
30460 case DW_OP_const8u:
30461 if (loc->dtprel)
30462 goto hash_addr;
30463 /* FALLTHRU */
30464 case DW_OP_const1u:
30465 case DW_OP_const1s:
30466 case DW_OP_const2u:
30467 case DW_OP_const2s:
30468 case DW_OP_const4s:
30469 case DW_OP_const8s:
30470 case DW_OP_constu:
30471 case DW_OP_consts:
30472 case DW_OP_pick:
30473 case DW_OP_plus_uconst:
30474 case DW_OP_breg0:
30475 case DW_OP_breg1:
30476 case DW_OP_breg2:
30477 case DW_OP_breg3:
30478 case DW_OP_breg4:
30479 case DW_OP_breg5:
30480 case DW_OP_breg6:
30481 case DW_OP_breg7:
30482 case DW_OP_breg8:
30483 case DW_OP_breg9:
30484 case DW_OP_breg10:
30485 case DW_OP_breg11:
30486 case DW_OP_breg12:
30487 case DW_OP_breg13:
30488 case DW_OP_breg14:
30489 case DW_OP_breg15:
30490 case DW_OP_breg16:
30491 case DW_OP_breg17:
30492 case DW_OP_breg18:
30493 case DW_OP_breg19:
30494 case DW_OP_breg20:
30495 case DW_OP_breg21:
30496 case DW_OP_breg22:
30497 case DW_OP_breg23:
30498 case DW_OP_breg24:
30499 case DW_OP_breg25:
30500 case DW_OP_breg26:
30501 case DW_OP_breg27:
30502 case DW_OP_breg28:
30503 case DW_OP_breg29:
30504 case DW_OP_breg30:
30505 case DW_OP_breg31:
30506 case DW_OP_regx:
30507 case DW_OP_fbreg:
30508 case DW_OP_piece:
30509 case DW_OP_deref_size:
30510 case DW_OP_xderef_size:
30511 hstate.add_object (val1->v.val_int);
30512 break;
30513 case DW_OP_skip:
30514 case DW_OP_bra:
30515 {
30516 int offset;
30517
30518 gcc_assert (val1->val_class == dw_val_class_loc);
30519 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30520 hstate.add_object (offset);
30521 }
30522 break;
30523 case DW_OP_implicit_value:
30524 hstate.add_object (val1->v.val_unsigned);
30525 switch (val2->val_class)
30526 {
30527 case dw_val_class_const:
30528 hstate.add_object (val2->v.val_int);
30529 break;
30530 case dw_val_class_vec:
30531 {
30532 unsigned int elt_size = val2->v.val_vec.elt_size;
30533 unsigned int len = val2->v.val_vec.length;
30534
30535 hstate.add_int (elt_size);
30536 hstate.add_int (len);
30537 hstate.add (val2->v.val_vec.array, len * elt_size);
30538 }
30539 break;
30540 case dw_val_class_const_double:
30541 hstate.add_object (val2->v.val_double.low);
30542 hstate.add_object (val2->v.val_double.high);
30543 break;
30544 case dw_val_class_wide_int:
30545 hstate.add (val2->v.val_wide->get_val (),
30546 get_full_len (*val2->v.val_wide)
30547 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30548 break;
30549 case dw_val_class_addr:
30550 inchash::add_rtx (val2->v.val_addr, hstate);
30551 break;
30552 default:
30553 gcc_unreachable ();
30554 }
30555 break;
30556 case DW_OP_bregx:
30557 case DW_OP_bit_piece:
30558 hstate.add_object (val1->v.val_int);
30559 hstate.add_object (val2->v.val_int);
30560 break;
30561 case DW_OP_addr:
30562 hash_addr:
30563 if (loc->dtprel)
30564 {
30565 unsigned char dtprel = 0xd1;
30566 hstate.add_object (dtprel);
30567 }
30568 inchash::add_rtx (val1->v.val_addr, hstate);
30569 break;
30570 case DW_OP_GNU_addr_index:
30571 case DW_OP_addrx:
30572 case DW_OP_GNU_const_index:
30573 case DW_OP_constx:
30574 {
30575 if (loc->dtprel)
30576 {
30577 unsigned char dtprel = 0xd1;
30578 hstate.add_object (dtprel);
30579 }
30580 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30581 }
30582 break;
30583 case DW_OP_implicit_pointer:
30584 case DW_OP_GNU_implicit_pointer:
30585 hstate.add_int (val2->v.val_int);
30586 break;
30587 case DW_OP_entry_value:
30588 case DW_OP_GNU_entry_value:
30589 hstate.add_object (val1->v.val_loc);
30590 break;
30591 case DW_OP_regval_type:
30592 case DW_OP_deref_type:
30593 case DW_OP_GNU_regval_type:
30594 case DW_OP_GNU_deref_type:
30595 {
30596 unsigned int byte_size
30597 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30598 unsigned int encoding
30599 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30600 hstate.add_object (val1->v.val_int);
30601 hstate.add_object (byte_size);
30602 hstate.add_object (encoding);
30603 }
30604 break;
30605 case DW_OP_convert:
30606 case DW_OP_reinterpret:
30607 case DW_OP_GNU_convert:
30608 case DW_OP_GNU_reinterpret:
30609 if (val1->val_class == dw_val_class_unsigned_const)
30610 {
30611 hstate.add_object (val1->v.val_unsigned);
30612 break;
30613 }
30614 /* FALLTHRU */
30615 case DW_OP_const_type:
30616 case DW_OP_GNU_const_type:
30617 {
30618 unsigned int byte_size
30619 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30620 unsigned int encoding
30621 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30622 hstate.add_object (byte_size);
30623 hstate.add_object (encoding);
30624 if (loc->dw_loc_opc != DW_OP_const_type
30625 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30626 break;
30627 hstate.add_object (val2->val_class);
30628 switch (val2->val_class)
30629 {
30630 case dw_val_class_const:
30631 hstate.add_object (val2->v.val_int);
30632 break;
30633 case dw_val_class_vec:
30634 {
30635 unsigned int elt_size = val2->v.val_vec.elt_size;
30636 unsigned int len = val2->v.val_vec.length;
30637
30638 hstate.add_object (elt_size);
30639 hstate.add_object (len);
30640 hstate.add (val2->v.val_vec.array, len * elt_size);
30641 }
30642 break;
30643 case dw_val_class_const_double:
30644 hstate.add_object (val2->v.val_double.low);
30645 hstate.add_object (val2->v.val_double.high);
30646 break;
30647 case dw_val_class_wide_int:
30648 hstate.add (val2->v.val_wide->get_val (),
30649 get_full_len (*val2->v.val_wide)
30650 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30651 break;
30652 default:
30653 gcc_unreachable ();
30654 }
30655 }
30656 break;
30657
30658 default:
30659 /* Other codes have no operands. */
30660 break;
30661 }
30662 }
30663
30664 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30665
30666 static inline void
30667 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30668 {
30669 dw_loc_descr_ref l;
30670 bool sizes_computed = false;
30671 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30672 size_of_locs (loc);
30673
30674 for (l = loc; l != NULL; l = l->dw_loc_next)
30675 {
30676 enum dwarf_location_atom opc = l->dw_loc_opc;
30677 hstate.add_object (opc);
30678 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30679 {
30680 size_of_locs (loc);
30681 sizes_computed = true;
30682 }
30683 hash_loc_operands (l, hstate);
30684 }
30685 }
30686
30687 /* Compute hash of the whole location list LIST_HEAD. */
30688
30689 static inline void
30690 hash_loc_list (dw_loc_list_ref list_head)
30691 {
30692 dw_loc_list_ref curr = list_head;
30693 inchash::hash hstate;
30694
30695 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30696 {
30697 hstate.add (curr->begin, strlen (curr->begin) + 1);
30698 hstate.add (curr->end, strlen (curr->end) + 1);
30699 hstate.add_object (curr->vbegin);
30700 hstate.add_object (curr->vend);
30701 if (curr->section)
30702 hstate.add (curr->section, strlen (curr->section) + 1);
30703 hash_locs (curr->expr, hstate);
30704 }
30705 list_head->hash = hstate.end ();
30706 }
30707
30708 /* Return true if X and Y opcodes have the same operands. */
30709
30710 static inline bool
30711 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30712 {
30713 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30714 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30715 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30716 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30717
30718 switch (x->dw_loc_opc)
30719 {
30720 case DW_OP_const4u:
30721 case DW_OP_const8u:
30722 if (x->dtprel)
30723 goto hash_addr;
30724 /* FALLTHRU */
30725 case DW_OP_const1u:
30726 case DW_OP_const1s:
30727 case DW_OP_const2u:
30728 case DW_OP_const2s:
30729 case DW_OP_const4s:
30730 case DW_OP_const8s:
30731 case DW_OP_constu:
30732 case DW_OP_consts:
30733 case DW_OP_pick:
30734 case DW_OP_plus_uconst:
30735 case DW_OP_breg0:
30736 case DW_OP_breg1:
30737 case DW_OP_breg2:
30738 case DW_OP_breg3:
30739 case DW_OP_breg4:
30740 case DW_OP_breg5:
30741 case DW_OP_breg6:
30742 case DW_OP_breg7:
30743 case DW_OP_breg8:
30744 case DW_OP_breg9:
30745 case DW_OP_breg10:
30746 case DW_OP_breg11:
30747 case DW_OP_breg12:
30748 case DW_OP_breg13:
30749 case DW_OP_breg14:
30750 case DW_OP_breg15:
30751 case DW_OP_breg16:
30752 case DW_OP_breg17:
30753 case DW_OP_breg18:
30754 case DW_OP_breg19:
30755 case DW_OP_breg20:
30756 case DW_OP_breg21:
30757 case DW_OP_breg22:
30758 case DW_OP_breg23:
30759 case DW_OP_breg24:
30760 case DW_OP_breg25:
30761 case DW_OP_breg26:
30762 case DW_OP_breg27:
30763 case DW_OP_breg28:
30764 case DW_OP_breg29:
30765 case DW_OP_breg30:
30766 case DW_OP_breg31:
30767 case DW_OP_regx:
30768 case DW_OP_fbreg:
30769 case DW_OP_piece:
30770 case DW_OP_deref_size:
30771 case DW_OP_xderef_size:
30772 return valx1->v.val_int == valy1->v.val_int;
30773 case DW_OP_skip:
30774 case DW_OP_bra:
30775 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30776 can cause irrelevant differences in dw_loc_addr. */
30777 gcc_assert (valx1->val_class == dw_val_class_loc
30778 && valy1->val_class == dw_val_class_loc
30779 && (dwarf_split_debug_info
30780 || x->dw_loc_addr == y->dw_loc_addr));
30781 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30782 case DW_OP_implicit_value:
30783 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30784 || valx2->val_class != valy2->val_class)
30785 return false;
30786 switch (valx2->val_class)
30787 {
30788 case dw_val_class_const:
30789 return valx2->v.val_int == valy2->v.val_int;
30790 case dw_val_class_vec:
30791 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30792 && valx2->v.val_vec.length == valy2->v.val_vec.length
30793 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30794 valx2->v.val_vec.elt_size
30795 * valx2->v.val_vec.length) == 0;
30796 case dw_val_class_const_double:
30797 return valx2->v.val_double.low == valy2->v.val_double.low
30798 && valx2->v.val_double.high == valy2->v.val_double.high;
30799 case dw_val_class_wide_int:
30800 return *valx2->v.val_wide == *valy2->v.val_wide;
30801 case dw_val_class_addr:
30802 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
30803 default:
30804 gcc_unreachable ();
30805 }
30806 case DW_OP_bregx:
30807 case DW_OP_bit_piece:
30808 return valx1->v.val_int == valy1->v.val_int
30809 && valx2->v.val_int == valy2->v.val_int;
30810 case DW_OP_addr:
30811 hash_addr:
30812 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
30813 case DW_OP_GNU_addr_index:
30814 case DW_OP_addrx:
30815 case DW_OP_GNU_const_index:
30816 case DW_OP_constx:
30817 {
30818 rtx ax1 = valx1->val_entry->addr.rtl;
30819 rtx ay1 = valy1->val_entry->addr.rtl;
30820 return rtx_equal_p (ax1, ay1);
30821 }
30822 case DW_OP_implicit_pointer:
30823 case DW_OP_GNU_implicit_pointer:
30824 return valx1->val_class == dw_val_class_die_ref
30825 && valx1->val_class == valy1->val_class
30826 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
30827 && valx2->v.val_int == valy2->v.val_int;
30828 case DW_OP_entry_value:
30829 case DW_OP_GNU_entry_value:
30830 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
30831 case DW_OP_const_type:
30832 case DW_OP_GNU_const_type:
30833 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
30834 || valx2->val_class != valy2->val_class)
30835 return false;
30836 switch (valx2->val_class)
30837 {
30838 case dw_val_class_const:
30839 return valx2->v.val_int == valy2->v.val_int;
30840 case dw_val_class_vec:
30841 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30842 && valx2->v.val_vec.length == valy2->v.val_vec.length
30843 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30844 valx2->v.val_vec.elt_size
30845 * valx2->v.val_vec.length) == 0;
30846 case dw_val_class_const_double:
30847 return valx2->v.val_double.low == valy2->v.val_double.low
30848 && valx2->v.val_double.high == valy2->v.val_double.high;
30849 case dw_val_class_wide_int:
30850 return *valx2->v.val_wide == *valy2->v.val_wide;
30851 default:
30852 gcc_unreachable ();
30853 }
30854 case DW_OP_regval_type:
30855 case DW_OP_deref_type:
30856 case DW_OP_GNU_regval_type:
30857 case DW_OP_GNU_deref_type:
30858 return valx1->v.val_int == valy1->v.val_int
30859 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
30860 case DW_OP_convert:
30861 case DW_OP_reinterpret:
30862 case DW_OP_GNU_convert:
30863 case DW_OP_GNU_reinterpret:
30864 if (valx1->val_class != valy1->val_class)
30865 return false;
30866 if (valx1->val_class == dw_val_class_unsigned_const)
30867 return valx1->v.val_unsigned == valy1->v.val_unsigned;
30868 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30869 case DW_OP_GNU_parameter_ref:
30870 return valx1->val_class == dw_val_class_die_ref
30871 && valx1->val_class == valy1->val_class
30872 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
30873 default:
30874 /* Other codes have no operands. */
30875 return true;
30876 }
30877 }
30878
30879 /* Return true if DWARF location expressions X and Y are the same. */
30880
30881 static inline bool
30882 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
30883 {
30884 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
30885 if (x->dw_loc_opc != y->dw_loc_opc
30886 || x->dtprel != y->dtprel
30887 || !compare_loc_operands (x, y))
30888 break;
30889 return x == NULL && y == NULL;
30890 }
30891
30892 /* Hashtable helpers. */
30893
30894 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
30895 {
30896 static inline hashval_t hash (const dw_loc_list_struct *);
30897 static inline bool equal (const dw_loc_list_struct *,
30898 const dw_loc_list_struct *);
30899 };
30900
30901 /* Return precomputed hash of location list X. */
30902
30903 inline hashval_t
30904 loc_list_hasher::hash (const dw_loc_list_struct *x)
30905 {
30906 return x->hash;
30907 }
30908
30909 /* Return true if location lists A and B are the same. */
30910
30911 inline bool
30912 loc_list_hasher::equal (const dw_loc_list_struct *a,
30913 const dw_loc_list_struct *b)
30914 {
30915 if (a == b)
30916 return 1;
30917 if (a->hash != b->hash)
30918 return 0;
30919 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
30920 if (strcmp (a->begin, b->begin) != 0
30921 || strcmp (a->end, b->end) != 0
30922 || (a->section == NULL) != (b->section == NULL)
30923 || (a->section && strcmp (a->section, b->section) != 0)
30924 || a->vbegin != b->vbegin || a->vend != b->vend
30925 || !compare_locs (a->expr, b->expr))
30926 break;
30927 return a == NULL && b == NULL;
30928 }
30929
30930 typedef hash_table<loc_list_hasher> loc_list_hash_type;
30931
30932
30933 /* Recursively optimize location lists referenced from DIE
30934 children and share them whenever possible. */
30935
30936 static void
30937 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
30938 {
30939 dw_die_ref c;
30940 dw_attr_node *a;
30941 unsigned ix;
30942 dw_loc_list_struct **slot;
30943 bool drop_locviews = false;
30944 bool has_locviews = false;
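  /* DROP_LOCVIEWS notes that some location list on this DIE carried a
     view list symbol it no longer needs (or cannot keep once shared);
     HAS_LOCVIEWS notes that the DIE has a DW_AT_GNU_locviews attribute,
     which is removed below when the views are dropped.  */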
30945
30946 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30947 if (AT_class (a) == dw_val_class_loc_list)
30948 {
30949 dw_loc_list_ref list = AT_loc_list (a);
30950 /* TODO: perform some optimizations here, before hashing
30951 it and storing into the hash table. */
30952 hash_loc_list (list);
30953 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
30954 if (*slot == NULL)
30955 {
30956 *slot = list;
30957 if (loc_list_has_views (list))
30958 gcc_assert (list->vl_symbol);
30959 else if (list->vl_symbol)
30960 {
30961 drop_locviews = true;
30962 list->vl_symbol = NULL;
30963 }
30964 }
30965 else
30966 {
30967 if (list->vl_symbol && !(*slot)->vl_symbol)
30968 drop_locviews = true;
30969 a->dw_attr_val.v.val_loc_list = *slot;
30970 }
30971 }
30972 else if (AT_class (a) == dw_val_class_view_list)
30973 {
30974 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30975 has_locviews = true;
30976 }
30977
30978
30979 if (drop_locviews && has_locviews)
30980 remove_AT (die, DW_AT_GNU_locviews);
30981
30982 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
30983 }
30984
30985
30986 /* Recursively add address table entries for the begin labels of the
30987 location list entries referenced from DIE and its children. */
30988
30989 static void
30990 index_location_lists (dw_die_ref die)
30991 {
30992 dw_die_ref c;
30993 dw_attr_node *a;
30994 unsigned ix;
30995
30996 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30997 if (AT_class (a) == dw_val_class_loc_list)
30998 {
30999 dw_loc_list_ref list = AT_loc_list (a);
31000 dw_loc_list_ref curr;
31001 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
31002 {
31003 /* Don't index an entry that has already been indexed
31004 or won't be output. Make sure skip_loc_list_entry doesn't
31005 call size_of_locs, because that might cause a circular dependency:
31006 index_location_lists requires address table indexes to be
31007 computed, yet it adds new indexes through add_addr_table_entry,
31008 while the address table index computation requires no new additions
31009 to the hash table. In the rare case of a DWARF[234] location
31010 expression >= 64KB, we'll just waste an unused address table
31011 entry for it. */
31012 if (curr->begin_entry != NULL
31013 || skip_loc_list_entry (curr))
31014 continue;
31015
31016 curr->begin_entry
31017 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
31018 }
31019 }
31020
31021 FOR_EACH_CHILD (die, c, index_location_lists (c));
31022 }
31023
31024 /* Optimize location lists referenced from DIE
31025 children and share them whenever possible. */
31026
31027 static void
31028 optimize_location_lists (dw_die_ref die)
31029 {
31030 loc_list_hash_type htab (500);
31031 optimize_location_lists_1 (die, &htab);
31032 }
31033 \f
31034 /* Traverse the limbo die list, and add parent/child links. The only
31035 dies without parents that should be here are concrete instances of
31036 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
31037 For concrete instances, we can get the parent die from the abstract
31038 instance. */
31039
31040 static void
31041 flush_limbo_die_list (void)
31042 {
31043 limbo_die_node *node;
31044
31045 /* get_context_die calls force_decl_die, which can put new DIEs on the
31046 limbo list in LTO mode when nested functions are put in a different
31047 partition than that of their parent function. */
31048 while ((node = limbo_die_list))
31049 {
31050 dw_die_ref die = node->die;
31051 limbo_die_list = node->next;
31052
31053 if (die->die_parent == NULL)
31054 {
31055 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31056
31057 if (origin && origin->die_parent)
31058 add_child_die (origin->die_parent, die);
31059 else if (is_cu_die (die))
31060 ;
31061 else if (seen_error ())
31062 /* It's OK to be confused by errors in the input. */
31063 add_child_die (comp_unit_die (), die);
31064 else
31065 {
31066 /* In certain situations, the lexical block containing a
31067 nested function can be optimized away, which results
31068 in the nested function die being orphaned. Likewise
31069 with the return type of that nested function. Force
31070 this to be a child of the containing function.
31071
31072 It may happen that even the containing function got fully
31073 inlined and optimized out. In that case we are lost and
31074 assign the empty child. This should not be a big issue as
31075 the function is likely unreachable too. */
31076 gcc_assert (node->created_for);
31077
31078 if (DECL_P (node->created_for))
31079 origin = get_context_die (DECL_CONTEXT (node->created_for));
31080 else if (TYPE_P (node->created_for))
31081 origin = scope_die_for (node->created_for, comp_unit_die ());
31082 else
31083 origin = comp_unit_die ();
31084
31085 add_child_die (origin, die);
31086 }
31087 }
31088 }
31089 }
31090
31091 /* Reset DIEs so we can output them again. */
31092
31093 static void
31094 reset_dies (dw_die_ref die)
31095 {
31096 dw_die_ref c;
31097
31098 /* Remove stuff we re-generate. */
31099 die->die_mark = 0;
31100 die->die_offset = 0;
31101 die->die_abbrev = 0;
31102 remove_AT (die, DW_AT_sibling);
31103
31104 FOR_EACH_CHILD (die, c, reset_dies (c));
31105 }
31106
31107 /* Output stuff that DWARF requires at the end of every file,
31108 and generate the DWARF 2 debugging info. */
31109
31110 static void
31111 dwarf2out_finish (const char *)
31112 {
31113 comdat_type_node *ctnode;
31114 dw_die_ref main_comp_unit_die;
31115 unsigned char checksum[16];
31116 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31117
31118 /* Flush out any latecomers to the limbo party. */
31119 flush_limbo_die_list ();
31120
31121 if (inline_entry_data_table)
31122 gcc_assert (inline_entry_data_table->elements () == 0);
31123
31124 if (flag_checking)
31125 {
31126 verify_die (comp_unit_die ());
31127 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31128 verify_die (node->die);
31129 }
31130
31131 /* We shouldn't have any symbols with delayed asm names for
31132 DIEs generated after early finish. */
31133 gcc_assert (deferred_asm_name == NULL);
31134
31135 gen_remaining_tmpl_value_param_die_attribute ();
31136
31137 if (flag_generate_lto || flag_generate_offload)
31138 {
31139 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31140
31141 /* Prune stuff so that dwarf2out_finish runs successfully
31142 for the fat part of the object. */
31143 reset_dies (comp_unit_die ());
31144 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31145 reset_dies (node->die);
31146
31147 hash_table<comdat_type_hasher> comdat_type_table (100);
31148 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31149 {
31150 comdat_type_node **slot
31151 = comdat_type_table.find_slot (ctnode, INSERT);
31152
31153 /* Don't reset types twice. */
31154 if (*slot != HTAB_EMPTY_ENTRY)
31155 continue;
31156
31157 /* Reset the DIEs of this comdat type unit so they can be output
31158 again for the fat part of the object. */
31160 if (debug_info_level >= DINFO_LEVEL_TERSE)
31161 reset_dies (ctnode->root_die);
31162
31163 *slot = ctnode;
31164 }
31165
31166 /* Reset the CU DIE symbol so we don't output it twice. */
31167 comp_unit_die ()->die_id.die_symbol = NULL;
31168
31169 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31170 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31171 if (have_macinfo)
31172 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31173
31174 /* Remove indirect string decisions. */
31175 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31176 }
31177
31178 #if ENABLE_ASSERT_CHECKING
31179 {
31180 dw_die_ref die = comp_unit_die (), c;
31181 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31182 }
31183 #endif
31184 resolve_addr (comp_unit_die ());
31185 move_marked_base_types ();
31186
31187 /* Initialize sections and labels used for actual assembler output. */
31188 unsigned generation = init_sections_and_labels (false);
31189
31190 /* Traverse the DIEs and add sibling attributes to those DIEs that
31191 have children. */
31192 add_sibling_attributes (comp_unit_die ());
31193 limbo_die_node *node;
31194 for (node = cu_die_list; node; node = node->next)
31195 add_sibling_attributes (node->die);
31196 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31197 add_sibling_attributes (ctnode->root_die);
31198
31199 /* When splitting DWARF info, we put some attributes in the
31200 skeleton compile_unit DIE that remains in the .o, while
31201 most attributes go in the DWO compile_unit_die. */
31202 if (dwarf_split_debug_info)
31203 {
31204 limbo_die_node *cu;
31205 main_comp_unit_die = gen_compile_unit_die (NULL);
31206 if (dwarf_version >= 5)
31207 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
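      /* gen_compile_unit_die placed the new skeleton DIE on the limbo
	 list; move its node over to cu_die_list instead.  */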
31208 cu = limbo_die_list;
31209 gcc_assert (cu->die == main_comp_unit_die);
31210 limbo_die_list = limbo_die_list->next;
31211 cu->next = cu_die_list;
31212 cu_die_list = cu;
31213 }
31214 else
31215 main_comp_unit_die = comp_unit_die ();
31216
31217 /* Output a terminator label for the .text section. */
31218 switch_to_section (text_section);
31219 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31220 if (cold_text_section)
31221 {
31222 switch_to_section (cold_text_section);
31223 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31224 }
31225
31226 /* We can only use the low/high_pc attributes if all of the code was
31227 in .text. */
31228 if (!have_multiple_function_sections
31229 || (dwarf_version < 3 && dwarf_strict))
31230 {
31231 /* Don't add if the CU has no associated code. */
31232 if (text_section_used)
31233 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31234 text_end_label, true);
31235 }
31236 else
31237 {
31238 unsigned fde_idx;
31239 dw_fde_ref fde;
31240 bool range_list_added = false;
31241
31242 if (text_section_used)
31243 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31244 text_end_label, &range_list_added, true);
31245 if (cold_text_section_used)
31246 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31247 cold_end_label, &range_list_added, true);
31248
31249 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31250 {
31251 if (DECL_IGNORED_P (fde->decl))
31252 continue;
31253 if (!fde->in_std_section)
31254 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31255 fde->dw_fde_end, &range_list_added,
31256 true);
31257 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31258 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31259 fde->dw_fde_second_end, &range_list_added,
31260 true);
31261 }
31262
31263 if (range_list_added)
31264 {
31265 /* We need to give .debug_loc and .debug_ranges an appropriate
31266 "base address". Use zero so that these addresses become
31267 absolute. Historically, we've emitted the unexpected
31268 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31269 Emit both to give time for other tools to adapt. */
31270 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31271 if (! dwarf_strict && dwarf_version < 4)
31272 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31273
31274 add_ranges (NULL);
31275 }
31276 }
31277
31278 /* AIX Assembler inserts the length, so adjust the reference to match the
31279 offset expected by debuggers. */
31280 strcpy (dl_section_ref, debug_line_section_label);
31281 if (XCOFF_DEBUGGING_INFO)
31282 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31283
31284 if (debug_info_level >= DINFO_LEVEL_TERSE)
31285 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31286 dl_section_ref);
31287
31288 if (have_macinfo)
31289 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31290 macinfo_section_label);
31291
31292 if (dwarf_split_debug_info)
31293 {
31294 if (have_location_lists)
31295 {
31296 if (dwarf_version >= 5)
31297 add_AT_loclistsptr (comp_unit_die (), DW_AT_loclists_base,
31298 loc_section_label);
31299 /* optimize_location_lists calculates the size of the lists,
31300 so index them first, and assign indices to the entries.
31301 Although optimize_location_lists will remove entries from
31302 the table, it only does so for duplicates, and therefore
31303 only reduces ref_counts to 1. */
31304 index_location_lists (comp_unit_die ());
31305 }
31306
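      /* Assign consecutive indexes to the address table entries that are
	 still referenced; DW_OP_addrx / DW_OP_GNU_addr_index operands
	 refer to entries by these indexes.  */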
31307 if (addr_index_table != NULL)
31308 {
31309 unsigned int index = 0;
31310 addr_index_table
31311 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31312 (&index);
31313 }
31314 }
31315
31316 loc_list_idx = 0;
31317 if (have_location_lists)
31318 {
31319 optimize_location_lists (comp_unit_die ());
31320 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31321 if (dwarf_version >= 5 && dwarf_split_debug_info)
31322 assign_location_list_indexes (comp_unit_die ());
31323 }
31324
31325 save_macinfo_strings ();
31326
31327 if (dwarf_split_debug_info)
31328 {
31329 unsigned int index = 0;
31330
31331 /* Add attributes common to skeleton compile_units and
31332 type_units. Because these attributes include strings, it
31333 must be done before freezing the string table. Top-level
31334 skeleton die attrs are added when the skeleton type unit is
31335 created, so ensure it is created by this point. */
31336 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31337 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31338 }
31339
31340 /* Output all of the compilation units. We put the main one last so that
31341 the offsets are available to output_pubnames. */
31342 for (node = cu_die_list; node; node = node->next)
31343 output_comp_unit (node->die, 0, NULL);
31344
31345 hash_table<comdat_type_hasher> comdat_type_table (100);
31346 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31347 {
31348 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31349
31350 /* Don't output duplicate types. */
31351 if (*slot != HTAB_EMPTY_ENTRY)
31352 continue;
31353
31354 /* Add a pointer to the line table for the main compilation unit
31355 so that the debugger can make sense of DW_AT_decl_file
31356 attributes. */
31357 if (debug_info_level >= DINFO_LEVEL_TERSE)
31358 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31359 (!dwarf_split_debug_info
31360 ? dl_section_ref
31361 : debug_skeleton_line_section_label));
31362
31363 output_comdat_type_unit (ctnode);
31364 *slot = ctnode;
31365 }
31366
31367 if (dwarf_split_debug_info)
31368 {
31369 int mark;
31370 struct md5_ctx ctx;
31371
31372 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31373 index_rnglists ();
31374
31375 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31376 md5_init_ctx (&ctx);
31377 mark = 0;
31378 die_checksum (comp_unit_die (), &ctx, &mark);
31379 unmark_all_dies (comp_unit_die ());
31380 md5_finish_ctx (&ctx, checksum);
31381
31382 if (dwarf_version < 5)
31383 {
31384 /* Use the first 8 bytes of the checksum as the dwo_id,
31385 and add it to both comp-unit DIEs. */
31386 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31387 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31388 }
31389
31390 /* Add the base offset of the ranges table to the skeleton
31391 comp-unit DIE. */
31392 if (!vec_safe_is_empty (ranges_table))
31393 {
31394 if (dwarf_version >= 5)
31395 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31396 ranges_base_label);
31397 else
31398 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31399 ranges_section_label);
31400 }
31401
31402 switch_to_section (debug_addr_section);
31403 /* GNU DebugFission (https://gcc.gnu.org/wiki/DebugFission),
31404 which GCC uses to implement -gsplit-dwarf as a GNU DWARF extension
31405 prior to DWARF5, didn't have a header for .debug_addr units.
31406 DWARF5 specifies a small header when address tables are used. */
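      /* The header emitted below is an optional 64-bit DWARF initial
	 length escape, the unit length, a 2-byte version, a 1-byte
	 address size and a 1-byte segment selector size; the "+ 4" added
	 to ADDRS_LENGTH below accounts for the version, address size and
	 segment selector bytes, which the length field covers.  */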
31407 if (dwarf_version >= 5)
31408 {
31409 unsigned int last_idx = 0;
31410 unsigned long addrs_length;
31411
31412 addr_index_table->traverse_noresize
31413 <unsigned int *, count_index_addrs> (&last_idx);
31414 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
31415
31416 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31417 dw2_asm_output_data (4, 0xffffffff,
31418 "Escape value for 64-bit DWARF extension");
31419 dw2_asm_output_data (DWARF_OFFSET_SIZE, addrs_length,
31420 "Length of Address Unit");
31421 dw2_asm_output_data (2, 5, "DWARF addr version");
31422 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
31423 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
31424 }
31425 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
31426 output_addr_table ();
31427 }
31428
31429 /* Output the main compilation unit if non-empty or if .debug_macinfo
31430 or .debug_macro will be emitted. */
31431 output_comp_unit (comp_unit_die (), have_macinfo,
31432 dwarf_split_debug_info ? checksum : NULL);
31433
31434 if (dwarf_split_debug_info && info_section_emitted)
31435 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31436
31437 /* Output the abbreviation table. */
31438 if (vec_safe_length (abbrev_die_table) != 1)
31439 {
31440 switch_to_section (debug_abbrev_section);
31441 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31442 output_abbrev_section ();
31443 }
31444
31445 /* Output location list section if necessary. */
31446 if (have_location_lists)
31447 {
31448 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31449 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31450 /* Output the location lists info. */
31451 switch_to_section (debug_loc_section);
31452 if (dwarf_version >= 5)
31453 {
31454 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 1);
31455 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 2);
31456 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31457 dw2_asm_output_data (4, 0xffffffff,
31458 "Initial length escape value indicating "
31459 "64-bit DWARF extension");
31460 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31461 "Length of Location Lists");
31462 ASM_OUTPUT_LABEL (asm_out_file, l1);
31463 output_dwarf_version ();
31464 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31465 dw2_asm_output_data (1, 0, "Segment Size");
31466 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31467 "Offset Entry Count");
31468 }
31469 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
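      /* For -gsplit-dwarf, first emit the offset entry table that
	 DW_FORM_loclistx values index into; the assert checks that the
	 number of offsets emitted matches the indexes assigned earlier
	 by assign_location_list_indexes.  */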
31470 if (dwarf_version >= 5 && dwarf_split_debug_info)
31471 {
31472 unsigned int save_loc_list_idx = loc_list_idx;
31473 loc_list_idx = 0;
31474 output_loclists_offsets (comp_unit_die ());
31475 gcc_assert (save_loc_list_idx == loc_list_idx);
31476 }
31477 output_location_lists (comp_unit_die ());
31478 if (dwarf_version >= 5)
31479 ASM_OUTPUT_LABEL (asm_out_file, l2);
31480 }
31481
31482 output_pubtables ();
31483
31484 /* Output the address range information if a CU (.debug_info section)
31485 was emitted. We output an empty table even if we had no functions
31486 to put in it. This is because the consumer has no way to tell the
31487 difference between an empty table that we omitted and failure to
31488 generate a table that would have contained data. */
31489 if (info_section_emitted)
31490 {
31491 switch_to_section (debug_aranges_section);
31492 output_aranges ();
31493 }
31494
31495 /* Output ranges section if necessary. */
31496 if (!vec_safe_is_empty (ranges_table))
31497 {
31498 if (dwarf_version >= 5)
31499 output_rnglists (generation);
31500 else
31501 output_ranges ();
31502 }
31503
31504 /* Have to end the macro section. */
31505 if (have_macinfo)
31506 {
31507 switch_to_section (debug_macinfo_section);
31508 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31509 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31510 : debug_skeleton_line_section_label, false);
31511 dw2_asm_output_data (1, 0, "End compilation unit");
31512 }
31513
31514 /* Output the source line correspondence table. We must do this
31515 even if there is no line information. Otherwise, on an empty
31516 translation unit, we will generate a present, but empty,
31517 .debug_info section. IRIX 6.5 `nm' will then complain when
31518 examining the file. This is done late so that any filenames
31519 used by the debug_info section are marked as 'used'. */
31520 switch_to_section (debug_line_section);
31521 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31522 if (! output_asm_line_debug_info ())
31523 output_line_info (false);
31524
31525 if (dwarf_split_debug_info && info_section_emitted)
31526 {
31527 switch_to_section (debug_skeleton_line_section);
31528 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31529 output_line_info (true);
31530 }
31531
31532 /* If we emitted any indirect strings, output the string table too. */
31533 if (debug_str_hash || skeleton_debug_str_hash)
31534 output_indirect_strings ();
31535 if (debug_line_str_hash)
31536 {
31537 switch_to_section (debug_line_str_section);
31538 const enum dwarf_form form = DW_FORM_line_strp;
31539 debug_line_str_hash->traverse<enum dwarf_form,
31540 output_indirect_string> (form);
31541 }
31542
31543 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31544 symview_upper_bound = 0;
31545 if (zero_view_p)
31546 bitmap_clear (zero_view_p);
31547 }
31548
31549 /* Returns a hash value for X (which really is a variable_value_struct). */
31550
31551 inline hashval_t
31552 variable_value_hasher::hash (variable_value_struct *x)
31553 {
31554 return (hashval_t) x->decl_id;
31555 }
31556
31557 /* Return true if the decl_id of variable_value_struct X is the same as
31558 the UID of decl Y. */
31559
31560 inline bool
31561 variable_value_hasher::equal (variable_value_struct *x, tree y)
31562 {
31563 return x->decl_id == DECL_UID (y);
31564 }
31565
31566 /* Helper function for resolve_variable_value; handle
31567 DW_OP_GNU_variable_value in one location expression.
31568 Return true if the exprloc has been changed into a loclist. */
31569
31570 static bool
31571 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31572 {
31573 dw_loc_descr_ref next;
31574 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31575 {
31576 next = loc->dw_loc_next;
31577 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31578 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31579 continue;
31580
31581 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31582 if (DECL_CONTEXT (decl) != current_function_decl)
31583 continue;
31584
31585 dw_die_ref ref = lookup_decl_die (decl);
31586 if (ref)
31587 {
31588 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31589 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31590 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31591 continue;
31592 }
31593 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31594 if (l == NULL)
31595 continue;
31596 if (l->dw_loc_next)
31597 {
31598 if (AT_class (a) != dw_val_class_loc)
31599 continue;
31600 switch (a->dw_attr)
31601 {
31602 /* The following attributes allow both exprloc and loclist
31603 classes, so we can change them into a loclist. */
31604 case DW_AT_location:
31605 case DW_AT_string_length:
31606 case DW_AT_return_addr:
31607 case DW_AT_data_member_location:
31608 case DW_AT_frame_base:
31609 case DW_AT_segment:
31610 case DW_AT_static_link:
31611 case DW_AT_use_location:
31612 case DW_AT_vtable_elem_location:
31613 if (prev)
31614 {
31615 prev->dw_loc_next = NULL;
31616 prepend_loc_descr_to_each (l, AT_loc (a));
31617 }
31618 if (next)
31619 add_loc_descr_to_each (l, next);
31620 a->dw_attr_val.val_class = dw_val_class_loc_list;
31621 a->dw_attr_val.val_entry = NULL;
31622 a->dw_attr_val.v.val_loc_list = l;
31623 have_location_lists = true;
31624 return true;
31625 /* The following attributes allow both exprloc and reference
31626 classes, so if the whole expression is DW_OP_GNU_variable_value
31627 alone we could transform it into a reference. */
31628 case DW_AT_byte_size:
31629 case DW_AT_bit_size:
31630 case DW_AT_lower_bound:
31631 case DW_AT_upper_bound:
31632 case DW_AT_bit_stride:
31633 case DW_AT_count:
31634 case DW_AT_allocated:
31635 case DW_AT_associated:
31636 case DW_AT_byte_stride:
31637 if (prev == NULL && next == NULL)
31638 break;
31639 /* FALLTHRU */
31640 default:
31641 if (dwarf_strict)
31642 continue;
31643 break;
31644 }
31645 /* Create DW_TAG_variable that we can refer to. */
31646 gen_decl_die (decl, NULL_TREE, NULL,
31647 lookup_decl_die (current_function_decl));
31648 ref = lookup_decl_die (decl);
31649 if (ref)
31650 {
31651 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31652 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31653 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31654 }
31655 continue;
31656 }
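      /* L is a single location expression; splice its operations in place
	 of the DW_OP_GNU_variable_value operation and continue scanning
	 from the spliced-in expression.  */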
31657 if (prev)
31658 {
31659 prev->dw_loc_next = l->expr;
31660 add_loc_descr (&prev->dw_loc_next, next);
31661 free_loc_descr (loc, NULL);
31662 next = prev->dw_loc_next;
31663 }
31664 else
31665 {
31666 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31667 add_loc_descr (&loc, next);
31668 next = loc;
31669 }
31670 loc = prev;
31671 }
31672 return false;
31673 }
31674
31675 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31676
31677 static void
31678 resolve_variable_value (dw_die_ref die)
31679 {
31680 dw_attr_node *a;
31681 dw_loc_list_ref loc;
31682 unsigned ix;
31683
31684 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31685 switch (AT_class (a))
31686 {
31687 case dw_val_class_loc:
31688 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31689 break;
31690 /* FALLTHRU */
31691 case dw_val_class_loc_list:
31692 loc = AT_loc_list (a);
31693 gcc_assert (loc);
31694 for (; loc; loc = loc->dw_loc_next)
31695 resolve_variable_value_in_expr (a, loc->expr);
31696 break;
31697 default:
31698 break;
31699 }
31700 }
31701
31702 /* Attempt to optimize DW_OP_GNU_variable_value operations referring
31703 to temporaries in the current function. */
31704
31705 static void
31706 resolve_variable_values (void)
31707 {
31708 if (!variable_value_hash || !current_function_decl)
31709 return;
31710
31711 struct variable_value_struct *node
31712 = variable_value_hash->find_with_hash (current_function_decl,
31713 DECL_UID (current_function_decl));
31714
31715 if (node == NULL)
31716 return;
31717
31718 unsigned int i;
31719 dw_die_ref die;
31720 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31721 resolve_variable_value (die);
31722 }
31723
31724 /* Helper function for note_variable_value, handle one location
31725 expression. */
31726
31727 static void
31728 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31729 {
31730 for (; loc; loc = loc->dw_loc_next)
31731 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31732 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31733 {
31734 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31735 dw_die_ref ref = lookup_decl_die (decl);
31736 if (! ref && (flag_generate_lto || flag_generate_offload))
31737 {
31738 /* ??? This is somewhat of a hack because we do not create DIEs
31739 for variables not in BLOCK trees early, but when generating
31740 early LTO output we need the dw_val_class_decl_ref to be
31741 fully resolved. For fat LTO objects we'd also like to
31742 undo this after the LTO dwarf output. */
31743 gcc_assert (DECL_CONTEXT (decl));
31744 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31745 gcc_assert (ctx != NULL);
31746 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31747 ref = lookup_decl_die (decl);
31748 gcc_assert (ref != NULL);
31749 }
31750 if (ref)
31751 {
31752 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31753 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31754 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31755 continue;
31756 }
31757 if (VAR_P (decl)
31758 && DECL_CONTEXT (decl)
31759 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31760 && lookup_decl_die (DECL_CONTEXT (decl)))
31761 {
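	  /* Remember this DIE, keyed by the function containing DECL, so
	     that resolve_variable_values can retry the resolution once
	     that function's debug info is being generated.  */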
31762 if (!variable_value_hash)
31763 variable_value_hash
31764 = hash_table<variable_value_hasher>::create_ggc (10);
31765
31766 tree fndecl = DECL_CONTEXT (decl);
31767 struct variable_value_struct *node;
31768 struct variable_value_struct **slot
31769 = variable_value_hash->find_slot_with_hash (fndecl,
31770 DECL_UID (fndecl),
31771 INSERT);
31772 if (*slot == NULL)
31773 {
31774 node = ggc_cleared_alloc<variable_value_struct> ();
31775 node->decl_id = DECL_UID (fndecl);
31776 *slot = node;
31777 }
31778 else
31779 node = *slot;
31780
31781 vec_safe_push (node->dies, die);
31782 }
31783 }
31784 }
31785
31786 /* Walk the tree rooted at DIE and note DIEs whose DW_OP_GNU_variable_value
31787 operations still have a dw_val_class_decl_ref operand. */
31788
31789 static void
31790 note_variable_value (dw_die_ref die)
31791 {
31792 dw_die_ref c;
31793 dw_attr_node *a;
31794 dw_loc_list_ref loc;
31795 unsigned ix;
31796
31797 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31798 switch (AT_class (a))
31799 {
31800 case dw_val_class_loc_list:
31801 loc = AT_loc_list (a);
31802 gcc_assert (loc);
31803 if (!loc->noted_variable_value)
31804 {
31805 loc->noted_variable_value = 1;
31806 for (; loc; loc = loc->dw_loc_next)
31807 note_variable_value_in_expr (die, loc->expr);
31808 }
31809 break;
31810 case dw_val_class_loc:
31811 note_variable_value_in_expr (die, AT_loc (a));
31812 break;
31813 default:
31814 break;
31815 }
31816
31817 /* Mark children. */
31818 FOR_EACH_CHILD (die, c, note_variable_value (c));
31819 }
31820
31821 /* Perform any cleanups needed after the early debug generation pass
31822 has run. */
31823
31824 static void
31825 dwarf2out_early_finish (const char *filename)
31826 {
31827 set_early_dwarf s;
31828 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31829
31830 /* PCH might result in the DW_AT_producer string being restored from the
31831 header compilation, so always fill it with an empty string initially
31832 and overwrite it only here. */
31833 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
31834 producer_string = gen_producer_string ();
31835 producer->dw_attr_val.v.val_str->refcount--;
31836 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
31837
31838 /* Add the name for the main input file now. We delayed this from
31839 dwarf2out_init to avoid complications with PCH. */
31840 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
31841 add_comp_dir_attribute (comp_unit_die ());
31842
31843 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
31844 DW_AT_comp_dir into the .debug_line_str section. */
31845 if (!dwarf2out_as_loc_support
31846 && dwarf_version >= 5
31847 && DWARF5_USE_DEBUG_LINE_STR)
31848 {
31849 for (int i = 0; i < 2; i++)
31850 {
31851 dw_attr_node *a = get_AT (comp_unit_die (),
31852 i ? DW_AT_comp_dir : DW_AT_name);
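	  /* Skip attributes that are missing, not strings, or so short
	     that an indirect DWARF_OFFSET_SIZE reference would not be
	     smaller than emitting the string inline.  */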
31853 if (a == NULL
31854 || AT_class (a) != dw_val_class_str
31855 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
31856 continue;
31857
31858 if (! debug_line_str_hash)
31859 debug_line_str_hash
31860 = hash_table<indirect_string_hasher>::create_ggc (10);
31861
31862 struct indirect_string_node *node
31863 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
31864 set_indirect_string (node);
31865 node->form = DW_FORM_line_strp;
31866 a->dw_attr_val.v.val_str->refcount--;
31867 a->dw_attr_val.v.val_str = node;
31868 }
31869 }
31870
31871 /* With LTO, early dwarf was really finished at compile time, so make
31872 sure to adjust the phase after annotating the LTRANS CU DIE. */
31873 if (in_lto_p)
31874 {
31875 early_dwarf_finished = true;
31876 return;
31877 }
31878
31879 /* Walk through the list of incomplete types again, trying once more to
31880 emit full debugging info for them. */
31881 retry_incomplete_types ();
31882
31883 /* The point here is to flush out the limbo list so that it is empty
31884 and we don't need to stream it for LTO. */
31885 flush_limbo_die_list ();
31886
31887 gen_scheduled_generic_parms_dies ();
31888 gen_remaining_tmpl_value_param_die_attribute ();
31889
31890 /* Add DW_AT_linkage_name for all deferred DIEs. */
31891 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
31892 {
31893 tree decl = node->created_for;
31894 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
31895 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
31896 ended up in deferred_asm_name before we knew it was
31897 constant and never written to disk. */
31898 && DECL_ASSEMBLER_NAME (decl))
31899 {
31900 add_linkage_attr (node->die, decl);
31901 move_linkage_attr (node->die);
31902 }
31903 }
31904 deferred_asm_name = NULL;
31905
31906 if (flag_eliminate_unused_debug_types)
31907 prune_unused_types ();
31908
31909 /* Generate separate COMDAT sections for type DIEs. */
31910 if (use_debug_types)
31911 {
31912 break_out_comdat_types (comp_unit_die ());
31913
31914 /* Each new type_unit DIE was added to the limbo die list when created.
31915 Since these have all been added to comdat_type_list, clear the
31916 limbo die list. */
31917 limbo_die_list = NULL;
31918
31919 /* For each new comdat type unit, copy declarations for incomplete
31920 types to make the new unit self-contained (i.e., no direct
31921 references to the main compile unit). */
31922 for (comdat_type_node *ctnode = comdat_type_list;
31923 ctnode != NULL; ctnode = ctnode->next)
31924 copy_decls_for_unworthy_types (ctnode->root_die);
31925 copy_decls_for_unworthy_types (comp_unit_die ());
31926
31927 /* In the process of copying declarations from one unit to another,
31928 we may have left some declarations behind that are no longer
31929 referenced. Prune them. */
31930 prune_unused_types ();
31931 }
31932
31933 /* Traverse the DIEs and note those whose DW_OP_GNU_variable_value
31934 operations still have a dw_val_class_decl_ref operand. */
31935 note_variable_value (comp_unit_die ());
31936 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31937 note_variable_value (node->die);
31938 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
31939 ctnode = ctnode->next)
31940 note_variable_value (ctnode->root_die);
31941 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31942 note_variable_value (node->die);
31943
31944 /* The AT_pubnames attribute needs to go in all skeleton dies, including
31945 both the main_cu and all skeleton TUs. Making this call unconditional
31946 would end up either adding a second copy of the AT_pubnames attribute, or
31947 requiring a special case in add_top_level_skeleton_die_attrs. */
31948 if (!dwarf_split_debug_info)
31949 add_AT_pubnames (comp_unit_die ());
31950
31951 /* The early debug phase is now finished. */
31952 early_dwarf_finished = true;
31953
31954 /* Do not generate DWARF assembler output now when not producing LTO bytecode. */
31955 if ((!flag_generate_lto && !flag_generate_offload)
31956 /* FIXME: Disable debug info generation for PE-COFF targets since the
31957 copy_lto_debug_sections operation of the simple object support in
31958 libiberty is not implemented for them yet. */
31959 || TARGET_PECOFF)
31960 return;
31961
31962 /* Now, as we are going to output for LTO, initialize sections and labels
31963 to the LTO variants. We don't need a random-seed postfix as for other
31964 LTO sections, since linking the LTO debug sections into one in a partial
31965 link is fine. */
31966 init_sections_and_labels (true);
31967
31968 /* The output below is modeled after dwarf2out_finish with all
31969 location related output removed and some LTO specific changes.
31970 Some refactoring might make both smaller and easier to match up. */
31971
31972 /* Traverse the DIEs and add sibling attributes to those DIEs
31973 that have children. */
31974 add_sibling_attributes (comp_unit_die ());
31975 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
31976 add_sibling_attributes (node->die);
31977 for (comdat_type_node *ctnode = comdat_type_list;
31978 ctnode != NULL; ctnode = ctnode->next)
31979 add_sibling_attributes (ctnode->root_die);
31980
31981 /* AIX Assembler inserts the length, so adjust the reference to match the
31982 offset expected by debuggers. */
31983 strcpy (dl_section_ref, debug_line_section_label);
31984 if (XCOFF_DEBUGGING_INFO)
31985 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31986
31987 if (debug_info_level >= DINFO_LEVEL_TERSE)
31988 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
31989
31990 if (have_macinfo)
31991 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31992 macinfo_section_label);
31993
31994 save_macinfo_strings ();
31995
31996 if (dwarf_split_debug_info)
31997 {
31998 unsigned int index = 0;
31999 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
32000 }
32001
32002 /* Output all of the compilation units. We put the main one last so that
32003 the offsets are available to output_pubnames. */
32004 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32005 output_comp_unit (node->die, 0, NULL);
32006
32007 hash_table<comdat_type_hasher> comdat_type_table (100);
32008 for (comdat_type_node *ctnode = comdat_type_list;
32009 ctnode != NULL; ctnode = ctnode->next)
32010 {
32011 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
32012
32013 /* Don't output duplicate types. */
32014 if (*slot != HTAB_EMPTY_ENTRY)
32015 continue;
32016
32017 /* Add a pointer to the line table for the main compilation unit
32018 so that the debugger can make sense of DW_AT_decl_file
32019 attributes. */
32020 if (debug_info_level >= DINFO_LEVEL_TERSE)
32021 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32022 (!dwarf_split_debug_info
32023 ? debug_line_section_label
32024 : debug_skeleton_line_section_label));
32025
32026 output_comdat_type_unit (ctnode);
32027 *slot = ctnode;
32028 }
32029
32030 /* Attach a unique symbol to the main debug info section. */
32031 compute_comp_unit_symbol (comp_unit_die ());
32032
32033 /* Output the main compilation unit. We always need it, if only for
32034 the CU symbol. */
32035 output_comp_unit (comp_unit_die (), true, NULL);
32036
32037 /* Output the abbreviation table. */
32038 if (vec_safe_length (abbrev_die_table) != 1)
32039 {
32040 switch_to_section (debug_abbrev_section);
32041 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32042 output_abbrev_section ();
32043 }
32044
32045 /* Have to end the macro section. */
32046 if (have_macinfo)
32047 {
32048 /* We have to save macinfo state if we need to output it again
32049 for the FAT part of the object. */
32050 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32051 if (flag_fat_lto_objects)
32052 macinfo_table = macinfo_table->copy ();
32053
32054 switch_to_section (debug_macinfo_section);
32055 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32056 output_macinfo (debug_line_section_label, true);
32057 dw2_asm_output_data (1, 0, "End compilation unit");
32058
32059 if (flag_fat_lto_objects)
32060 {
32061 vec_free (macinfo_table);
32062 macinfo_table = saved_macinfo_table;
32063 }
32064 }
32065
32066 /* Emit a skeleton debug_line section. */
32067 switch_to_section (debug_line_section);
32068 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32069 output_line_info (true);
32070
32071 /* If we emitted any indirect strings, output the string table too. */
32072 if (debug_str_hash || skeleton_debug_str_hash)
32073 output_indirect_strings ();
32074
32075 /* Switch back to the text section. */
32076 switch_to_section (text_section);
32077 }
32078
32079 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32080 within the same process. For use by toplev::finalize. */
32081
32082 void
32083 dwarf2out_c_finalize (void)
32084 {
32085 last_var_location_insn = NULL;
32086 cached_next_real_insn = NULL;
32087 used_rtx_array = NULL;
32088 incomplete_types = NULL;
32089 decl_scope_table = NULL;
32090 debug_info_section = NULL;
32091 debug_skeleton_info_section = NULL;
32092 debug_abbrev_section = NULL;
32093 debug_skeleton_abbrev_section = NULL;
32094 debug_aranges_section = NULL;
32095 debug_addr_section = NULL;
32096 debug_macinfo_section = NULL;
32097 debug_line_section = NULL;
32098 debug_skeleton_line_section = NULL;
32099 debug_loc_section = NULL;
32100 debug_pubnames_section = NULL;
32101 debug_pubtypes_section = NULL;
32102 debug_str_section = NULL;
32103 debug_line_str_section = NULL;
32104 debug_str_dwo_section = NULL;
32105 debug_str_offsets_section = NULL;
32106 debug_ranges_section = NULL;
32107 debug_frame_section = NULL;
32108 fde_vec = NULL;
32109 debug_str_hash = NULL;
32110 debug_line_str_hash = NULL;
32111 skeleton_debug_str_hash = NULL;
32112 dw2_string_counter = 0;
32113 have_multiple_function_sections = false;
32114 text_section_used = false;
32115 cold_text_section_used = false;
32116 cold_text_section = NULL;
32117 current_unit_personality = NULL;
32118
32119 early_dwarf = false;
32120 early_dwarf_finished = false;
32121
32122 next_die_offset = 0;
32123 single_comp_unit_die = NULL;
32124 comdat_type_list = NULL;
32125 limbo_die_list = NULL;
32126 file_table = NULL;
32127 decl_die_table = NULL;
32128 common_block_die_table = NULL;
32129 decl_loc_table = NULL;
32130 call_arg_locations = NULL;
32131 call_arg_loc_last = NULL;
32132 call_site_count = -1;
32133 tail_call_site_count = -1;
32134 cached_dw_loc_list_table = NULL;
32135 abbrev_die_table = NULL;
32136 delete dwarf_proc_stack_usage_map;
32137 dwarf_proc_stack_usage_map = NULL;
32138 line_info_label_num = 0;
32139 cur_line_info_table = NULL;
32140 text_section_line_info = NULL;
32141 cold_text_section_line_info = NULL;
32142 separate_line_info = NULL;
32143 info_section_emitted = false;
32144 pubname_table = NULL;
32145 pubtype_table = NULL;
32146 macinfo_table = NULL;
32147 ranges_table = NULL;
32148 ranges_by_label = NULL;
32149 rnglist_idx = 0;
32150 have_location_lists = false;
32151 loclabel_num = 0;
32152 poc_label_num = 0;
32153 last_emitted_file = NULL;
32154 label_num = 0;
32155 tmpl_value_parm_die_table = NULL;
32156 generic_type_instances = NULL;
32157 frame_pointer_fb_offset = 0;
32158 frame_pointer_fb_offset_valid = false;
32159 base_types.release ();
32160 XDELETEVEC (producer_string);
32161 producer_string = NULL;
32162 }
32163
32164 #include "gt-dwarf2out.h"