1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2020 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 information shared by one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
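/* Illustrative example (not part of the upstream glossary), assuming a
   typical x86_64 prologue:

       push %rbp            -> DW_CFA_def_cfa_offset: 16
                               DW_CFA_offset: r6 (rbp) at cfa-16
       mov  %rsp, %rbp      -> DW_CFA_def_cfa_register: r6 (rbp)

   On entry the CFA is rsp+8, the caller's SP just before the call insn,
   i.e. just above the pushed return address; the CFIs above keep that
   same address recoverable while the prologue moves SP and then switches
   to the frame-pointer register.  */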
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105 static bool is_redundant_typedef (const_tree);
106
107 #ifndef XCOFF_DEBUGGING_INFO
108 #define XCOFF_DEBUGGING_INFO 0
109 #endif
110
111 #ifndef HAVE_XCOFF_DWARF_EXTRAS
112 #define HAVE_XCOFF_DWARF_EXTRAS 0
113 #endif
114
115 #ifdef VMS_DEBUGGING_INFO
116 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
117
118 /* Define this macro to be a nonzero value if the directory specifications
119 which are output in the debug info should end with a separator. */
120 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
121 /* Define this macro to evaluate to a nonzero value if GCC should refrain
122 from generating indirect strings in DWARF2 debug information, for instance
123 if your target is stuck with an old version of GDB that is unable to
124 process them properly or uses VMS Debug. */
125 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
126 #else
127 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
128 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
129 #endif
130
131 /* ??? Poison these here until it can be done generically. They've been
132 totally replaced in this file; make sure it stays that way. */
133 #undef DWARF2_UNWIND_INFO
134 #undef DWARF2_FRAME_INFO
135 #if (GCC_VERSION >= 3000)
136 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
137 #endif
138
139 /* The size of the target's pointer type. */
140 #ifndef PTR_SIZE
141 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
142 #endif
143
144 /* Array of RTXes referenced by the debugging information, which therefore
145 must be kept around forever. */
146 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
147
148 /* A pointer to the base of a list of incomplete types which might be
149 completed at some later time. incomplete_types needs to be a
150 vec<tree, va_gc> * because we want to tell the garbage collector about
151 it. */
152 static GTY(()) vec<tree, va_gc> *incomplete_types;
153
154 /* Pointers to various DWARF2 sections. */
155 static GTY(()) section *debug_info_section;
156 static GTY(()) section *debug_skeleton_info_section;
157 static GTY(()) section *debug_abbrev_section;
158 static GTY(()) section *debug_skeleton_abbrev_section;
159 static GTY(()) section *debug_aranges_section;
160 static GTY(()) section *debug_addr_section;
161 static GTY(()) section *debug_macinfo_section;
162 static const char *debug_macinfo_section_name;
163 static unsigned macinfo_label_base = 1;
164 static GTY(()) section *debug_line_section;
165 static GTY(()) section *debug_skeleton_line_section;
166 static GTY(()) section *debug_loc_section;
167 static GTY(()) section *debug_pubnames_section;
168 static GTY(()) section *debug_pubtypes_section;
169 static GTY(()) section *debug_str_section;
170 static GTY(()) section *debug_line_str_section;
171 static GTY(()) section *debug_str_dwo_section;
172 static GTY(()) section *debug_str_offsets_section;
173 static GTY(()) section *debug_ranges_section;
174 static GTY(()) section *debug_frame_section;
175
176 /* Maximum size (in bytes) of an artificially generated label. */
177 #define MAX_ARTIFICIAL_LABEL_BYTES 40
178
179 /* According to the (draft) DWARF 3 specification, the initial length
180 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
181 bytes are 0xffffffff, followed by the length stored in the next 8
182 bytes.
183
184 However, the SGI/MIPS ABI uses an initial length which is equal to
185 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
186
187 #ifndef DWARF_INITIAL_LENGTH_SIZE
188 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
189 #endif
190
191 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
192 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
193 #endif
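/* For illustration only: a minimal sketch (not used anywhere) of how an
   initial length is emitted, mirroring the pattern used further down in
   output_call_frame_info and output_fde.  BEGIN_LABEL and END_LABEL are
   placeholder label names.  With 64-bit DWARF the 12-byte form is the
   0xffffffff escape followed by an 8-byte length; 32-bit DWARF emits a
   plain 4-byte length.  */
#if 0
  if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
    dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
                         " indicating 64-bit DWARF extension");
  dw2_asm_output_delta (DWARF_OFFSET_SIZE, end_label, begin_label,
                        "Length of the contribution");
#endif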
194
195 /* Round SIZE up to the nearest BOUNDARY. */
196 #define DWARF_ROUND(SIZE,BOUNDARY) \
197 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
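/* Worked example, for illustration: with a boundary of 4,
   DWARF_ROUND (5, 4) == 8, DWARF_ROUND (8, 4) == 8 and
   DWARF_ROUND (0, 4) == 0.  */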
198
199 /* CIE identifier. */
200 #if HOST_BITS_PER_WIDE_INT >= 64
201 #define DWARF_CIE_ID \
202 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
203 #else
204 #define DWARF_CIE_ID DW_CIE_ID
205 #endif
206
207
208 /* A vector for a table that contains frame description
209 information for each routine. */
210 #define NOT_INDEXED (-1U)
211 #define NO_INDEX_ASSIGNED (-2U)
212
213 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
214
215 struct GTY((for_user)) indirect_string_node {
216 const char *str;
217 unsigned int refcount;
218 enum dwarf_form form;
219 char *label;
220 unsigned int index;
221 };
222
223 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
224 {
225 typedef const char *compare_type;
226
227 static hashval_t hash (indirect_string_node *);
228 static bool equal (indirect_string_node *, const char *);
229 };
230
231 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
232
233 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
234
235 /* With split_debug_info, both the comp_dir and dwo_name go in the
236 main object file, rather than the dwo, similar to the force_direct
237 parameter elsewhere but with additional complications:
238
239 1) The string is needed in both the main object file and the dwo.
240 That is, the comp_dir and dwo_name will appear in both places.
241
242 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
243 DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.
244
245 3) GCC chooses the form to use late, depending on the size and
246 reference count.
247
248 Rather than forcing all the debug string handling functions and
249 callers to deal with these complications, simply use a separate,
250 special-cased string table for any attribute that should go in the
251 main object file. This limits the complexity to just the places
252 that need it. */
253
254 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
255
256 static GTY(()) int dw2_string_counter;
257
258 /* True if the compilation unit places functions in more than one section. */
259 static GTY(()) bool have_multiple_function_sections = false;
260
261 /* Whether the default text and cold text sections have been used at all. */
262 static GTY(()) bool text_section_used = false;
263 static GTY(()) bool cold_text_section_used = false;
264
265 /* The default cold text section. */
266 static GTY(()) section *cold_text_section;
267
268 /* The DIE for C++14 'auto' in a function return type. */
269 static GTY(()) dw_die_ref auto_die;
270
271 /* The DIE for C++14 'decltype(auto)' in a function return type. */
272 static GTY(()) dw_die_ref decltype_auto_die;
273
274 /* Forward declarations for functions defined in this file. */
275
276 static void output_call_frame_info (int);
277 static void dwarf2out_note_section_used (void);
278
279 /* Personality decl of current unit. Used only when assembler does not support
280 personality CFI. */
281 static GTY(()) rtx current_unit_personality;
282
283 /* Whether an eh_frame section is required. */
284 static GTY(()) bool do_eh_frame = false;
285
286 /* .debug_rnglists next index. */
287 static unsigned int rnglist_idx;
288
289 /* Data and reference forms for relocatable data. */
290 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
291 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
292
293 #ifndef DEBUG_FRAME_SECTION
294 #define DEBUG_FRAME_SECTION ".debug_frame"
295 #endif
296
297 #ifndef FUNC_BEGIN_LABEL
298 #define FUNC_BEGIN_LABEL "LFB"
299 #endif
300
301 #ifndef FUNC_SECOND_SECT_LABEL
302 #define FUNC_SECOND_SECT_LABEL "LFSB"
303 #endif
304
305 #ifndef FUNC_END_LABEL
306 #define FUNC_END_LABEL "LFE"
307 #endif
308
309 #ifndef PROLOGUE_END_LABEL
310 #define PROLOGUE_END_LABEL "LPE"
311 #endif
312
313 #ifndef EPILOGUE_BEGIN_LABEL
314 #define EPILOGUE_BEGIN_LABEL "LEB"
315 #endif
316
317 #ifndef FRAME_BEGIN_LABEL
318 #define FRAME_BEGIN_LABEL "Lframe"
319 #endif
320 #define CIE_AFTER_SIZE_LABEL "LSCIE"
321 #define CIE_END_LABEL "LECIE"
322 #define FDE_LABEL "LSFDE"
323 #define FDE_AFTER_SIZE_LABEL "LASFDE"
324 #define FDE_END_LABEL "LEFDE"
325 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
326 #define LINE_NUMBER_END_LABEL "LELT"
327 #define LN_PROLOG_AS_LABEL "LASLTP"
328 #define LN_PROLOG_END_LABEL "LELTP"
329 #define DIE_LABEL_PREFIX "DW"
330 \f
331 /* Match the base name of a file to the base name of a compilation unit. */
332
333 static int
334 matches_main_base (const char *path)
335 {
336 /* Cache the last query. */
337 static const char *last_path = NULL;
338 static int last_match = 0;
339 if (path != last_path)
340 {
341 const char *base;
342 int length = base_of_path (path, &base);
343 last_path = path;
344 last_match = (length == main_input_baselength
345 && memcmp (base, main_input_basename, length) == 0);
346 }
347 return last_match;
348 }
349
350 #ifdef DEBUG_DEBUG_STRUCT
351
352 static int
353 dump_struct_debug (tree type, enum debug_info_usage usage,
354 enum debug_struct_file criterion, int generic,
355 int matches, int result)
356 {
357 /* Find the type name. */
358 tree type_decl = TYPE_STUB_DECL (type);
359 tree t = type_decl;
360 const char *name = 0;
361 if (TREE_CODE (t) == TYPE_DECL)
362 t = DECL_NAME (t);
363 if (t)
364 name = IDENTIFIER_POINTER (t);
365
366 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
367 criterion,
368 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
369 matches ? "bas" : "hdr",
370 generic ? "gen" : "ord",
371 usage == DINFO_USAGE_DFN ? ";" :
372 usage == DINFO_USAGE_DIR_USE ? "." : "*",
373 result,
374 (void*) type_decl, name);
375 return result;
376 }
377 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
378 dump_struct_debug (type, usage, criterion, generic, matches, result)
379
380 #else
381
382 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
383 (result)
384
385 #endif
386
387 /* Get the number of HOST_WIDE_INTs needed to represent the precision
388 of the number. Some constants have a large uniform precision, so
389 we get the precision needed for the actual value of the number. */
390
391 static unsigned int
392 get_full_len (const wide_int &op)
393 {
394 int prec = wi::min_precision (op, UNSIGNED);
395 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
396 / HOST_BITS_PER_WIDE_INT);
397 }
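/* Worked example, for illustration: with a 64-bit HOST_WIDE_INT, any
   value whose minimum precision is 1..64 bits yields 1, a 65..128-bit
   value yields 2, and so on: (prec + 64 - 1) / 64.  */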
398
399 static bool
400 should_emit_struct_debug (tree type, enum debug_info_usage usage)
401 {
402 enum debug_struct_file criterion;
403 tree type_decl;
404 bool generic = lang_hooks.types.generic_p (type);
405
406 if (generic)
407 criterion = debug_struct_generic[usage];
408 else
409 criterion = debug_struct_ordinary[usage];
410
411 if (criterion == DINFO_STRUCT_FILE_NONE)
412 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
413 if (criterion == DINFO_STRUCT_FILE_ANY)
414 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
415
416 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
417
418 if (type_decl != NULL)
419 {
420 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
421 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
422
423 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
424 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
425 }
426
427 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
428 }
429 \f
430 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
431 switch to the data section instead, and write out a synthetic start label
432 for collect2 the first time around. */
433
434 static void
435 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
436 {
437 if (eh_frame_section == 0)
438 {
439 int flags;
440
441 if (EH_TABLES_CAN_BE_READ_ONLY)
442 {
443 int fde_encoding;
444 int per_encoding;
445 int lsda_encoding;
446
447 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
448 /*global=*/0);
449 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
450 /*global=*/1);
451 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
452 /*global=*/0);
453 flags = ((! flag_pic
454 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
455 && (fde_encoding & 0x70) != DW_EH_PE_aligned
456 && (per_encoding & 0x70) != DW_EH_PE_absptr
457 && (per_encoding & 0x70) != DW_EH_PE_aligned
458 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
459 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
460 ? 0 : SECTION_WRITE);
461 }
462 else
463 flags = SECTION_WRITE;
464
465 #ifdef EH_FRAME_SECTION_NAME
466 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
467 #else
468 eh_frame_section = ((flags == SECTION_WRITE)
469 ? data_section : readonly_data_section);
470 #endif /* EH_FRAME_SECTION_NAME */
471 }
472
473 switch_to_section (eh_frame_section);
474
475 #ifdef EH_FRAME_THROUGH_COLLECT2
476 /* We have no special eh_frame section. Emit special labels to guide
477 collect2. */
478 if (!back)
479 {
480 tree label = get_file_function_name ("F");
481 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
482 targetm.asm_out.globalize_label (asm_out_file,
483 IDENTIFIER_POINTER (label));
484 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
485 }
486 #endif
487 }
488
489 /* Switch [BACK] to the eh or debug frame table section, depending on
490 FOR_EH. */
491
492 static void
493 switch_to_frame_table_section (int for_eh, bool back)
494 {
495 if (for_eh)
496 switch_to_eh_frame_section (back);
497 else
498 {
499 if (!debug_frame_section)
500 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
501 SECTION_DEBUG, NULL);
502 switch_to_section (debug_frame_section);
503 }
504 }
505
506 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
507
508 enum dw_cfi_oprnd_type
509 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
510 {
511 switch (cfi)
512 {
513 case DW_CFA_nop:
514 case DW_CFA_GNU_window_save:
515 case DW_CFA_remember_state:
516 case DW_CFA_restore_state:
517 return dw_cfi_oprnd_unused;
518
519 case DW_CFA_set_loc:
520 case DW_CFA_advance_loc1:
521 case DW_CFA_advance_loc2:
522 case DW_CFA_advance_loc4:
523 case DW_CFA_MIPS_advance_loc8:
524 return dw_cfi_oprnd_addr;
525
526 case DW_CFA_offset:
527 case DW_CFA_offset_extended:
528 case DW_CFA_def_cfa:
529 case DW_CFA_offset_extended_sf:
530 case DW_CFA_def_cfa_sf:
531 case DW_CFA_restore:
532 case DW_CFA_restore_extended:
533 case DW_CFA_undefined:
534 case DW_CFA_same_value:
535 case DW_CFA_def_cfa_register:
536 case DW_CFA_register:
537 case DW_CFA_expression:
538 case DW_CFA_val_expression:
539 return dw_cfi_oprnd_reg_num;
540
541 case DW_CFA_def_cfa_offset:
542 case DW_CFA_GNU_args_size:
543 case DW_CFA_def_cfa_offset_sf:
544 return dw_cfi_oprnd_offset;
545
546 case DW_CFA_def_cfa_expression:
547 return dw_cfi_oprnd_loc;
548
549 default:
550 gcc_unreachable ();
551 }
552 }
553
554 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
555
556 enum dw_cfi_oprnd_type
557 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
558 {
559 switch (cfi)
560 {
561 case DW_CFA_def_cfa:
562 case DW_CFA_def_cfa_sf:
563 case DW_CFA_offset:
564 case DW_CFA_offset_extended_sf:
565 case DW_CFA_offset_extended:
566 return dw_cfi_oprnd_offset;
567
568 case DW_CFA_register:
569 return dw_cfi_oprnd_reg_num;
570
571 case DW_CFA_expression:
572 case DW_CFA_val_expression:
573 return dw_cfi_oprnd_loc;
574
575 case DW_CFA_def_cfa_expression:
576 return dw_cfi_oprnd_cfa_loc;
577
578 default:
579 return dw_cfi_oprnd_unused;
580 }
581 }
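/* Example, for illustration: for DW_CFA_def_cfa, operand 1 is a register
   number and operand 2 is an offset; for DW_CFA_advance_loc4, operand 1
   is an address label and operand 2 is unused.  The GTY machinery uses
   these answers to decide which union member of each operand to walk
   during garbage collection.  */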
582
583 /* Output one FDE. */
584
585 static void
586 output_fde (dw_fde_ref fde, bool for_eh, bool second,
587 char *section_start_label, int fde_encoding, char *augmentation,
588 bool any_lsda_needed, int lsda_encoding)
589 {
590 const char *begin, *end;
591 static unsigned int j;
592 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
593
594 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
595 /* empty */ 0);
596 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
597 for_eh + j);
598 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
599 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
600 if (!XCOFF_DEBUGGING_INFO || for_eh)
601 {
602 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
603 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
604 " indicating 64-bit DWARF extension");
605 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
606 "FDE Length");
607 }
608 ASM_OUTPUT_LABEL (asm_out_file, l1);
609
610 if (for_eh)
611 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
612 else
613 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
614 debug_frame_section, "FDE CIE offset");
615
616 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
617 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
618
619 if (for_eh)
620 {
621 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
622 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
623 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
624 "FDE initial location");
625 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
626 end, begin, "FDE address range");
627 }
628 else
629 {
630 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
631 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
632 }
633
634 if (augmentation[0])
635 {
636 if (any_lsda_needed)
637 {
638 int size = size_of_encoded_value (lsda_encoding);
639
640 if (lsda_encoding == DW_EH_PE_aligned)
641 {
642 int offset = ( 4 /* Length */
643 + 4 /* CIE offset */
644 + 2 * size_of_encoded_value (fde_encoding)
645 + 1 /* Augmentation size */ );
646 int pad = -offset & (PTR_SIZE - 1);
647
648 size += pad;
649 gcc_assert (size_of_uleb128 (size) == 1);
650 }
651
652 dw2_asm_output_data_uleb128 (size, "Augmentation size");
653
654 if (fde->uses_eh_lsda)
655 {
656 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
657 fde->funcdef_number);
658 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
659 gen_rtx_SYMBOL_REF (Pmode, l1),
660 false,
661 "Language Specific Data Area");
662 }
663 else
664 {
665 if (lsda_encoding == DW_EH_PE_aligned)
666 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
667 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
668 "Language Specific Data Area (none)");
669 }
670 }
671 else
672 dw2_asm_output_data_uleb128 (0, "Augmentation size");
673 }
674
675 /* Loop through the Call Frame Instructions associated with this FDE. */
676 fde->dw_fde_current_label = begin;
677 {
678 size_t from, until, i;
679
680 from = 0;
681 until = vec_safe_length (fde->dw_fde_cfi);
682
683 if (fde->dw_fde_second_begin == NULL)
684 ;
685 else if (!second)
686 until = fde->dw_fde_switch_cfi_index;
687 else
688 from = fde->dw_fde_switch_cfi_index;
689
690 for (i = from; i < until; i++)
691 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
692 }
693
694 /* If we are to emit a ref/link from function bodies to their frame tables,
695 do it now. This is typically performed to make sure that tables
696 associated with functions are dragged with them and not discarded by
697 garbage-collecting linkers. We need to do this on a per-function basis to
698 cope with -ffunction-sections. */
699
700 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
701 /* Switch to the function section, emit the ref to the tables, and
702 switch *back* into the table section. */
703 switch_to_section (function_section (fde->decl));
704 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
705 switch_to_frame_table_section (for_eh, true);
706 #endif
707
708 /* Pad the FDE out to an address sized boundary. */
709 ASM_OUTPUT_ALIGN (asm_out_file,
710 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
711 ASM_OUTPUT_LABEL (asm_out_file, l2);
712
713 j += 2;
714 }
715
716 /* Return true if frame description entry FDE is needed for EH. */
717
718 static bool
719 fde_needed_for_eh_p (dw_fde_ref fde)
720 {
721 if (flag_asynchronous_unwind_tables)
722 return true;
723
724 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
725 return true;
726
727 if (fde->uses_eh_lsda)
728 return true;
729
730 /* If exceptions are enabled, we have collected nothrow info. */
731 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
732 return false;
733
734 return true;
735 }
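/* Example, for illustration: with plain -fexceptions, a function already
   known not to throw (fde->nothrow) and with no LSDA gets no entry in
   .eh_frame, whereas -fasynchronous-unwind-tables keeps every FDE so the
   unwinder can step from any instruction.  */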
736
737 /* Output the call frame information, which records how the frame
738 address is computed and where the saved registers can be found. */
740
741 static void
742 output_call_frame_info (int for_eh)
743 {
744 unsigned int i;
745 dw_fde_ref fde;
746 dw_cfi_ref cfi;
747 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
748 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
749 bool any_lsda_needed = false;
750 char augmentation[6];
751 int augmentation_size;
752 int fde_encoding = DW_EH_PE_absptr;
753 int per_encoding = DW_EH_PE_absptr;
754 int lsda_encoding = DW_EH_PE_absptr;
755 int return_reg;
756 rtx personality = NULL;
757 int dw_cie_version;
758
759 /* Don't emit a CIE if there won't be any FDEs. */
760 if (!fde_vec)
761 return;
762
763 /* Nothing to do if the assembler's doing it all. */
764 if (dwarf2out_do_cfi_asm ())
765 return;
766
767 /* If we don't have any functions we'll want to unwind out of, don't emit
768 any EH unwind information. If we make FDEs linkonce, we may have to
769 emit an empty label for an FDE that wouldn't otherwise be emitted. We
770 want to avoid having an FDE kept around when the function it refers to
771 is discarded. Example where this matters: a primary function template
772 in C++ requires EH information, an explicit specialization doesn't. */
773 if (for_eh)
774 {
775 bool any_eh_needed = false;
776
777 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
778 {
779 if (fde->uses_eh_lsda)
780 any_eh_needed = any_lsda_needed = true;
781 else if (fde_needed_for_eh_p (fde))
782 any_eh_needed = true;
783 else if (TARGET_USES_WEAK_UNWIND_INFO)
784 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
785 }
786
787 if (!any_eh_needed)
788 return;
789 }
790
791 /* We're going to be generating comments, so turn on app. */
792 if (flag_debug_asm)
793 app_enable ();
794
795 /* Switch to the proper frame section, first time. */
796 switch_to_frame_table_section (for_eh, false);
797
798 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
799 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
800
801 /* Output the CIE. */
802 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
803 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
804 if (!XCOFF_DEBUGGING_INFO || for_eh)
805 {
806 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
807 dw2_asm_output_data (4, 0xffffffff,
808 "Initial length escape value indicating 64-bit DWARF extension");
809 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
810 "Length of Common Information Entry");
811 }
812 ASM_OUTPUT_LABEL (asm_out_file, l1);
813
814 /* Now that the CIE pointer is PC-relative for EH,
815 use 0 to identify the CIE. */
816 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
817 (for_eh ? 0 : DWARF_CIE_ID),
818 "CIE Identifier Tag");
819
820 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
821 use CIE version 1, unless that would produce incorrect results
822 due to overflowing the return register column. */
823 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
824 dw_cie_version = 1;
825 if (return_reg >= 256 || dwarf_version > 2)
826 dw_cie_version = 3;
827 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
828
829 augmentation[0] = 0;
830 augmentation_size = 0;
831
832 personality = current_unit_personality;
833 if (for_eh)
834 {
835 char *p;
836
837 /* Augmentation:
838 z Indicates that a uleb128 is present to size the
839 augmentation section.
840 L Indicates the encoding (and thus presence) of
841 an LSDA pointer in the FDE augmentation.
842 R Indicates a non-default pointer encoding for
843 FDE code pointers.
844 P Indicates the presence of an encoding + language
845 personality routine in the CIE augmentation. */
846
847 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
848 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
849 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
850
851 p = augmentation + 1;
852 if (personality)
853 {
854 *p++ = 'P';
855 augmentation_size += 1 + size_of_encoded_value (per_encoding);
856 assemble_external_libcall (personality);
857 }
858 if (any_lsda_needed)
859 {
860 *p++ = 'L';
861 augmentation_size += 1;
862 }
863 if (fde_encoding != DW_EH_PE_absptr)
864 {
865 *p++ = 'R';
866 augmentation_size += 1;
867 }
868 if (p > augmentation + 1)
869 {
870 augmentation[0] = 'z';
871 *p = '\0';
872 }
873
874 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
875 if (personality && per_encoding == DW_EH_PE_aligned)
876 {
877 int offset = ( 4 /* Length */
878 + 4 /* CIE Id */
879 + 1 /* CIE version */
880 + strlen (augmentation) + 1 /* Augmentation */
881 + size_of_uleb128 (1) /* Code alignment */
882 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
883 + 1 /* RA column */
884 + 1 /* Augmentation size */
885 + 1 /* Personality encoding */ );
886 int pad = -offset & (PTR_SIZE - 1);
887
888 augmentation_size += pad;
889
890 /* Augmentations should be small, so there's scarce need to
891 iterate for a solution. Die if we exceed one uleb128 byte. */
892 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
893 }
894 }
895
896 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
897 if (dw_cie_version >= 4)
898 {
899 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
900 dw2_asm_output_data (1, 0, "CIE Segment Size");
901 }
902 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
903 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
904 "CIE Data Alignment Factor");
905
906 if (dw_cie_version == 1)
907 dw2_asm_output_data (1, return_reg, "CIE RA Column");
908 else
909 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
910
911 if (augmentation[0])
912 {
913 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
914 if (personality)
915 {
916 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
917 eh_data_format_name (per_encoding));
918 dw2_asm_output_encoded_addr_rtx (per_encoding,
919 personality,
920 true, NULL);
921 }
922
923 if (any_lsda_needed)
924 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
925 eh_data_format_name (lsda_encoding));
926
927 if (fde_encoding != DW_EH_PE_absptr)
928 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
929 eh_data_format_name (fde_encoding));
930 }
931
932 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
933 output_cfi (cfi, NULL, for_eh);
934
935 /* Pad the CIE out to an address sized boundary. */
936 ASM_OUTPUT_ALIGN (asm_out_file,
937 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
938 ASM_OUTPUT_LABEL (asm_out_file, l2);
939
940 /* Loop through all of the FDE's. */
941 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
942 {
943 unsigned int k;
944
945 /* Don't emit EH unwind info for leaf functions that don't need it. */
946 if (for_eh && !fde_needed_for_eh_p (fde))
947 continue;
948
949 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
950 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
951 augmentation, any_lsda_needed, lsda_encoding);
952 }
953
954 if (for_eh && targetm.terminate_dw2_eh_frame_info)
955 dw2_asm_output_data (4, 0, "End of Table");
956
957 /* Turn off app to make assembly quicker. */
958 if (flag_debug_asm)
959 app_disable ();
960 }
961
962 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
963
964 static void
965 dwarf2out_do_cfi_startproc (bool second)
966 {
967 int enc;
968 rtx ref;
969
970 fprintf (asm_out_file, "\t.cfi_startproc\n");
971
972 targetm.asm_out.post_cfi_startproc (asm_out_file, current_function_decl);
973
974 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
975 eh unwinders. */
976 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
977 return;
978
979 rtx personality = get_personality_function (current_function_decl);
980
981 if (personality)
982 {
983 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
984 ref = personality;
985
986 /* ??? The GAS support isn't entirely consistent. We have to
987 handle indirect support ourselves, but PC-relative is done
988 in the assembler. Further, the assembler can't handle any
989 of the weirder relocation types. */
990 if (enc & DW_EH_PE_indirect)
991 ref = dw2_force_const_mem (ref, true);
992
993 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
994 output_addr_const (asm_out_file, ref);
995 fputc ('\n', asm_out_file);
996 }
997
998 if (crtl->uses_eh_lsda)
999 {
1000 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
1001
1002 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1003 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1004 current_function_funcdef_no);
1005 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1006 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1007
1008 if (enc & DW_EH_PE_indirect)
1009 ref = dw2_force_const_mem (ref, true);
1010
1011 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1012 output_addr_const (asm_out_file, ref);
1013 fputc ('\n', asm_out_file);
1014 }
1015 }
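/* For a function with a C++ personality routine and an LSDA, the
   directives emitted above typically look like the following
   (illustrative only; the encodings and label names are target and
   function dependent):

       .cfi_startproc
       .cfi_personality 0x9b,DW.ref.__gxx_personality_v0
       .cfi_lsda 0x1b,.LLSDA42

   where 0x9b is DW_EH_PE_indirect|pcrel|sdata4 and 0x1b is
   DW_EH_PE_pcrel|sdata4.  */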
1016
1017 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1018 this allocation may be done before pass_final. */
1019
1020 dw_fde_ref
1021 dwarf2out_alloc_current_fde (void)
1022 {
1023 dw_fde_ref fde;
1024
1025 fde = ggc_cleared_alloc<dw_fde_node> ();
1026 fde->decl = current_function_decl;
1027 fde->funcdef_number = current_function_funcdef_no;
1028 fde->fde_index = vec_safe_length (fde_vec);
1029 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1030 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1031 fde->nothrow = crtl->nothrow;
1032 fde->drap_reg = INVALID_REGNUM;
1033 fde->vdrap_reg = INVALID_REGNUM;
1034
1035 /* Record the FDE associated with this function. */
1036 cfun->fde = fde;
1037 vec_safe_push (fde_vec, fde);
1038
1039 return fde;
1040 }
1041
1042 /* Output a marker (i.e. a label) for the beginning of a function, before
1043 the prologue. */
1044
1045 void
1046 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1047 unsigned int column ATTRIBUTE_UNUSED,
1048 const char *file ATTRIBUTE_UNUSED)
1049 {
1050 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1051 char * dup_label;
1052 dw_fde_ref fde;
1053 section *fnsec;
1054 bool do_frame;
1055
1056 current_function_func_begin_label = NULL;
1057
1058 do_frame = dwarf2out_do_frame ();
1059
1060 /* ??? current_function_func_begin_label is also used by except.c for
1061 call-site information. We must emit this label if it might be used. */
1062 if (!do_frame
1063 && (!flag_exceptions
1064 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1065 return;
1066
1067 fnsec = function_section (current_function_decl);
1068 switch_to_section (fnsec);
1069 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1070 current_function_funcdef_no);
1071 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1072 current_function_funcdef_no);
1073 dup_label = xstrdup (label);
1074 current_function_func_begin_label = dup_label;
1075
1076 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1077 if (!do_frame)
1078 return;
1079
1080 /* Unlike the debug version, the EH version of frame unwind info is a per-
1081 function setting so we need to record whether we need it for the unit. */
1082 do_eh_frame |= dwarf2out_do_eh_frame ();
1083
1084 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1085 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1086 would include pass_dwarf2_frame. If we've not created the FDE yet,
1087 do so now. */
1088 fde = cfun->fde;
1089 if (fde == NULL)
1090 fde = dwarf2out_alloc_current_fde ();
1091
1092 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1093 fde->dw_fde_begin = dup_label;
1094 fde->dw_fde_current_label = dup_label;
1095 fde->in_std_section = (fnsec == text_section
1096 || (cold_text_section && fnsec == cold_text_section));
1097
1098 /* We only want to output line number information for the genuine dwarf2
1099 prologue case, not the eh frame case. */
1100 #ifdef DWARF2_DEBUGGING_INFO
1101 if (file)
1102 dwarf2out_source_line (line, column, file, 0, true);
1103 #endif
1104
1105 if (dwarf2out_do_cfi_asm ())
1106 dwarf2out_do_cfi_startproc (false);
1107 else
1108 {
1109 rtx personality = get_personality_function (current_function_decl);
1110 if (!current_unit_personality)
1111 current_unit_personality = personality;
1112
1113 /* We cannot keep a per-function personality: without CFI asm there is
1114 no current function left by the time the CFI data is emitted, so only
1115 one personality per compilation unit can be supported. */
1116 if (personality && current_unit_personality != personality)
1117 sorry ("multiple EH personalities are supported only with assemblers "
1118 "supporting %<.cfi_personality%> directive");
1119 }
1120 }
1121
1122 /* Output a marker (i.e. a label) for the end of the generated code
1123 for a function prologue. This gets called *after* the prologue code has
1124 been generated. */
1125
1126 void
1127 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1128 const char *file ATTRIBUTE_UNUSED)
1129 {
1130 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1131
1132 /* Output a label to mark the end of the prologue code generated for
1133 this function. */
1134 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1135 current_function_funcdef_no);
1136 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1137 current_function_funcdef_no);
1138 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1139 }
1140
1141 /* Output a marker (i.e. a label) for the beginning of the generated code
1142 for a function epilogue. This gets called *before* the epilogue code has
1143 been generated. */
1144
1145 void
1146 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1147 const char *file ATTRIBUTE_UNUSED)
1148 {
1149 dw_fde_ref fde = cfun->fde;
1150 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1151
1152 if (fde->dw_fde_vms_begin_epilogue)
1153 return;
1154
1155 /* Output a label to mark the beginning of the epilogue code generated
1156 for this function. */
1157 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1158 current_function_funcdef_no);
1159 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1160 current_function_funcdef_no);
1161 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1162 }
1163
1164 /* Output a marker (i.e. a label) for the absolute end of the generated code
1165 for a function definition. This gets called *after* the epilogue code has
1166 been generated. */
1167
1168 void
1169 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1170 const char *file ATTRIBUTE_UNUSED)
1171 {
1172 dw_fde_ref fde;
1173 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1174
1175 last_var_location_insn = NULL;
1176 cached_next_real_insn = NULL;
1177
1178 if (dwarf2out_do_cfi_asm ())
1179 fprintf (asm_out_file, "\t.cfi_endproc\n");
1180
1181 /* Output a label to mark the endpoint of the code generated for this
1182 function. */
1183 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1184 current_function_funcdef_no);
1185 ASM_OUTPUT_LABEL (asm_out_file, label);
1186 fde = cfun->fde;
1187 gcc_assert (fde != NULL);
1188 if (fde->dw_fde_second_begin == NULL)
1189 fde->dw_fde_end = xstrdup (label);
1190 }
1191
1192 void
1193 dwarf2out_frame_finish (void)
1194 {
1195 /* Output call frame information. */
1196 if (targetm.debug_unwind_info () == UI_DWARF2)
1197 output_call_frame_info (0);
1198
1199 /* Output another copy for the unwinder. */
1200 if (do_eh_frame)
1201 output_call_frame_info (1);
1202 }
1203
1204 /* Note that the current function section is being used for code. */
1205
1206 static void
1207 dwarf2out_note_section_used (void)
1208 {
1209 section *sec = current_function_section ();
1210 if (sec == text_section)
1211 text_section_used = true;
1212 else if (sec == cold_text_section)
1213 cold_text_section_used = true;
1214 }
1215
1216 static void var_location_switch_text_section (void);
1217 static void set_cur_line_info_table (section *);
1218
1219 void
1220 dwarf2out_switch_text_section (void)
1221 {
1222 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1223 section *sect;
1224 dw_fde_ref fde = cfun->fde;
1225
1226 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1227
1228 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_SECOND_SECT_LABEL,
1229 current_function_funcdef_no);
1230
1231 fde->dw_fde_second_begin = ggc_strdup (label);
1232 if (!in_cold_section_p)
1233 {
1234 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1235 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1236 }
1237 else
1238 {
1239 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1240 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1241 }
1242 have_multiple_function_sections = true;
1243
1244 /* There is no need to mark used sections when not debugging. */
1245 if (cold_text_section != NULL)
1246 dwarf2out_note_section_used ();
1247
1248 if (dwarf2out_do_cfi_asm ())
1249 fprintf (asm_out_file, "\t.cfi_endproc\n");
1250
1251 /* Now do the real section switch. */
1252 sect = current_function_section ();
1253 switch_to_section (sect);
1254
1255 fde->second_in_std_section
1256 = (sect == text_section
1257 || (cold_text_section && sect == cold_text_section));
1258
1259 if (dwarf2out_do_cfi_asm ())
1260 dwarf2out_do_cfi_startproc (true);
1261
1262 var_location_switch_text_section ();
1263
1264 if (cold_text_section != NULL)
1265 set_cur_line_info_table (sect);
1266 }
1267 \f
1268 /* And now, the subset of the debugging information support code necessary
1269 for emitting location expressions. */
1270
1271 /* Data about a single source file. */
1272 struct GTY((for_user)) dwarf_file_data {
1273 const char * filename;
1274 int emitted_number;
1275 };
1276
1277 /* Describe an entry into the .debug_addr section. */
1278
1279 enum ate_kind {
1280 ate_kind_rtx,
1281 ate_kind_rtx_dtprel,
1282 ate_kind_label
1283 };
1284
1285 struct GTY((for_user)) addr_table_entry {
1286 enum ate_kind kind;
1287 unsigned int refcount;
1288 unsigned int index;
1289 union addr_table_entry_struct_union
1290 {
1291 rtx GTY ((tag ("0"))) rtl;
1292 char * GTY ((tag ("1"))) label;
1293 }
1294 GTY ((desc ("%1.kind"))) addr;
1295 };
1296
1297 typedef unsigned int var_loc_view;
1298
1299 /* Location lists are ranges + location descriptions for that range,
1300 so you can track variables that are in different places over
1301 their entire life. */
1302 typedef struct GTY(()) dw_loc_list_struct {
1303 dw_loc_list_ref dw_loc_next;
1304 const char *begin; /* Label and addr_entry for start of range */
1305 addr_table_entry *begin_entry;
1306 const char *end; /* Label for end of range */
1307 char *ll_symbol; /* Label for beginning of location list.
1308 Only on head of list. */
1309 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1310 const char *section; /* Section this loclist is relative to */
1311 dw_loc_descr_ref expr;
1312 var_loc_view vbegin, vend;
1313 hashval_t hash;
1314 /* True if all addresses in this and subsequent lists are known to be
1315 resolved. */
1316 bool resolved_addr;
1317 /* True if this list has been replaced by dw_loc_next. */
1318 bool replaced;
1319 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1320 section. */
1321 unsigned char emitted : 1;
1322 /* True if hash field is index rather than hash value. */
1323 unsigned char num_assigned : 1;
1324 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1325 unsigned char offset_emitted : 1;
1326 /* True if note_variable_value_in_expr has been called on it. */
1327 unsigned char noted_variable_value : 1;
1328 /* True if the range should be emitted even if begin and end
1329 are the same. */
1330 bool force;
1331 } dw_loc_list_node;
1332
1333 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1334 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1335
1336 /* Convert a DWARF stack opcode into its string name. */
1337
1338 static const char *
1339 dwarf_stack_op_name (unsigned int op)
1340 {
1341 const char *name = get_DW_OP_name (op);
1342
1343 if (name != NULL)
1344 return name;
1345
1346 return "OP_<unknown>";
1347 }
1348
1349 /* Return TRUE iff we're to output location view lists as a separate
1350 attribute next to the location lists, as an extension compatible
1351 with DWARF 2 and above. */
1352
1353 static inline bool
1354 dwarf2out_locviews_in_attribute ()
1355 {
1356 return debug_variable_location_views == 1;
1357 }
1358
1359 /* Return TRUE iff we're to output location view lists as part of the
1360 location lists, as proposed for standardization after DWARF 5. */
1361
1362 static inline bool
1363 dwarf2out_locviews_in_loclist ()
1364 {
1365 #ifndef DW_LLE_view_pair
1366 return false;
1367 #else
1368 return debug_variable_location_views == -1;
1369 #endif
1370 }
1371
1372 /* Return a pointer to a newly allocated location description. Location
1373 descriptions are simple expression terms that can be strung
1374 together to form more complicated location (address) descriptions. */
1375
1376 static inline dw_loc_descr_ref
1377 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1378 unsigned HOST_WIDE_INT oprnd2)
1379 {
1380 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1381
1382 descr->dw_loc_opc = op;
1383 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1384 descr->dw_loc_oprnd1.val_entry = NULL;
1385 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1386 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1387 descr->dw_loc_oprnd2.val_entry = NULL;
1388 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1389
1390 return descr;
1391 }
1392
1393 /* Add a location description term to a location description expression. */
1394
1395 static inline void
1396 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1397 {
1398 dw_loc_descr_ref *d;
1399
1400 /* Find the end of the chain. */
1401 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1402 ;
1403
1404 *d = descr;
1405 }
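/* Usage sketch (illustrative only, not called anywhere): build the
   two-op expression "DW_OP_fbreg -16; DW_OP_deref", i.e. fetch the value
   stored 16 bytes below the frame base, the same way later code in this
   file chains operations together.  */
#if 0
  dw_loc_descr_ref expr = new_loc_descr (DW_OP_fbreg, -16, 0);
  add_loc_descr (&expr, new_loc_descr (DW_OP_deref, 0, 0));
#endif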
1406
1407 /* Compare two location operands for exact equality. */
1408
1409 static bool
1410 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1411 {
1412 if (a->val_class != b->val_class)
1413 return false;
1414 switch (a->val_class)
1415 {
1416 case dw_val_class_none:
1417 return true;
1418 case dw_val_class_addr:
1419 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1420
1421 case dw_val_class_offset:
1422 case dw_val_class_unsigned_const:
1423 case dw_val_class_const:
1424 case dw_val_class_unsigned_const_implicit:
1425 case dw_val_class_const_implicit:
1426 case dw_val_class_range_list:
1427 /* These are all HOST_WIDE_INT, signed or unsigned. */
1428 return a->v.val_unsigned == b->v.val_unsigned;
1429
1430 case dw_val_class_loc:
1431 return a->v.val_loc == b->v.val_loc;
1432 case dw_val_class_loc_list:
1433 return a->v.val_loc_list == b->v.val_loc_list;
1434 case dw_val_class_view_list:
1435 return a->v.val_view_list == b->v.val_view_list;
1436 case dw_val_class_die_ref:
1437 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1438 case dw_val_class_fde_ref:
1439 return a->v.val_fde_index == b->v.val_fde_index;
1440 case dw_val_class_symview:
1441 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1442 case dw_val_class_lbl_id:
1443 case dw_val_class_lineptr:
1444 case dw_val_class_macptr:
1445 case dw_val_class_loclistsptr:
1446 case dw_val_class_high_pc:
1447 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1448 case dw_val_class_str:
1449 return a->v.val_str == b->v.val_str;
1450 case dw_val_class_flag:
1451 return a->v.val_flag == b->v.val_flag;
1452 case dw_val_class_file:
1453 case dw_val_class_file_implicit:
1454 return a->v.val_file == b->v.val_file;
1455 case dw_val_class_decl_ref:
1456 return a->v.val_decl_ref == b->v.val_decl_ref;
1457
1458 case dw_val_class_const_double:
1459 return (a->v.val_double.high == b->v.val_double.high
1460 && a->v.val_double.low == b->v.val_double.low);
1461
1462 case dw_val_class_wide_int:
1463 return *a->v.val_wide == *b->v.val_wide;
1464
1465 case dw_val_class_vec:
1466 {
1467 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1468 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1469
1470 return (a_len == b_len
1471 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1472 }
1473
1474 case dw_val_class_data8:
1475 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1476
1477 case dw_val_class_vms_delta:
1478 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1479 && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1480
1481 case dw_val_class_discr_value:
1482 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1483 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1484 case dw_val_class_discr_list:
1485 /* It makes no sense comparing two discriminant value lists. */
1486 return false;
1487 }
1488 gcc_unreachable ();
1489 }
1490
1491 /* Compare two location atoms for exact equality. */
1492
1493 static bool
1494 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1495 {
1496 if (a->dw_loc_opc != b->dw_loc_opc)
1497 return false;
1498
1499 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1500 address size, but since we always allocate cleared storage it
1501 should be zero for other types of locations. */
1502 if (a->dtprel != b->dtprel)
1503 return false;
1504
1505 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1506 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1507 }
1508
1509 /* Compare two complete location expressions for exact equality. */
1510
1511 bool
1512 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1513 {
1514 while (1)
1515 {
1516 if (a == b)
1517 return true;
1518 if (a == NULL || b == NULL)
1519 return false;
1520 if (!loc_descr_equal_p_1 (a, b))
1521 return false;
1522
1523 a = a->dw_loc_next;
1524 b = b->dw_loc_next;
1525 }
1526 }
1527
1528
1529 /* Add a constant POLY_OFFSET to a location expression. */
1530
1531 static void
1532 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1533 {
1534 dw_loc_descr_ref loc;
1535 HOST_WIDE_INT *p;
1536
1537 gcc_assert (*list_head != NULL);
1538
1539 if (known_eq (poly_offset, 0))
1540 return;
1541
1542 /* Find the end of the chain. */
1543 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1544 ;
1545
1546 HOST_WIDE_INT offset;
1547 if (!poly_offset.is_constant (&offset))
1548 {
1549 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1550 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1551 return;
1552 }
1553
1554 p = NULL;
1555 if (loc->dw_loc_opc == DW_OP_fbreg
1556 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1557 p = &loc->dw_loc_oprnd1.v.val_int;
1558 else if (loc->dw_loc_opc == DW_OP_bregx)
1559 p = &loc->dw_loc_oprnd2.v.val_int;
1560
1561 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1562 offset. Don't optimize if a signed integer overflow would happen. */
1563 if (p != NULL
1564 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1565 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1566 *p += offset;
1567
1568 else if (offset > 0)
1569 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1570
1571 else
1572 {
1573 loc->dw_loc_next
1574 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1575 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1576 }
1577 }
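/* Example, for illustration: if the expression currently ends in
   DW_OP_fbreg -16, adding the constant 8 just rewrites that operand to
   DW_OP_fbreg -8; if it ends in anything else, e.g. DW_OP_addr, a
   DW_OP_plus_uconst 8 is appended instead (negative offsets append an
   unsigned constant followed by DW_OP_minus).  */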
1578
1579 /* Return a pointer to a newly allocated location description for
1580 REG and OFFSET. */
1581
1582 static inline dw_loc_descr_ref
1583 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1584 {
1585 HOST_WIDE_INT const_offset;
1586 if (offset.is_constant (&const_offset))
1587 {
1588 if (reg <= 31)
1589 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1590 const_offset, 0);
1591 else
1592 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1593 }
1594 else
1595 {
1596 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1597 loc_descr_plus_const (&ret, offset);
1598 return ret;
1599 }
1600 }
1601
1602 /* Add a constant OFFSET to a location list. */
1603
1604 static void
1605 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1606 {
1607 dw_loc_list_ref d;
1608 for (d = list_head; d != NULL; d = d->dw_loc_next)
1609 loc_descr_plus_const (&d->expr, offset);
1610 }
1611
1612 #define DWARF_REF_SIZE \
1613 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1614
1615 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1616 In DWARF 4 and earlier that is DW_FORM_data8 (64 bits); in DWARF 5 it
1617 is DW_FORM_data16 (128 bits). */
1618 #define DWARF_LARGEST_DATA_FORM_BITS \
1619 (dwarf_version >= 5 ? 128 : 64)
1620
1621 /* Utility inline function for constructing ops that were GNU extensions
1622 before DWARF 5. */
1623 static inline enum dwarf_location_atom
1624 dwarf_OP (enum dwarf_location_atom op)
1625 {
1626 switch (op)
1627 {
1628 case DW_OP_implicit_pointer:
1629 if (dwarf_version < 5)
1630 return DW_OP_GNU_implicit_pointer;
1631 break;
1632
1633 case DW_OP_entry_value:
1634 if (dwarf_version < 5)
1635 return DW_OP_GNU_entry_value;
1636 break;
1637
1638 case DW_OP_const_type:
1639 if (dwarf_version < 5)
1640 return DW_OP_GNU_const_type;
1641 break;
1642
1643 case DW_OP_regval_type:
1644 if (dwarf_version < 5)
1645 return DW_OP_GNU_regval_type;
1646 break;
1647
1648 case DW_OP_deref_type:
1649 if (dwarf_version < 5)
1650 return DW_OP_GNU_deref_type;
1651 break;
1652
1653 case DW_OP_convert:
1654 if (dwarf_version < 5)
1655 return DW_OP_GNU_convert;
1656 break;
1657
1658 case DW_OP_reinterpret:
1659 if (dwarf_version < 5)
1660 return DW_OP_GNU_reinterpret;
1661 break;
1662
1663 case DW_OP_addrx:
1664 if (dwarf_version < 5)
1665 return DW_OP_GNU_addr_index;
1666 break;
1667
1668 case DW_OP_constx:
1669 if (dwarf_version < 5)
1670 return DW_OP_GNU_const_index;
1671 break;
1672
1673 default:
1674 break;
1675 }
1676 return op;
1677 }
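/* Example, for illustration: dwarf_OP (DW_OP_entry_value) returns
   DW_OP_GNU_entry_value for -gdwarf-4 and earlier but DW_OP_entry_value
   itself for -gdwarf-5, so callers can always ask for the standard
   opcode and get the spelling appropriate to the requested version.  */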
1678
1679 /* Similarly for attributes. */
1680 static inline enum dwarf_attribute
1681 dwarf_AT (enum dwarf_attribute at)
1682 {
1683 switch (at)
1684 {
1685 case DW_AT_call_return_pc:
1686 if (dwarf_version < 5)
1687 return DW_AT_low_pc;
1688 break;
1689
1690 case DW_AT_call_tail_call:
1691 if (dwarf_version < 5)
1692 return DW_AT_GNU_tail_call;
1693 break;
1694
1695 case DW_AT_call_origin:
1696 if (dwarf_version < 5)
1697 return DW_AT_abstract_origin;
1698 break;
1699
1700 case DW_AT_call_target:
1701 if (dwarf_version < 5)
1702 return DW_AT_GNU_call_site_target;
1703 break;
1704
1705 case DW_AT_call_target_clobbered:
1706 if (dwarf_version < 5)
1707 return DW_AT_GNU_call_site_target_clobbered;
1708 break;
1709
1710 case DW_AT_call_parameter:
1711 if (dwarf_version < 5)
1712 return DW_AT_abstract_origin;
1713 break;
1714
1715 case DW_AT_call_value:
1716 if (dwarf_version < 5)
1717 return DW_AT_GNU_call_site_value;
1718 break;
1719
1720 case DW_AT_call_data_value:
1721 if (dwarf_version < 5)
1722 return DW_AT_GNU_call_site_data_value;
1723 break;
1724
1725 case DW_AT_call_all_calls:
1726 if (dwarf_version < 5)
1727 return DW_AT_GNU_all_call_sites;
1728 break;
1729
1730 case DW_AT_call_all_tail_calls:
1731 if (dwarf_version < 5)
1732 return DW_AT_GNU_all_tail_call_sites;
1733 break;
1734
1735 case DW_AT_dwo_name:
1736 if (dwarf_version < 5)
1737 return DW_AT_GNU_dwo_name;
1738 break;
1739
1740 case DW_AT_addr_base:
1741 if (dwarf_version < 5)
1742 return DW_AT_GNU_addr_base;
1743 break;
1744
1745 default:
1746 break;
1747 }
1748 return at;
1749 }
1750
1751 /* And similarly for tags. */
1752 static inline enum dwarf_tag
1753 dwarf_TAG (enum dwarf_tag tag)
1754 {
1755 switch (tag)
1756 {
1757 case DW_TAG_call_site:
1758 if (dwarf_version < 5)
1759 return DW_TAG_GNU_call_site;
1760 break;
1761
1762 case DW_TAG_call_site_parameter:
1763 if (dwarf_version < 5)
1764 return DW_TAG_GNU_call_site_parameter;
1765 break;
1766
1767 default:
1768 break;
1769 }
1770 return tag;
1771 }
1772
1773 /* And similarly for forms. */
1774 static inline enum dwarf_form
1775 dwarf_FORM (enum dwarf_form form)
1776 {
1777 switch (form)
1778 {
1779 case DW_FORM_addrx:
1780 if (dwarf_version < 5)
1781 return DW_FORM_GNU_addr_index;
1782 break;
1783
1784 case DW_FORM_strx:
1785 if (dwarf_version < 5)
1786 return DW_FORM_GNU_str_index;
1787 break;
1788
1789 default:
1790 break;
1791 }
1792 return form;
1793 }
1794
1795 static unsigned long int get_base_type_offset (dw_die_ref);
1796
1797 /* Return the size of a location descriptor. */
1798
1799 static unsigned long
1800 size_of_loc_descr (dw_loc_descr_ref loc)
1801 {
1802 unsigned long size = 1;
1803
1804 switch (loc->dw_loc_opc)
1805 {
1806 case DW_OP_addr:
1807 size += DWARF2_ADDR_SIZE;
1808 break;
1809 case DW_OP_GNU_addr_index:
1810 case DW_OP_addrx:
1811 case DW_OP_GNU_const_index:
1812 case DW_OP_constx:
1813 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1814 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1815 break;
1816 case DW_OP_const1u:
1817 case DW_OP_const1s:
1818 size += 1;
1819 break;
1820 case DW_OP_const2u:
1821 case DW_OP_const2s:
1822 size += 2;
1823 break;
1824 case DW_OP_const4u:
1825 case DW_OP_const4s:
1826 size += 4;
1827 break;
1828 case DW_OP_const8u:
1829 case DW_OP_const8s:
1830 size += 8;
1831 break;
1832 case DW_OP_constu:
1833 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1834 break;
1835 case DW_OP_consts:
1836 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1837 break;
1838 case DW_OP_pick:
1839 size += 1;
1840 break;
1841 case DW_OP_plus_uconst:
1842 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1843 break;
1844 case DW_OP_skip:
1845 case DW_OP_bra:
1846 size += 2;
1847 break;
1848 case DW_OP_breg0:
1849 case DW_OP_breg1:
1850 case DW_OP_breg2:
1851 case DW_OP_breg3:
1852 case DW_OP_breg4:
1853 case DW_OP_breg5:
1854 case DW_OP_breg6:
1855 case DW_OP_breg7:
1856 case DW_OP_breg8:
1857 case DW_OP_breg9:
1858 case DW_OP_breg10:
1859 case DW_OP_breg11:
1860 case DW_OP_breg12:
1861 case DW_OP_breg13:
1862 case DW_OP_breg14:
1863 case DW_OP_breg15:
1864 case DW_OP_breg16:
1865 case DW_OP_breg17:
1866 case DW_OP_breg18:
1867 case DW_OP_breg19:
1868 case DW_OP_breg20:
1869 case DW_OP_breg21:
1870 case DW_OP_breg22:
1871 case DW_OP_breg23:
1872 case DW_OP_breg24:
1873 case DW_OP_breg25:
1874 case DW_OP_breg26:
1875 case DW_OP_breg27:
1876 case DW_OP_breg28:
1877 case DW_OP_breg29:
1878 case DW_OP_breg30:
1879 case DW_OP_breg31:
1880 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1881 break;
1882 case DW_OP_regx:
1883 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1884 break;
1885 case DW_OP_fbreg:
1886 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1887 break;
1888 case DW_OP_bregx:
1889 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1890 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1891 break;
1892 case DW_OP_piece:
1893 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1894 break;
1895 case DW_OP_bit_piece:
1896 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1897 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1898 break;
1899 case DW_OP_deref_size:
1900 case DW_OP_xderef_size:
1901 size += 1;
1902 break;
1903 case DW_OP_call2:
1904 size += 2;
1905 break;
1906 case DW_OP_call4:
1907 size += 4;
1908 break;
1909 case DW_OP_call_ref:
1910 case DW_OP_GNU_variable_value:
1911 size += DWARF_REF_SIZE;
1912 break;
1913 case DW_OP_implicit_value:
1914 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1915 + loc->dw_loc_oprnd1.v.val_unsigned;
1916 break;
1917 case DW_OP_implicit_pointer:
1918 case DW_OP_GNU_implicit_pointer:
1919 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1920 break;
1921 case DW_OP_entry_value:
1922 case DW_OP_GNU_entry_value:
1923 {
1924 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1925 size += size_of_uleb128 (op_size) + op_size;
1926 break;
1927 }
1928 case DW_OP_const_type:
1929 case DW_OP_GNU_const_type:
1930 {
1931 unsigned long o
1932 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1933 size += size_of_uleb128 (o) + 1;
1934 switch (loc->dw_loc_oprnd2.val_class)
1935 {
1936 case dw_val_class_vec:
1937 size += loc->dw_loc_oprnd2.v.val_vec.length
1938 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1939 break;
1940 case dw_val_class_const:
1941 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1942 break;
1943 case dw_val_class_const_double:
1944 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1945 break;
1946 case dw_val_class_wide_int:
1947 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1948 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1949 break;
1950 default:
1951 gcc_unreachable ();
1952 }
1953 break;
1954 }
1955 case DW_OP_regval_type:
1956 case DW_OP_GNU_regval_type:
1957 {
1958 unsigned long o
1959 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1960 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1961 + size_of_uleb128 (o);
1962 }
1963 break;
1964 case DW_OP_deref_type:
1965 case DW_OP_GNU_deref_type:
1966 {
1967 unsigned long o
1968 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1969 size += 1 + size_of_uleb128 (o);
1970 }
1971 break;
1972 case DW_OP_convert:
1973 case DW_OP_reinterpret:
1974 case DW_OP_GNU_convert:
1975 case DW_OP_GNU_reinterpret:
1976 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1977 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1978 else
1979 {
1980 unsigned long o
1981 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1982 size += size_of_uleb128 (o);
1983 }
1984 break;
1985 case DW_OP_GNU_parameter_ref:
1986 size += 4;
1987 break;
1988 default:
1989 break;
1990 }
1991
1992 return size;
1993 }
1994
1995 /* Return the size of a series of location descriptors. */
1996
1997 unsigned long
1998 size_of_locs (dw_loc_descr_ref loc)
1999 {
2000 dw_loc_descr_ref l;
2001 unsigned long size;
2002
2003 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
2004 field, to avoid writing to a PCH file. */
2005 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2006 {
2007 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
2008 break;
2009 size += size_of_loc_descr (l);
2010 }
2011 if (! l)
2012 return size;
2013
2014 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2015 {
2016 l->dw_loc_addr = size;
2017 size += size_of_loc_descr (l);
2018 }
2019
2020 return size;
2021 }
2022
2023 /* Return the size of the value in a DW_AT_discr_value attribute. */
2024
2025 static int
2026 size_of_discr_value (dw_discr_value *discr_value)
2027 {
2028 if (discr_value->pos)
2029 return size_of_uleb128 (discr_value->v.uval);
2030 else
2031 return size_of_sleb128 (discr_value->v.sval);
2032 }
2033
2034 /* Return the size of the value in a DW_AT_discr_list attribute. */
2035
2036 static int
2037 size_of_discr_list (dw_discr_list_ref discr_list)
2038 {
2039 int size = 0;
2040
2041 for (dw_discr_list_ref list = discr_list;
2042 list != NULL;
2043 list = list->dw_discr_next)
2044 {
2045 /* One byte for the discriminant value descriptor, and then one or two
2046 LEB128 numbers, depending on whether it's a single case label or a
2047 range label. */
2048 size += 1;
2049 size += size_of_discr_value (&list->dw_discr_lower_bound);
2050 if (list->dw_discr_range != 0)
2051 size += size_of_discr_value (&list->dw_discr_upper_bound);
2052 }
2053 return size;
2054 }
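/* A worked example (illustrative only): a discriminant list with one
   single-value label whose unsigned value is 5 and one range label
   covering 10 .. 20 needs 1 + 1 = 2 bytes for the first entry and
   1 + 1 + 1 = 3 bytes for the second (a descriptor byte plus one-byte
   ULEB128 numbers), so size_of_discr_list returns 5.  */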
2055
2056 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2057 static void get_ref_die_offset_label (char *, dw_die_ref);
2058 static unsigned long int get_ref_die_offset (dw_die_ref);
2059
2060 /* Output location description stack opcode's operands (if any).
2061 The for_eh_or_skip parameter controls whether register numbers are
2062 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2063 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2064 info). This should be suppressed for the cases that have not been converted
2065 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2066
2067 static void
2068 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2069 {
2070 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2071 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2072
2073 switch (loc->dw_loc_opc)
2074 {
2075 #ifdef DWARF2_DEBUGGING_INFO
2076 case DW_OP_const2u:
2077 case DW_OP_const2s:
2078 dw2_asm_output_data (2, val1->v.val_int, NULL);
2079 break;
2080 case DW_OP_const4u:
2081 if (loc->dtprel)
2082 {
2083 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2084 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2085 val1->v.val_addr);
2086 fputc ('\n', asm_out_file);
2087 break;
2088 }
2089 /* FALLTHRU */
2090 case DW_OP_const4s:
2091 dw2_asm_output_data (4, val1->v.val_int, NULL);
2092 break;
2093 case DW_OP_const8u:
2094 if (loc->dtprel)
2095 {
2096 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2097 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2098 val1->v.val_addr);
2099 fputc ('\n', asm_out_file);
2100 break;
2101 }
2102 /* FALLTHRU */
2103 case DW_OP_const8s:
2104 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2105 dw2_asm_output_data (8, val1->v.val_int, NULL);
2106 break;
2107 case DW_OP_skip:
2108 case DW_OP_bra:
2109 {
2110 int offset;
2111
2112 gcc_assert (val1->val_class == dw_val_class_loc);
2113 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2114
2115 dw2_asm_output_data (2, offset, NULL);
2116 }
2117 break;
2118 case DW_OP_implicit_value:
2119 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2120 switch (val2->val_class)
2121 {
2122 case dw_val_class_const:
2123 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2124 break;
2125 case dw_val_class_vec:
2126 {
2127 unsigned int elt_size = val2->v.val_vec.elt_size;
2128 unsigned int len = val2->v.val_vec.length;
2129 unsigned int i;
2130 unsigned char *p;
2131
2132 if (elt_size > sizeof (HOST_WIDE_INT))
2133 {
2134 elt_size /= 2;
2135 len *= 2;
2136 }
2137 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2138 i < len;
2139 i++, p += elt_size)
2140 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2141 "fp or vector constant word %u", i);
2142 }
2143 break;
2144 case dw_val_class_const_double:
2145 {
2146 unsigned HOST_WIDE_INT first, second;
2147
2148 if (WORDS_BIG_ENDIAN)
2149 {
2150 first = val2->v.val_double.high;
2151 second = val2->v.val_double.low;
2152 }
2153 else
2154 {
2155 first = val2->v.val_double.low;
2156 second = val2->v.val_double.high;
2157 }
2158 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2159 first, NULL);
2160 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2161 second, NULL);
2162 }
2163 break;
2164 case dw_val_class_wide_int:
2165 {
2166 int i;
2167 int len = get_full_len (*val2->v.val_wide);
2168 if (WORDS_BIG_ENDIAN)
2169 for (i = len - 1; i >= 0; --i)
2170 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2171 val2->v.val_wide->elt (i), NULL);
2172 else
2173 for (i = 0; i < len; ++i)
2174 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2175 val2->v.val_wide->elt (i), NULL);
2176 }
2177 break;
2178 case dw_val_class_addr:
2179 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2180 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2181 break;
2182 default:
2183 gcc_unreachable ();
2184 }
2185 break;
2186 #else
2187 case DW_OP_const2u:
2188 case DW_OP_const2s:
2189 case DW_OP_const4u:
2190 case DW_OP_const4s:
2191 case DW_OP_const8u:
2192 case DW_OP_const8s:
2193 case DW_OP_skip:
2194 case DW_OP_bra:
2195 case DW_OP_implicit_value:
2196 /* We currently don't make any attempt to make sure these are
2197 aligned properly like we do for the main unwind info, so
2198 don't support emitting things larger than a byte if we're
2199 only doing unwinding. */
2200 gcc_unreachable ();
2201 #endif
2202 case DW_OP_const1u:
2203 case DW_OP_const1s:
2204 dw2_asm_output_data (1, val1->v.val_int, NULL);
2205 break;
2206 case DW_OP_constu:
2207 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2208 break;
2209 case DW_OP_consts:
2210 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2211 break;
2212 case DW_OP_pick:
2213 dw2_asm_output_data (1, val1->v.val_int, NULL);
2214 break;
2215 case DW_OP_plus_uconst:
2216 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2217 break;
2218 case DW_OP_breg0:
2219 case DW_OP_breg1:
2220 case DW_OP_breg2:
2221 case DW_OP_breg3:
2222 case DW_OP_breg4:
2223 case DW_OP_breg5:
2224 case DW_OP_breg6:
2225 case DW_OP_breg7:
2226 case DW_OP_breg8:
2227 case DW_OP_breg9:
2228 case DW_OP_breg10:
2229 case DW_OP_breg11:
2230 case DW_OP_breg12:
2231 case DW_OP_breg13:
2232 case DW_OP_breg14:
2233 case DW_OP_breg15:
2234 case DW_OP_breg16:
2235 case DW_OP_breg17:
2236 case DW_OP_breg18:
2237 case DW_OP_breg19:
2238 case DW_OP_breg20:
2239 case DW_OP_breg21:
2240 case DW_OP_breg22:
2241 case DW_OP_breg23:
2242 case DW_OP_breg24:
2243 case DW_OP_breg25:
2244 case DW_OP_breg26:
2245 case DW_OP_breg27:
2246 case DW_OP_breg28:
2247 case DW_OP_breg29:
2248 case DW_OP_breg30:
2249 case DW_OP_breg31:
2250 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2251 break;
2252 case DW_OP_regx:
2253 {
2254 unsigned r = val1->v.val_unsigned;
2255 if (for_eh_or_skip >= 0)
2256 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2257 gcc_assert (size_of_uleb128 (r)
2258 == size_of_uleb128 (val1->v.val_unsigned));
2259 dw2_asm_output_data_uleb128 (r, NULL);
2260 }
2261 break;
2262 case DW_OP_fbreg:
2263 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2264 break;
2265 case DW_OP_bregx:
2266 {
2267 unsigned r = val1->v.val_unsigned;
2268 if (for_eh_or_skip >= 0)
2269 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2270 gcc_assert (size_of_uleb128 (r)
2271 == size_of_uleb128 (val1->v.val_unsigned));
2272 dw2_asm_output_data_uleb128 (r, NULL);
2273 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2274 }
2275 break;
2276 case DW_OP_piece:
2277 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2278 break;
2279 case DW_OP_bit_piece:
2280 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2281 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2282 break;
2283 case DW_OP_deref_size:
2284 case DW_OP_xderef_size:
2285 dw2_asm_output_data (1, val1->v.val_int, NULL);
2286 break;
2287
2288 case DW_OP_addr:
2289 if (loc->dtprel)
2290 {
2291 if (targetm.asm_out.output_dwarf_dtprel)
2292 {
2293 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2294 DWARF2_ADDR_SIZE,
2295 val1->v.val_addr);
2296 fputc ('\n', asm_out_file);
2297 }
2298 else
2299 gcc_unreachable ();
2300 }
2301 else
2302 {
2303 #ifdef DWARF2_DEBUGGING_INFO
2304 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2305 #else
2306 gcc_unreachable ();
2307 #endif
2308 }
2309 break;
2310
2311 case DW_OP_GNU_addr_index:
2312 case DW_OP_addrx:
2313 case DW_OP_GNU_const_index:
2314 case DW_OP_constx:
2315 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2316 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2317 "(index into .debug_addr)");
2318 break;
2319
2320 case DW_OP_call2:
2321 case DW_OP_call4:
2322 {
2323 unsigned long die_offset
2324 = get_ref_die_offset (val1->v.val_die_ref.die);
2325 /* Make sure the offset has been computed and that we can encode it as
2326 an operand. */
2327 gcc_assert (die_offset > 0
2328 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2329 ? 0xffff
2330 : 0xffffffff));
2331 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2332 die_offset, NULL);
2333 }
2334 break;
2335
2336 case DW_OP_call_ref:
2337 case DW_OP_GNU_variable_value:
2338 {
2339 char label[MAX_ARTIFICIAL_LABEL_BYTES
2340 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2341 gcc_assert (val1->val_class == dw_val_class_die_ref);
2342 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2343 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2344 }
2345 break;
2346
2347 case DW_OP_implicit_pointer:
2348 case DW_OP_GNU_implicit_pointer:
2349 {
2350 char label[MAX_ARTIFICIAL_LABEL_BYTES
2351 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2352 gcc_assert (val1->val_class == dw_val_class_die_ref);
2353 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2354 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2355 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2356 }
2357 break;
2358
2359 case DW_OP_entry_value:
2360 case DW_OP_GNU_entry_value:
2361 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2362 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2363 break;
2364
2365 case DW_OP_const_type:
2366 case DW_OP_GNU_const_type:
2367 {
2368 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2369 gcc_assert (o);
2370 dw2_asm_output_data_uleb128 (o, NULL);
2371 switch (val2->val_class)
2372 {
2373 case dw_val_class_const:
2374 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2375 dw2_asm_output_data (1, l, NULL);
2376 dw2_asm_output_data (l, val2->v.val_int, NULL);
2377 break;
2378 case dw_val_class_vec:
2379 {
2380 unsigned int elt_size = val2->v.val_vec.elt_size;
2381 unsigned int len = val2->v.val_vec.length;
2382 unsigned int i;
2383 unsigned char *p;
2384
2385 l = len * elt_size;
2386 dw2_asm_output_data (1, l, NULL);
2387 if (elt_size > sizeof (HOST_WIDE_INT))
2388 {
2389 elt_size /= 2;
2390 len *= 2;
2391 }
2392 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2393 i < len;
2394 i++, p += elt_size)
2395 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2396 "fp or vector constant word %u", i);
2397 }
2398 break;
2399 case dw_val_class_const_double:
2400 {
2401 unsigned HOST_WIDE_INT first, second;
2402 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2403
2404 dw2_asm_output_data (1, 2 * l, NULL);
2405 if (WORDS_BIG_ENDIAN)
2406 {
2407 first = val2->v.val_double.high;
2408 second = val2->v.val_double.low;
2409 }
2410 else
2411 {
2412 first = val2->v.val_double.low;
2413 second = val2->v.val_double.high;
2414 }
2415 dw2_asm_output_data (l, first, NULL);
2416 dw2_asm_output_data (l, second, NULL);
2417 }
2418 break;
2419 case dw_val_class_wide_int:
2420 {
2421 int i;
2422 int len = get_full_len (*val2->v.val_wide);
2423 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2424
2425 dw2_asm_output_data (1, len * l, NULL);
2426 if (WORDS_BIG_ENDIAN)
2427 for (i = len - 1; i >= 0; --i)
2428 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2429 else
2430 for (i = 0; i < len; ++i)
2431 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2432 }
2433 break;
2434 default:
2435 gcc_unreachable ();
2436 }
2437 }
2438 break;
2439 case DW_OP_regval_type:
2440 case DW_OP_GNU_regval_type:
2441 {
2442 unsigned r = val1->v.val_unsigned;
2443 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2444 gcc_assert (o);
2445 if (for_eh_or_skip >= 0)
2446 {
2447 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2448 gcc_assert (size_of_uleb128 (r)
2449 == size_of_uleb128 (val1->v.val_unsigned));
2450 }
2451 dw2_asm_output_data_uleb128 (r, NULL);
2452 dw2_asm_output_data_uleb128 (o, NULL);
2453 }
2454 break;
2455 case DW_OP_deref_type:
2456 case DW_OP_GNU_deref_type:
2457 {
2458 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2459 gcc_assert (o);
2460 dw2_asm_output_data (1, val1->v.val_int, NULL);
2461 dw2_asm_output_data_uleb128 (o, NULL);
2462 }
2463 break;
2464 case DW_OP_convert:
2465 case DW_OP_reinterpret:
2466 case DW_OP_GNU_convert:
2467 case DW_OP_GNU_reinterpret:
2468 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2469 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2470 else
2471 {
2472 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2473 gcc_assert (o);
2474 dw2_asm_output_data_uleb128 (o, NULL);
2475 }
2476 break;
2477
2478 case DW_OP_GNU_parameter_ref:
2479 {
2480 unsigned long o;
2481 gcc_assert (val1->val_class == dw_val_class_die_ref);
2482 o = get_ref_die_offset (val1->v.val_die_ref.die);
2483 dw2_asm_output_data (4, o, NULL);
2484 }
2485 break;
2486
2487 default:
2488 /* Other codes have no operands. */
2489 break;
2490 }
2491 }
2492
2493 /* Output a sequence of location operations.
2494 The for_eh_or_skip parameter controls whether register numbers are
2495 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2496 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2497 info). This should be suppressed for the cases that have not been converted
2498 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2499
2500 void
2501 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2502 {
2503 for (; loc != NULL; loc = loc->dw_loc_next)
2504 {
2505 enum dwarf_location_atom opc = loc->dw_loc_opc;
2506 /* Output the opcode. */
2507 if (for_eh_or_skip >= 0
2508 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2509 {
2510 unsigned r = (opc - DW_OP_breg0);
2511 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2512 gcc_assert (r <= 31);
2513 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2514 }
2515 else if (for_eh_or_skip >= 0
2516 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2517 {
2518 unsigned r = (opc - DW_OP_reg0);
2519 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2520 gcc_assert (r <= 31);
2521 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2522 }
2523
2524 dw2_asm_output_data (1, opc,
2525 "%s", dwarf_stack_op_name (opc));
2526
2527 /* Output the operand(s) (if any). */
2528 output_loc_operands (loc, for_eh_or_skip);
2529 }
2530 }
2531
2532 /* Output location description stack opcode's operands (if any).
2533 The output is single bytes on a line, suitable for .cfi_escape. */
2534
2535 static void
2536 output_loc_operands_raw (dw_loc_descr_ref loc)
2537 {
2538 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2539 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2540
2541 switch (loc->dw_loc_opc)
2542 {
2543 case DW_OP_addr:
2544 case DW_OP_GNU_addr_index:
2545 case DW_OP_addrx:
2546 case DW_OP_GNU_const_index:
2547 case DW_OP_constx:
2548 case DW_OP_implicit_value:
2549 /* We cannot output addresses in .cfi_escape, only bytes. */
2550 gcc_unreachable ();
2551
2552 case DW_OP_const1u:
2553 case DW_OP_const1s:
2554 case DW_OP_pick:
2555 case DW_OP_deref_size:
2556 case DW_OP_xderef_size:
2557 fputc (',', asm_out_file);
2558 dw2_asm_output_data_raw (1, val1->v.val_int);
2559 break;
2560
2561 case DW_OP_const2u:
2562 case DW_OP_const2s:
2563 fputc (',', asm_out_file);
2564 dw2_asm_output_data_raw (2, val1->v.val_int);
2565 break;
2566
2567 case DW_OP_const4u:
2568 case DW_OP_const4s:
2569 fputc (',', asm_out_file);
2570 dw2_asm_output_data_raw (4, val1->v.val_int);
2571 break;
2572
2573 case DW_OP_const8u:
2574 case DW_OP_const8s:
2575 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2576 fputc (',', asm_out_file);
2577 dw2_asm_output_data_raw (8, val1->v.val_int);
2578 break;
2579
2580 case DW_OP_skip:
2581 case DW_OP_bra:
2582 {
2583 int offset;
2584
2585 gcc_assert (val1->val_class == dw_val_class_loc);
2586 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2587
2588 fputc (',', asm_out_file);
2589 dw2_asm_output_data_raw (2, offset);
2590 }
2591 break;
2592
2593 case DW_OP_regx:
2594 {
2595 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2596 gcc_assert (size_of_uleb128 (r)
2597 == size_of_uleb128 (val1->v.val_unsigned));
2598 fputc (',', asm_out_file);
2599 dw2_asm_output_data_uleb128_raw (r);
2600 }
2601 break;
2602
2603 case DW_OP_constu:
2604 case DW_OP_plus_uconst:
2605 case DW_OP_piece:
2606 fputc (',', asm_out_file);
2607 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2608 break;
2609
2610 case DW_OP_bit_piece:
2611 fputc (',', asm_out_file);
2612 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2613 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2614 break;
2615
2616 case DW_OP_consts:
2617 case DW_OP_breg0:
2618 case DW_OP_breg1:
2619 case DW_OP_breg2:
2620 case DW_OP_breg3:
2621 case DW_OP_breg4:
2622 case DW_OP_breg5:
2623 case DW_OP_breg6:
2624 case DW_OP_breg7:
2625 case DW_OP_breg8:
2626 case DW_OP_breg9:
2627 case DW_OP_breg10:
2628 case DW_OP_breg11:
2629 case DW_OP_breg12:
2630 case DW_OP_breg13:
2631 case DW_OP_breg14:
2632 case DW_OP_breg15:
2633 case DW_OP_breg16:
2634 case DW_OP_breg17:
2635 case DW_OP_breg18:
2636 case DW_OP_breg19:
2637 case DW_OP_breg20:
2638 case DW_OP_breg21:
2639 case DW_OP_breg22:
2640 case DW_OP_breg23:
2641 case DW_OP_breg24:
2642 case DW_OP_breg25:
2643 case DW_OP_breg26:
2644 case DW_OP_breg27:
2645 case DW_OP_breg28:
2646 case DW_OP_breg29:
2647 case DW_OP_breg30:
2648 case DW_OP_breg31:
2649 case DW_OP_fbreg:
2650 fputc (',', asm_out_file);
2651 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2652 break;
2653
2654 case DW_OP_bregx:
2655 {
2656 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2657 gcc_assert (size_of_uleb128 (r)
2658 == size_of_uleb128 (val1->v.val_unsigned));
2659 fputc (',', asm_out_file);
2660 dw2_asm_output_data_uleb128_raw (r);
2661 fputc (',', asm_out_file);
2662 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2663 }
2664 break;
2665
2666 case DW_OP_implicit_pointer:
2667 case DW_OP_entry_value:
2668 case DW_OP_const_type:
2669 case DW_OP_regval_type:
2670 case DW_OP_deref_type:
2671 case DW_OP_convert:
2672 case DW_OP_reinterpret:
2673 case DW_OP_GNU_implicit_pointer:
2674 case DW_OP_GNU_entry_value:
2675 case DW_OP_GNU_const_type:
2676 case DW_OP_GNU_regval_type:
2677 case DW_OP_GNU_deref_type:
2678 case DW_OP_GNU_convert:
2679 case DW_OP_GNU_reinterpret:
2680 case DW_OP_GNU_parameter_ref:
2681 gcc_unreachable ();
2682 break;
2683
2684 default:
2685 /* Other codes have no operands. */
2686 break;
2687 }
2688 }
2689
2690 void
2691 output_loc_sequence_raw (dw_loc_descr_ref loc)
2692 {
2693 while (1)
2694 {
2695 enum dwarf_location_atom opc = loc->dw_loc_opc;
2696 /* Output the opcode. */
2697 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2698 {
2699 unsigned r = (opc - DW_OP_breg0);
2700 r = DWARF2_FRAME_REG_OUT (r, 1);
2701 gcc_assert (r <= 31);
2702 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2703 }
2704 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2705 {
2706 unsigned r = (opc - DW_OP_reg0);
2707 r = DWARF2_FRAME_REG_OUT (r, 1);
2708 gcc_assert (r <= 31);
2709 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2710 }
2711 /* Output the opcode. */
2712 fprintf (asm_out_file, "%#x", opc);
2713 output_loc_operands_raw (loc);
2714
2715 if (!loc->dw_loc_next)
2716 break;
2717 loc = loc->dw_loc_next;
2718
2719 fputc (',', asm_out_file);
2720 }
2721 }
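/* An illustrative sketch of the raw form (not copied from real compiler
   output): a one-descriptor expression DW_OP_breg7 with offset 8 comes out
   as the opcode byte 0x77 followed by a comma and its single SLEB128
   operand byte 0x8, i.e. the comma-separated bytes that .cfi_escape
   expects; DWARF2_FRAME_REG_OUT may remap the register number on some
   targets.  */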
2722
2723 /* This function builds a dwarf location descriptor sequence from a
2724 dw_cfa_location, adding the given OFFSET to the result of the
2725 expression. */
2726
2727 struct dw_loc_descr_node *
2728 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2729 {
2730 struct dw_loc_descr_node *head, *tmp;
2731
2732 offset += cfa->offset;
2733
2734 if (cfa->indirect)
2735 {
2736 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2737 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2738 head->dw_loc_oprnd1.val_entry = NULL;
2739 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2740 add_loc_descr (&head, tmp);
2741 loc_descr_plus_const (&head, offset);
2742 }
2743 else
2744 head = new_reg_loc_descr (cfa->reg, offset);
2745
2746 return head;
2747 }
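/* A sketch with made-up values: for a non-indirect CFA of register 7 plus
   16 and an OFFSET of 8, build_cfa_loc returns a single DW_OP_breg7
   descriptor with operand 24.  In the indirect case the register plus
   base_offset descriptor is followed by DW_OP_deref and then the constant
   addition of OFFSET plus the CFA offset.  */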
2748
2749 /* This function builds a dwarf location descriptor sequence for
2750 the address at OFFSET from the CFA when stack is aligned to
2751 ALIGNMENT byte. */
2752
2753 struct dw_loc_descr_node *
2754 build_cfa_aligned_loc (dw_cfa_location *cfa,
2755 poly_int64 offset, HOST_WIDE_INT alignment)
2756 {
2757 struct dw_loc_descr_node *head;
2758 unsigned int dwarf_fp
2759 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2760
2761 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2762 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2763 {
2764 head = new_reg_loc_descr (dwarf_fp, 0);
2765 add_loc_descr (&head, int_loc_descriptor (alignment));
2766 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2767 loc_descr_plus_const (&head, offset);
2768 }
2769 else
2770 head = new_reg_loc_descr (dwarf_fp, offset);
2771 return head;
2772 }
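/* Summarizing the code above: when the CFA is defined as FP plus an offset
   (and is not indirect), the resulting expression computes
   (FP & ALIGNMENT) + OFFSET, emulating the stack re-alignment done in the
   prologue; otherwise it degenerates to a plain FP + OFFSET register
   descriptor.  */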
2773 \f
2774 /* And now, the support for symbolic debugging information. */
2775
2776 /* .debug_str support. */
2777
2778 static void dwarf2out_init (const char *);
2779 static void dwarf2out_finish (const char *);
2780 static void dwarf2out_early_finish (const char *);
2781 static void dwarf2out_assembly_start (void);
2782 static void dwarf2out_define (unsigned int, const char *);
2783 static void dwarf2out_undef (unsigned int, const char *);
2784 static void dwarf2out_start_source_file (unsigned, const char *);
2785 static void dwarf2out_end_source_file (unsigned);
2786 static void dwarf2out_function_decl (tree);
2787 static void dwarf2out_begin_block (unsigned, unsigned);
2788 static void dwarf2out_end_block (unsigned, unsigned);
2789 static bool dwarf2out_ignore_block (const_tree);
2790 static void dwarf2out_early_global_decl (tree);
2791 static void dwarf2out_late_global_decl (tree);
2792 static void dwarf2out_type_decl (tree, int);
2793 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2794 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2795 dw_die_ref);
2796 static void dwarf2out_abstract_function (tree);
2797 static void dwarf2out_var_location (rtx_insn *);
2798 static void dwarf2out_inline_entry (tree);
2799 static void dwarf2out_size_function (tree);
2800 static void dwarf2out_begin_function (tree);
2801 static void dwarf2out_end_function (unsigned int);
2802 static void dwarf2out_register_main_translation_unit (tree unit);
2803 static void dwarf2out_set_name (tree, tree);
2804 static void dwarf2out_register_external_die (tree decl, const char *sym,
2805 unsigned HOST_WIDE_INT off);
2806 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2807 unsigned HOST_WIDE_INT *off);
2808
2809 /* The debug hooks structure. */
2810
2811 const struct gcc_debug_hooks dwarf2_debug_hooks =
2812 {
2813 dwarf2out_init,
2814 dwarf2out_finish,
2815 dwarf2out_early_finish,
2816 dwarf2out_assembly_start,
2817 dwarf2out_define,
2818 dwarf2out_undef,
2819 dwarf2out_start_source_file,
2820 dwarf2out_end_source_file,
2821 dwarf2out_begin_block,
2822 dwarf2out_end_block,
2823 dwarf2out_ignore_block,
2824 dwarf2out_source_line,
2825 dwarf2out_begin_prologue,
2826 #if VMS_DEBUGGING_INFO
2827 dwarf2out_vms_end_prologue,
2828 dwarf2out_vms_begin_epilogue,
2829 #else
2830 debug_nothing_int_charstar,
2831 debug_nothing_int_charstar,
2832 #endif
2833 dwarf2out_end_epilogue,
2834 dwarf2out_begin_function,
2835 dwarf2out_end_function, /* end_function */
2836 dwarf2out_register_main_translation_unit,
2837 dwarf2out_function_decl, /* function_decl */
2838 dwarf2out_early_global_decl,
2839 dwarf2out_late_global_decl,
2840 dwarf2out_type_decl, /* type_decl */
2841 dwarf2out_imported_module_or_decl,
2842 dwarf2out_die_ref_for_decl,
2843 dwarf2out_register_external_die,
2844 debug_nothing_tree, /* deferred_inline_function */
2845 /* The DWARF 2 backend tries to reduce debugging bloat by not
2846 emitting the abstract description of inline functions until
2847 something tries to reference them. */
2848 dwarf2out_abstract_function, /* outlining_inline_function */
2849 debug_nothing_rtx_code_label, /* label */
2850 debug_nothing_int, /* handle_pch */
2851 dwarf2out_var_location,
2852 dwarf2out_inline_entry, /* inline_entry */
2853 dwarf2out_size_function, /* size_function */
2854 dwarf2out_switch_text_section,
2855 dwarf2out_set_name,
2856 1, /* start_end_main_source_file */
2857 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2858 };
2859
2860 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2861 {
2862 dwarf2out_init,
2863 debug_nothing_charstar,
2864 debug_nothing_charstar,
2865 dwarf2out_assembly_start,
2866 debug_nothing_int_charstar,
2867 debug_nothing_int_charstar,
2868 debug_nothing_int_charstar,
2869 debug_nothing_int,
2870 debug_nothing_int_int, /* begin_block */
2871 debug_nothing_int_int, /* end_block */
2872 debug_true_const_tree, /* ignore_block */
2873 dwarf2out_source_line, /* source_line */
2874 debug_nothing_int_int_charstar, /* begin_prologue */
2875 debug_nothing_int_charstar, /* end_prologue */
2876 debug_nothing_int_charstar, /* begin_epilogue */
2877 debug_nothing_int_charstar, /* end_epilogue */
2878 debug_nothing_tree, /* begin_function */
2879 debug_nothing_int, /* end_function */
2880 debug_nothing_tree, /* register_main_translation_unit */
2881 debug_nothing_tree, /* function_decl */
2882 debug_nothing_tree, /* early_global_decl */
2883 debug_nothing_tree, /* late_global_decl */
2884 debug_nothing_tree_int, /* type_decl */
2885 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2886 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2887 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2888 debug_nothing_tree, /* deferred_inline_function */
2889 debug_nothing_tree, /* outlining_inline_function */
2890 debug_nothing_rtx_code_label, /* label */
2891 debug_nothing_int, /* handle_pch */
2892 debug_nothing_rtx_insn, /* var_location */
2893 debug_nothing_tree, /* inline_entry */
2894 debug_nothing_tree, /* size_function */
2895 debug_nothing_void, /* switch_text_section */
2896 debug_nothing_tree_tree, /* set_name */
2897 0, /* start_end_main_source_file */
2898 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2899 };
2900 \f
2901 /* NOTE: In the comments in this file, many references are made to
2902 "Debugging Information Entries". This term is abbreviated as `DIE'
2903 throughout the remainder of this file. */
2904
2905 /* An internal representation of the DWARF output is built, and then
2906 walked to generate the DWARF debugging info. The walk of the internal
2907 representation is done after the entire program has been compiled.
2908 The types below are used to describe the internal representation. */
2909
2910 /* Whether to put type DIEs into their own section .debug_types instead
2911 of making them part of the .debug_info section. Only supported for
2912 Dwarf V4 or higher, and only if the user didn't disable them through
2913 -fno-debug-types-section. It is more efficient to put them in
2914 separate comdat sections since the linker will then be able to
2915 remove duplicates. But not all tools support .debug_types sections
2916 yet. For Dwarf V5 or higher .debug_types doesn't exist any more;
2917 type units use the DW_UT_type unit type in the .debug_info section.
2918 For late LTO debug there should be almost no types emitted, so avoid
2919 enabling -fdebug-types-section there. */
2920
2921 #define use_debug_types (dwarf_version >= 4 \
2922 && flag_debug_types_section \
2923 && !in_lto_p)
2924
2925 /* Various DIE's use offsets relative to the beginning of the
2926 .debug_info section to refer to each other. */
2927
2928 typedef long int dw_offset;
2929
2930 struct comdat_type_node;
2931
2932 /* The entries in the line_info table more-or-less mirror the opcodes
2933 that are used in the real dwarf line table. Arrays of these entries
2934 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2935 supported. */
2936
2937 enum dw_line_info_opcode {
2938 /* Emit DW_LNE_set_address; the operand is the label index. */
2939 LI_set_address,
2940
2941 /* Emit a row to the matrix with the given line. This may be done
2942 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2943 special opcodes. */
2944 LI_set_line,
2945
2946 /* Emit a DW_LNS_set_file. */
2947 LI_set_file,
2948
2949 /* Emit a DW_LNS_set_column. */
2950 LI_set_column,
2951
2952 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2953 LI_negate_stmt,
2954
2955 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2956 LI_set_prologue_end,
2957 LI_set_epilogue_begin,
2958
2959 /* Emit a DW_LNE_set_discriminator. */
2960 LI_set_discriminator,
2961
2962 /* Output a Fixed Advance PC; the target PC is the label index; the
2963 base PC is the previous LI_adv_address or LI_set_address entry.
2964 We only use this when emitting debug views without assembler
2965 support, at explicit user request. Ideally, we should only use
2966 it when the offset might be zero but we can't tell: it's the only
2967 way to maybe change the PC without resetting the view number. */
2968 LI_adv_address
2969 };
2970
2971 typedef struct GTY(()) dw_line_info_struct {
2972 enum dw_line_info_opcode opcode;
2973 unsigned int val;
2974 } dw_line_info_entry;
2975
2976
2977 struct GTY(()) dw_line_info_table {
2978 /* The label that marks the end of this section. */
2979 const char *end_label;
2980
2981 /* The values for the last row of the matrix, as collected in the table.
2982 These are used to minimize the changes to the next row. */
2983 unsigned int file_num;
2984 unsigned int line_num;
2985 unsigned int column_num;
2986 int discrim_num;
2987 bool is_stmt;
2988 bool in_use;
2989
2990 /* This denotes the NEXT view number.
2991
2992 If it is 0, it is known that the NEXT view will be the first view
2993 at the given PC.
2994
2995 If it is -1, we're forcing the view number to be reset, e.g. at a
2996 function entry.
2997
2998 The meaning of other nonzero values depends on whether we're
2999 computing views internally or leaving it for the assembler to do
3000 so. If we're emitting them internally, view denotes the view
3001 number since the last known advance of PC. If we're leaving it
3002 for the assembler, it denotes the LVU label number that we're
3003 going to ask the assembler to assign. */
3004 var_loc_view view;
3005
3006 /* This counts the number of symbolic views emitted in this table
3007 since the latest view reset. Its max value, over all tables,
3008 sets symview_upper_bound. */
3009 var_loc_view symviews_since_reset;
3010
3011 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
3012 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
3013 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
3014 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
3015
3016 vec<dw_line_info_entry, va_gc> *entries;
3017 };
3018
3019 /* This is an upper bound for view numbers that the assembler may
3020 assign to symbolic views output in this translation. It is used to
3021 decide how big a field to use to represent view numbers in
3022 symview-classed attributes. */
3023
3024 static var_loc_view symview_upper_bound;
3025
3026 /* If we're keeping track of location views and their reset points, and
3027 INSN is a reset point (i.e., it necessarily advances the PC), mark
3028 the next view in TABLE as reset. */
3029
3030 static void
3031 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3032 {
3033 if (!debug_internal_reset_location_views)
3034 return;
3035
3036 /* Maybe turn (part of?) this test into a default target hook. */
3037 int reset = 0;
3038
3039 if (targetm.reset_location_view)
3040 reset = targetm.reset_location_view (insn);
3041
3042 if (reset)
3043 ;
3044 else if (JUMP_TABLE_DATA_P (insn))
3045 reset = 1;
3046 else if (GET_CODE (insn) == USE
3047 || GET_CODE (insn) == CLOBBER
3048 || GET_CODE (insn) == ASM_INPUT
3049 || asm_noperands (insn) >= 0)
3050 ;
3051 else if (get_attr_min_length (insn) > 0)
3052 reset = 1;
3053
3054 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3055 RESET_NEXT_VIEW (table->view);
3056 }
3057
3058 /* Each DIE attribute has a field specifying the attribute kind,
3059 a link to the next attribute in the chain, and an attribute value.
3060 Attributes are typically linked below the DIE they modify. */
3061
3062 typedef struct GTY(()) dw_attr_struct {
3063 enum dwarf_attribute dw_attr;
3064 dw_val_node dw_attr_val;
3065 }
3066 dw_attr_node;
3067
3068
3069 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3070 The children of each node form a circular list linked by
3071 die_sib. die_child points to the node *before* the "first" child node. */
3072
3073 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3074 union die_symbol_or_type_node
3075 {
3076 const char * GTY ((tag ("0"))) die_symbol;
3077 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3078 }
3079 GTY ((desc ("%0.comdat_type_p"))) die_id;
3080 vec<dw_attr_node, va_gc> *die_attr;
3081 dw_die_ref die_parent;
3082 dw_die_ref die_child;
3083 dw_die_ref die_sib;
3084 dw_die_ref die_definition; /* ref from a specification to its definition */
3085 dw_offset die_offset;
3086 unsigned long die_abbrev;
3087 int die_mark;
3088 unsigned int decl_id;
3089 enum dwarf_tag die_tag;
3090 /* Die is used and must not be pruned as unused. */
3091 BOOL_BITFIELD die_perennial_p : 1;
3092 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3093 /* For an external ref to die_symbol if die_offset contains an extra
3094 offset to that symbol. */
3095 BOOL_BITFIELD with_offset : 1;
3096 /* Whether this DIE was removed from the DIE tree, for example via
3097 prune_unused_types. The DIE lookup routines do not consider such
3098 DIEs present. */
3099 BOOL_BITFIELD removed : 1;
3100 /* Lots of spare bits. */
3101 }
3102 die_node;
3103
3104 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3105 static bool early_dwarf;
3106 static bool early_dwarf_finished;
3107 class set_early_dwarf {
3108 public:
3109 bool saved;
3110 set_early_dwarf () : saved(early_dwarf)
3111 {
3112 gcc_assert (! early_dwarf_finished);
3113 early_dwarf = true;
3114 }
3115 ~set_early_dwarf () { early_dwarf = saved; }
3116 };
3117
3118 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3119 #define FOR_EACH_CHILD(die, c, expr) do { \
3120 c = die->die_child; \
3121 if (c) do { \
3122 c = c->die_sib; \
3123 expr; \
3124 } while (c != die->die_child); \
3125 } while (0)
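/* A minimal usage sketch (hypothetical helper, not part of this file):
   counting the children of DIE with the iterator above.

     static unsigned
     count_children (dw_die_ref die)
     {
       dw_die_ref c;
       unsigned count = 0;
       FOR_EACH_CHILD (die, c, count++);
       return count;
     }  */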
3126
3127 /* The pubname structure */
3128
3129 typedef struct GTY(()) pubname_struct {
3130 dw_die_ref die;
3131 const char *name;
3132 }
3133 pubname_entry;
3134
3135
3136 struct GTY(()) dw_ranges {
3137 const char *label;
3138 /* If this is positive, it's a block number, otherwise it's a
3139 bitwise-negated index into dw_ranges_by_label. */
3140 int num;
3141 /* Index for the range list for DW_FORM_rnglistx. */
3142 unsigned int idx : 31;
3143 /* True if this range might be in a different section from the
3144 previous entry. */
3145 unsigned int maybe_new_sec : 1;
3146 };
3147
3148 /* A structure to hold a macinfo entry. */
3149
3150 typedef struct GTY(()) macinfo_struct {
3151 unsigned char code;
3152 unsigned HOST_WIDE_INT lineno;
3153 const char *info;
3154 }
3155 macinfo_entry;
3156
3157
3158 struct GTY(()) dw_ranges_by_label {
3159 const char *begin;
3160 const char *end;
3161 };
3162
3163 /* The comdat type node structure. */
3164 struct GTY(()) comdat_type_node
3165 {
3166 dw_die_ref root_die;
3167 dw_die_ref type_die;
3168 dw_die_ref skeleton_die;
3169 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3170 comdat_type_node *next;
3171 };
3172
3173 /* A list of DIEs for which we can't determine ancestry (the die_parent
3174 field) just yet. Later in dwarf2out_finish we will fill in the
3175 missing bits. */
3176 typedef struct GTY(()) limbo_die_struct {
3177 dw_die_ref die;
3178 /* The tree for which this DIE was created. We use this to
3179 determine ancestry later. */
3180 tree created_for;
3181 struct limbo_die_struct *next;
3182 }
3183 limbo_die_node;
3184
3185 typedef struct skeleton_chain_struct
3186 {
3187 dw_die_ref old_die;
3188 dw_die_ref new_die;
3189 struct skeleton_chain_struct *parent;
3190 }
3191 skeleton_chain_node;
3192
3193 /* Define a macro which returns nonzero for a TYPE_DECL which was
3194 implicitly generated for a type.
3195
3196 Note that, unlike the C front-end (which generates a NULL named
3197 TYPE_DECL node for each complete tagged type, each array type,
3198 and each function type node created) the C++ front-end generates
3199 a _named_ TYPE_DECL node for each tagged type node created.
3200 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3201 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3202 front-end, but for each type, tagged or not. */
3203
3204 #define TYPE_DECL_IS_STUB(decl) \
3205 (DECL_NAME (decl) == NULL_TREE \
3206 || (DECL_ARTIFICIAL (decl) \
3207 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3208 /* This is necessary for stub decls that \
3209 appear in nested inline functions. */ \
3210 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3211 && (decl_ultimate_origin (decl) \
3212 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3213
3214 /* Information concerning the compilation unit's programming
3215 language, and compiler version. */
3216
3217 /* Fixed size portion of the DWARF compilation unit header. */
3218 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3219 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3220 + (dwarf_version >= 5 ? 4 : 3))
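/* For example (assuming 32-bit DWARF, i.e. a 4-byte initial length and
   4-byte section offsets), this is 4 + 4 + 3 = 11 bytes for DWARF 2-4
   (2-byte version, 1-byte address size, 4-byte abbrev table offset) and
   4 + 4 + 4 = 12 bytes for DWARF 5, whose header adds a 1-byte unit
   type.  */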
3221
3222 /* Fixed size portion of the DWARF comdat type unit header. */
3223 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3224 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3225 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3226
3227 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3228 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3229 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3230
3231 /* Fixed size portion of public names info. */
3232 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3233
3234 /* Fixed size portion of the address range info. */
3235 #define DWARF_ARANGES_HEADER_SIZE \
3236 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3237 DWARF2_ADDR_SIZE * 2) \
3238 - DWARF_INITIAL_LENGTH_SIZE)
3239
3240 /* Size of padding portion in the address range info. It must be
3241 aligned to twice the pointer size. */
3242 #define DWARF_ARANGES_PAD_SIZE \
3243 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3244 DWARF2_ADDR_SIZE * 2) \
3245 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
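/* Worked example (assuming 32-bit DWARF and 8-byte addresses): the raw
   header is 4 + 4 + 4 = 12 bytes, DWARF_ROUND brings that up to the next
   multiple of 16, so DWARF_ARANGES_HEADER_SIZE is 16 - 4 = 12 and
   DWARF_ARANGES_PAD_SIZE is 16 - 12 = 4 bytes of padding before the first
   address/length pair.  */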
3246
3247 /* Use assembler line directives if available. */
3248 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3249 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3250 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3251 #else
3252 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3253 #endif
3254 #endif
3255
3256 /* Use assembler views in line directives if available. */
3257 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3258 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3259 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3260 #else
3261 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3262 #endif
3263 #endif
3264
3265 /* Return true if GCC configure detected assembler support for .loc. */
3266
3267 bool
3268 dwarf2out_default_as_loc_support (void)
3269 {
3270 return DWARF2_ASM_LINE_DEBUG_INFO;
3271 #if (GCC_VERSION >= 3000)
3272 # undef DWARF2_ASM_LINE_DEBUG_INFO
3273 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3274 #endif
3275 }
3276
3277 /* Return true if GCC configure detected assembler support for views
3278 in .loc directives. */
3279
3280 bool
3281 dwarf2out_default_as_locview_support (void)
3282 {
3283 return DWARF2_ASM_VIEW_DEBUG_INFO;
3284 #if (GCC_VERSION >= 3000)
3285 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3286 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3287 #endif
3288 }
3289
3290 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3291 view computation, and it refers to a view identifier for which we
3292 will not emit a label because it is known to map to a view number
3293 zero. We won't allocate the bitmap if we're not using assembler
3294 support for location views, but we have to make the variable
3295 visible for GGC and for code that will be optimized out for lack of
3296 support but that's still parsed and compiled. We could abstract it
3297 out with macros, but it's not worth it. */
3298 static GTY(()) bitmap zero_view_p;
3299
3300 /* Evaluate to TRUE iff N is known to identify the first location view
3301 at its PC. When not using assembler location view computation,
3302 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3303 and the view label numbers recorded in it are the ones known to be
3304 zero. */
3305 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3306 || (N) == (var_loc_view)-1 \
3307 || (zero_view_p \
3308 && bitmap_bit_p (zero_view_p, (N))))
3309
3310 /* Return true iff we're to emit .loc directives for the assembler to
3311 generate line number sections.
3312
3313 When we're not emitting views, all we need from the assembler is
3314 support for .loc directives.
3315
3316 If we are emitting views, we can only use the assembler's .loc
3317 support if it also supports views.
3318
3319 When the compiler is emitting the line number programs and
3320 computing view numbers itself, it resets view numbers at known PC
3321 changes and counts from that, and then it emits view numbers as
3322 literal constants in locviewlists. There are cases in which the
3323 compiler is not sure about PC changes, e.g. when extra alignment is
3324 requested for a label. In these cases, the compiler may not reset
3325 the view counter, and the potential PC advance in the line number
3326 program will use an opcode that does not reset the view counter
3327 even if the PC actually changes, so that compiler and debug info
3328 consumer can keep view numbers in sync.
3329
3330 When the compiler defers view computation to the assembler, it
3331 emits symbolic view numbers in locviewlists, with the exception of
3332 views known to be zero (forced resets, or reset after
3333 compiler-visible PC changes): instead of emitting symbols for
3334 these, we emit literal zero and assert the assembler agrees with
3335 the compiler's assessment. We could use symbolic views everywhere,
3336 instead of special-casing zero views, but then we'd be unable to
3337 optimize out locviewlists that contain only zeros. */
3338
3339 static bool
3340 output_asm_line_debug_info (void)
3341 {
3342 return (dwarf2out_as_loc_support
3343 && (dwarf2out_as_locview_support
3344 || !debug_variable_location_views));
3345 }
3346
3347 /* Minimum line offset in a special line info opcode.
3348 This value was chosen to give a reasonable range of values. */
3349 #define DWARF_LINE_BASE -10
3350
3351 /* First special line opcode - leave room for the standard opcodes. */
3352 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3353
3354 /* Range of line offsets in a special line info opcode. */
3355 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
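/* A worked example using the standard DWARF special-opcode formula
   (illustrative; with DW_LNS_set_isa == 12 these constants give
   DWARF_LINE_OPCODE_BASE == 13 and DWARF_LINE_RANGE == 242): advancing
   the line by 1 with no address advance encodes as
   (1 - DWARF_LINE_BASE) + 242 * 0 + 13 == 24, which fits in the
   single-byte special opcode space 13 .. 255.  */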
3356
3357 /* Flag that indicates the initial value of the is_stmt_start flag.
3358 In the present implementation, we do not mark any lines as
3359 the beginning of a source statement, because that information
3360 is not made available by the GCC front-end. */
3361 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3362
3363 /* Maximum number of operations per instruction bundle. */
3364 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3365 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3366 #endif
3367
3368 /* This location is used by calc_die_sizes() to keep track of
3369 the offset of each DIE within the .debug_info section. */
3370 static unsigned long next_die_offset;
3371
3372 /* Record the root of the DIE's built for the current compilation unit. */
3373 static GTY(()) dw_die_ref single_comp_unit_die;
3374
3375 /* A list of type DIEs that have been separated into comdat sections. */
3376 static GTY(()) comdat_type_node *comdat_type_list;
3377
3378 /* A list of CU DIEs that have been separated. */
3379 static GTY(()) limbo_die_node *cu_die_list;
3380
3381 /* A list of DIEs with a NULL parent waiting to be relocated. */
3382 static GTY(()) limbo_die_node *limbo_die_list;
3383
3384 /* A list of DIEs for which we may have to generate
3385 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3386 static GTY(()) limbo_die_node *deferred_asm_name;
3387
3388 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3389 {
3390 typedef const char *compare_type;
3391
3392 static hashval_t hash (dwarf_file_data *);
3393 static bool equal (dwarf_file_data *, const char *);
3394 };
3395
3396 /* Filenames referenced by this compilation unit. */
3397 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3398
3399 struct decl_die_hasher : ggc_ptr_hash<die_node>
3400 {
3401 typedef tree compare_type;
3402
3403 static hashval_t hash (die_node *);
3404 static bool equal (die_node *, tree);
3405 };
3406 /* A hash table of references to DIE's that describe declarations.
3407 The key is a DECL_UID() which is a unique number identifying each decl. */
3408 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3409
3410 struct GTY ((for_user)) variable_value_struct {
3411 unsigned int decl_id;
3412 vec<dw_die_ref, va_gc> *dies;
3413 };
3414
3415 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3416 {
3417 typedef tree compare_type;
3418
3419 static hashval_t hash (variable_value_struct *);
3420 static bool equal (variable_value_struct *, tree);
3421 };
3422 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3423 dw_val_class_decl_ref class, indexed by the FUNCTION_DECL that is
3424 the DECL_CONTEXT of the referenced VAR_DECLs. */
3425 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3426
3427 struct block_die_hasher : ggc_ptr_hash<die_struct>
3428 {
3429 static hashval_t hash (die_struct *);
3430 static bool equal (die_struct *, die_struct *);
3431 };
3432
3433 /* A hash table of references to DIE's that describe COMMON blocks.
3434 The key is DECL_UID() ^ die_parent. */
3435 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3436
3437 typedef struct GTY(()) die_arg_entry_struct {
3438 dw_die_ref die;
3439 tree arg;
3440 } die_arg_entry;
3441
3442
3443 /* Node of the variable location list. */
3444 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3445 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3446 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3447 in mode of the EXPR_LIST node and first EXPR_LIST operand
3448 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3449 location or NULL for padding. For larger bitsizes,
3450 mode is 0 and first operand is a CONCAT with bitsize
3451 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3452 NULL as second operand. */
3453 rtx GTY (()) loc;
3454 const char * GTY (()) label;
3455 struct var_loc_node * GTY (()) next;
3456 var_loc_view view;
3457 };
3458
3459 /* Variable location list. */
3460 struct GTY ((for_user)) var_loc_list_def {
3461 struct var_loc_node * GTY (()) first;
3462
3463 /* Pointer to the last or last-but-one element of the
3464 chained list. If the list is empty, both first and
3465 last are NULL; if the list contains just one node
3466 or the last node is certainly not redundant, it points
3467 to the last node; otherwise it points to the last but one.
3468 Do not mark it for GC because it is marked through the chain. */
3469 struct var_loc_node * GTY ((skip ("%h"))) last;
3470
3471 /* Pointer to the last element before a section switch;
3472 if NULL, either sections weren't switched or first
3473 is after the section switch. */
3474 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3475
3476 /* DECL_UID of the variable decl. */
3477 unsigned int decl_id;
3478 };
3479 typedef struct var_loc_list_def var_loc_list;
3480
3481 /* Call argument location list. */
3482 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3483 rtx GTY (()) call_arg_loc_note;
3484 const char * GTY (()) label;
3485 tree GTY (()) block;
3486 bool tail_call_p;
3487 rtx GTY (()) symbol_ref;
3488 struct call_arg_loc_node * GTY (()) next;
3489 };
3490
3491
3492 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3493 {
3494 typedef const_tree compare_type;
3495
3496 static hashval_t hash (var_loc_list *);
3497 static bool equal (var_loc_list *, const_tree);
3498 };
3499
3500 /* Table of decl location linked lists. */
3501 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3502
3503 /* Head and tail of call_arg_loc chain. */
3504 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3505 static struct call_arg_loc_node *call_arg_loc_last;
3506
3507 /* Number of call sites in the current function. */
3508 static int call_site_count = -1;
3509 /* Number of tail call sites in the current function. */
3510 static int tail_call_site_count = -1;
3511
3512 /* A cached location list. */
3513 struct GTY ((for_user)) cached_dw_loc_list_def {
3514 /* The DECL_UID of the decl that this entry describes. */
3515 unsigned int decl_id;
3516
3517 /* The cached location list. */
3518 dw_loc_list_ref loc_list;
3519 };
3520 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3521
3522 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3523 {
3524
3525 typedef const_tree compare_type;
3526
3527 static hashval_t hash (cached_dw_loc_list *);
3528 static bool equal (cached_dw_loc_list *, const_tree);
3529 };
3530
3531 /* Table of cached location lists. */
3532 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3533
3534 /* A vector of references to DIE's that are uniquely identified by their tag,
3535 presence/absence of children DIE's, and list of attribute/value pairs. */
3536 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3537
3538 /* A hash map to remember the stack usage for DWARF procedures. The value
3539 stored is the stack size difference between before the DWARF procedure
3540 invocation and after it returned. In other words, for a DWARF procedure
3541 that consumes N stack slots and pushes M, this stores M - N. */
3542 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
3543
3544 /* A global counter for generating labels for line number data. */
3545 static unsigned int line_info_label_num;
3546
3547 /* The current table to which we should emit line number information
3548 for the current function. This will be set up at the beginning of
3549 assembly for the function. */
3550 static GTY(()) dw_line_info_table *cur_line_info_table;
3551
3552 /* The two default tables of line number info. */
3553 static GTY(()) dw_line_info_table *text_section_line_info;
3554 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3555
3556 /* The set of all non-default tables of line number info. */
3557 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3558
3559 /* A flag to tell pubnames/types export if there is an info section to
3560 refer to. */
3561 static bool info_section_emitted;
3562
3563 /* A pointer to the base of a table that contains a list of publicly
3564 accessible names. */
3565 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3566
3567 /* A pointer to the base of a table that contains a list of publicly
3568 accessible types. */
3569 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3570
3571 /* A pointer to the base of a table that contains a list of macro
3572 defines/undefines (and file start/end markers). */
3573 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3574
3575 /* True if .debug_macinfo or .debug_macros section is going to be
3576 emitted. */
3577 #define have_macinfo \
3578 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3579 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3580 && !macinfo_table->is_empty ())
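/* In other words (a summary of the condition above, not an additional
   constraint): macro information is emitted only at -g3
   (debug_info_level >= DINFO_LEVEL_VERBOSE), only when at least one
   macinfo entry was actually recorded, and not for XCOFF targets that
   lack the DWARF extras.  */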
3581
3582 /* Vector of dies for which we should generate .debug_ranges info. */
3583 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3584
3585 /* Vector of pairs of labels referenced in ranges_table. */
3586 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3587
3588 /* Whether we have location lists that need outputting. */
3589 static GTY(()) bool have_location_lists;
3590
3591 /* Unique label counter. */
3592 static GTY(()) unsigned int loclabel_num;
3593
3594 /* Unique label counter for point-of-call tables. */
3595 static GTY(()) unsigned int poc_label_num;
3596
3597 /* The last file entry emitted by maybe_emit_file(). */
3598 static GTY(()) struct dwarf_file_data * last_emitted_file;
3599
3600 /* Number of internal labels generated by gen_internal_sym(). */
3601 static GTY(()) int label_num;
3602
3603 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3604
3605 /* Instances of generic types for which we need to generate debug
3606 info that describe their generic parameters and arguments. That
3607 generation needs to happen once all types are properly laid out so
3608 we do it at the end of compilation. */
3609 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3610
3611 /* Offset from the "steady-state frame pointer" to the frame base,
3612 within the current function. */
3613 static poly_int64 frame_pointer_fb_offset;
3614 static bool frame_pointer_fb_offset_valid;
3615
3616 static vec<dw_die_ref> base_types;
3617
3618 /* Flags to represent a set of attribute classes for attributes that represent
3619 a scalar value (bounds, pointers, ...). */
3620 enum dw_scalar_form
3621 {
3622 dw_scalar_form_constant = 0x01,
3623 dw_scalar_form_exprloc = 0x02,
3624 dw_scalar_form_reference = 0x04
3625 };
3626
3627 /* Forward declarations for functions defined in this file. */
3628
3629 static int is_pseudo_reg (const_rtx);
3630 static tree type_main_variant (tree);
3631 static int is_tagged_type (const_tree);
3632 static const char *dwarf_tag_name (unsigned);
3633 static const char *dwarf_attr_name (unsigned);
3634 static const char *dwarf_form_name (unsigned);
3635 static tree decl_ultimate_origin (const_tree);
3636 static tree decl_class_context (tree);
3637 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3638 static inline enum dw_val_class AT_class (dw_attr_node *);
3639 static inline unsigned int AT_index (dw_attr_node *);
3640 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3641 static inline unsigned AT_flag (dw_attr_node *);
3642 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3643 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3644 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3645 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3646 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3647 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3648 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3649 unsigned int, unsigned char *);
3650 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3651 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3652 static inline const char *AT_string (dw_attr_node *);
3653 static enum dwarf_form AT_string_form (dw_attr_node *);
3654 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3655 static void add_AT_specification (dw_die_ref, dw_die_ref);
3656 static inline dw_die_ref AT_ref (dw_attr_node *);
3657 static inline int AT_ref_external (dw_attr_node *);
3658 static inline void set_AT_ref_external (dw_attr_node *, int);
3659 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3660 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3661 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3662 dw_loc_list_ref);
3663 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3664 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3665 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3666 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3667 static void remove_addr_table_entry (addr_table_entry *);
3668 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3669 static inline rtx AT_addr (dw_attr_node *);
3670 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3671 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3672 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3673 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3674 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3675 unsigned long, bool);
3676 static inline const char *AT_lbl (dw_attr_node *);
3677 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3678 static const char *get_AT_low_pc (dw_die_ref);
3679 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3680 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3681 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3682 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3683 static bool is_c (void);
3684 static bool is_cxx (void);
3685 static bool is_cxx (const_tree);
3686 static bool is_fortran (void);
3687 static bool is_ada (void);
3688 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3689 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3690 static void add_child_die (dw_die_ref, dw_die_ref);
3691 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3692 static dw_die_ref lookup_type_die (tree);
3693 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3694 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3695 static void equate_type_number_to_die (tree, dw_die_ref);
3696 static dw_die_ref lookup_decl_die (tree);
3697 static var_loc_list *lookup_decl_loc (const_tree);
3698 static void equate_decl_number_to_die (tree, dw_die_ref);
3699 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3700 static void print_spaces (FILE *);
3701 static void print_die (dw_die_ref, FILE *);
3702 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3703 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3704 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3705 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3706 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3707 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3708 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3709 struct md5_ctx *, int *);
3710 struct checksum_attributes;
3711 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3712 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3713 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3714 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3715 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3716 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3717 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3718 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3719 static int is_type_die (dw_die_ref);
3720 static inline bool is_template_instantiation (dw_die_ref);
3721 static int is_declaration_die (dw_die_ref);
3722 static int should_move_die_to_comdat (dw_die_ref);
3723 static dw_die_ref clone_as_declaration (dw_die_ref);
3724 static dw_die_ref clone_die (dw_die_ref);
3725 static dw_die_ref clone_tree (dw_die_ref);
3726 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3727 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3728 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3729 static dw_die_ref generate_skeleton (dw_die_ref);
3730 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3731 dw_die_ref,
3732 dw_die_ref);
3733 static void break_out_comdat_types (dw_die_ref);
3734 static void copy_decls_for_unworthy_types (dw_die_ref);
3735
3736 static void add_sibling_attributes (dw_die_ref);
3737 static void output_location_lists (dw_die_ref);
3738 static int constant_size (unsigned HOST_WIDE_INT);
3739 static unsigned long size_of_die (dw_die_ref);
3740 static void calc_die_sizes (dw_die_ref);
3741 static void calc_base_type_die_sizes (void);
3742 static void mark_dies (dw_die_ref);
3743 static void unmark_dies (dw_die_ref);
3744 static void unmark_all_dies (dw_die_ref);
3745 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3746 static unsigned long size_of_aranges (void);
3747 static enum dwarf_form value_format (dw_attr_node *);
3748 static void output_value_format (dw_attr_node *);
3749 static void output_abbrev_section (void);
3750 static void output_die_abbrevs (unsigned long, dw_die_ref);
3751 static void output_die (dw_die_ref);
3752 static void output_compilation_unit_header (enum dwarf_unit_type);
3753 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3754 static void output_comdat_type_unit (comdat_type_node *, bool);
3755 static const char *dwarf2_name (tree, int);
3756 static void add_pubname (tree, dw_die_ref);
3757 static void add_enumerator_pubname (const char *, dw_die_ref);
3758 static void add_pubname_string (const char *, dw_die_ref);
3759 static void add_pubtype (tree, dw_die_ref);
3760 static void output_pubnames (vec<pubname_entry, va_gc> *);
3761 static void output_aranges (void);
3762 static unsigned int add_ranges (const_tree, bool = false);
3763 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3764 bool *, bool);
3765 static void output_ranges (void);
3766 static dw_line_info_table *new_line_info_table (void);
3767 static void output_line_info (bool);
3768 static void output_file_names (void);
3769 static dw_die_ref base_type_die (tree, bool);
3770 static int is_base_type (tree);
3771 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3772 static int decl_quals (const_tree);
3773 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3774 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3775 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3776 static unsigned int dbx_reg_number (const_rtx);
3777 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3778 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3779 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3780 enum var_init_status);
3781 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3782 enum var_init_status);
3783 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3784 enum var_init_status);
3785 static int is_based_loc (const_rtx);
3786 static bool resolve_one_addr (rtx *);
3787 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3788 enum var_init_status);
3789 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3790 enum var_init_status);
3791 struct loc_descr_context;
3792 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3793 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3794 static dw_loc_list_ref loc_list_from_tree (tree, int,
3795 struct loc_descr_context *);
3796 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3797 struct loc_descr_context *);
3798 static tree field_type (const_tree);
3799 static unsigned int simple_type_align_in_bits (const_tree);
3800 static unsigned int simple_decl_align_in_bits (const_tree);
3801 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3802 struct vlr_context;
3803 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3804 HOST_WIDE_INT *);
3805 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3806 dw_loc_list_ref);
3807 static void add_data_member_location_attribute (dw_die_ref, tree,
3808 struct vlr_context *);
3809 static bool add_const_value_attribute (dw_die_ref, rtx);
3810 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3811 static void insert_wide_int (const wide_int &, unsigned char *, int);
3812 static void insert_float (const_rtx, unsigned char *);
3813 static rtx rtl_for_decl_location (tree);
3814 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3815 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3816 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3817 static void add_name_attribute (dw_die_ref, const char *);
3818 static void add_desc_attribute (dw_die_ref, tree);
3819 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3820 static void add_comp_dir_attribute (dw_die_ref);
3821 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3822 struct loc_descr_context *);
3823 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3824 struct loc_descr_context *);
3825 static void add_subscript_info (dw_die_ref, tree, bool);
3826 static void add_byte_size_attribute (dw_die_ref, tree);
3827 static void add_alignment_attribute (dw_die_ref, tree);
3828 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3829 struct vlr_context *);
3830 static void add_bit_size_attribute (dw_die_ref, tree);
3831 static void add_prototyped_attribute (dw_die_ref, tree);
3832 static void add_abstract_origin_attribute (dw_die_ref, tree);
3833 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3834 static void add_src_coords_attributes (dw_die_ref, tree);
3835 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3836 static void add_discr_value (dw_die_ref, dw_discr_value *);
3837 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3838 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3839 static dw_die_ref scope_die_for (tree, dw_die_ref);
3840 static inline int local_scope_p (dw_die_ref);
3841 static inline int class_scope_p (dw_die_ref);
3842 static inline int class_or_namespace_scope_p (dw_die_ref);
3843 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3844 static void add_calling_convention_attribute (dw_die_ref, tree);
3845 static const char *type_tag (const_tree);
3846 static tree member_declared_type (const_tree);
3847 #if 0
3848 static const char *decl_start_label (tree);
3849 #endif
3850 static void gen_array_type_die (tree, dw_die_ref);
3851 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3852 #if 0
3853 static void gen_entry_point_die (tree, dw_die_ref);
3854 #endif
3855 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3856 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3857 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3858 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3859 static void gen_formal_types_die (tree, dw_die_ref);
3860 static void gen_subprogram_die (tree, dw_die_ref);
3861 static void gen_variable_die (tree, tree, dw_die_ref);
3862 static void gen_const_die (tree, dw_die_ref);
3863 static void gen_label_die (tree, dw_die_ref);
3864 static void gen_lexical_block_die (tree, dw_die_ref);
3865 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3866 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3867 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3868 static dw_die_ref gen_compile_unit_die (const char *);
3869 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3870 static void gen_member_die (tree, dw_die_ref);
3871 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3872 enum debug_info_usage);
3873 static void gen_subroutine_type_die (tree, dw_die_ref);
3874 static void gen_typedef_die (tree, dw_die_ref);
3875 static void gen_type_die (tree, dw_die_ref);
3876 static void gen_block_die (tree, dw_die_ref);
3877 static void decls_for_scope (tree, dw_die_ref, bool = true);
3878 static bool is_naming_typedef_decl (const_tree);
3879 static inline dw_die_ref get_context_die (tree);
3880 static void gen_namespace_die (tree, dw_die_ref);
3881 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3882 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3883 static dw_die_ref force_decl_die (tree);
3884 static dw_die_ref force_type_die (tree);
3885 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3886 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3887 static struct dwarf_file_data * lookup_filename (const char *);
3888 static void retry_incomplete_types (void);
3889 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3890 static void gen_generic_params_dies (tree);
3891 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3892 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3893 static void splice_child_die (dw_die_ref, dw_die_ref);
3894 static int file_info_cmp (const void *, const void *);
3895 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3896 const char *, var_loc_view, const char *);
3897 static void output_loc_list (dw_loc_list_ref);
3898 static char *gen_internal_sym (const char *);
3899 static bool want_pubnames (void);
3900
3901 static void prune_unmark_dies (dw_die_ref);
3902 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3903 static void prune_unused_types_mark (dw_die_ref, int);
3904 static void prune_unused_types_walk (dw_die_ref);
3905 static void prune_unused_types_walk_attribs (dw_die_ref);
3906 static void prune_unused_types_prune (dw_die_ref);
3907 static void prune_unused_types (void);
3908 static int maybe_emit_file (struct dwarf_file_data *fd);
3909 static inline const char *AT_vms_delta1 (dw_attr_node *);
3910 static inline const char *AT_vms_delta2 (dw_attr_node *);
3911 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3912 const char *, const char *);
3913 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3914 static void gen_remaining_tmpl_value_param_die_attribute (void);
3915 static bool generic_type_p (tree);
3916 static void schedule_generic_params_dies_gen (tree t);
3917 static void gen_scheduled_generic_parms_dies (void);
3918 static void resolve_variable_values (void);
3919
3920 static const char *comp_dir_string (void);
3921
3922 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3923
3924 /* enum for tracking thread-local variables whose address is really an offset
3925 relative to the TLS pointer, which will need link-time relocation, but will
3926 not need relocation by the DWARF consumer. */
3927
3928 enum dtprel_bool
3929 {
3930 dtprel_false = 0,
3931 dtprel_true = 1
3932 };
3933
3934 /* Return the operator to use for an address of a variable. For dtprel_true, we
3935 use DW_OP_const*. For regular variables, which need both link-time
3936 relocation and consumer-level relocation (e.g., to account for shared objects
3937 loaded at a random address), we use DW_OP_addr*. */
3938
3939 static inline enum dwarf_location_atom
3940 dw_addr_op (enum dtprel_bool dtprel)
3941 {
3942 if (dtprel == dtprel_true)
3943 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3944 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3945 else
3946 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3947 }
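/* Illustrative mapping of the cases above, derived from the function
   body rather than from an actual build:

     dw_addr_op (dtprel_false):
       split debug info -> dwarf_OP (DW_OP_addrx)
       otherwise        -> DW_OP_addr
     dw_addr_op (dtprel_true):
       split debug info -> dwarf_OP (DW_OP_constx)
       otherwise        -> DW_OP_const4u or DW_OP_const8u,
                           depending on DWARF2_ADDR_SIZE.  */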
3948
3949 /* Return a pointer to a newly allocated address location description. If
3950 dwarf_split_debug_info is true, then record the address with the appropriate
3951 relocation. */
3952 static inline dw_loc_descr_ref
3953 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3954 {
3955 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3956
3957 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3958 ref->dw_loc_oprnd1.v.val_addr = addr;
3959 ref->dtprel = dtprel;
3960 if (dwarf_split_debug_info)
3961 ref->dw_loc_oprnd1.val_entry
3962 = add_addr_table_entry (addr,
3963 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3964 else
3965 ref->dw_loc_oprnd1.val_entry = NULL;
3966
3967 return ref;
3968 }
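/* A minimal usage sketch; DIE and SYM are hypothetical names standing
   for a dw_die_ref and a SYMBOL_REF rtx already in scope:

     dw_loc_descr_ref descr = new_addr_loc_descr (SYM, dtprel_false);
     add_AT_loc (DIE, DW_AT_location, descr);

   With dwarf_split_debug_info the address is additionally recorded in
   the address table via add_addr_table_entry, as done above.  */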
3969
3970 /* Section names used to hold DWARF debugging information. */
3971
3972 #ifndef DEBUG_INFO_SECTION
3973 #define DEBUG_INFO_SECTION ".debug_info"
3974 #endif
3975 #ifndef DEBUG_DWO_INFO_SECTION
3976 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3977 #endif
3978 #ifndef DEBUG_LTO_INFO_SECTION
3979 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3980 #endif
3981 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3982 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3983 #endif
3984 #ifndef DEBUG_ABBREV_SECTION
3985 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3986 #endif
3987 #ifndef DEBUG_LTO_ABBREV_SECTION
3988 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3989 #endif
3990 #ifndef DEBUG_DWO_ABBREV_SECTION
3991 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3992 #endif
3993 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3994 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3995 #endif
3996 #ifndef DEBUG_ARANGES_SECTION
3997 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3998 #endif
3999 #ifndef DEBUG_ADDR_SECTION
4000 #define DEBUG_ADDR_SECTION ".debug_addr"
4001 #endif
4002 #ifndef DEBUG_MACINFO_SECTION
4003 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
4004 #endif
4005 #ifndef DEBUG_LTO_MACINFO_SECTION
4006 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
4007 #endif
4008 #ifndef DEBUG_DWO_MACINFO_SECTION
4009 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
4010 #endif
4011 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
4012 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
4013 #endif
4014 #ifndef DEBUG_MACRO_SECTION
4015 #define DEBUG_MACRO_SECTION ".debug_macro"
4016 #endif
4017 #ifndef DEBUG_LTO_MACRO_SECTION
4018 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
4019 #endif
4020 #ifndef DEBUG_DWO_MACRO_SECTION
4021 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4022 #endif
4023 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4024 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4025 #endif
4026 #ifndef DEBUG_LINE_SECTION
4027 #define DEBUG_LINE_SECTION ".debug_line"
4028 #endif
4029 #ifndef DEBUG_LTO_LINE_SECTION
4030 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4031 #endif
4032 #ifndef DEBUG_DWO_LINE_SECTION
4033 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4034 #endif
4035 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4036 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4037 #endif
4038 #ifndef DEBUG_LOC_SECTION
4039 #define DEBUG_LOC_SECTION ".debug_loc"
4040 #endif
4041 #ifndef DEBUG_DWO_LOC_SECTION
4042 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4043 #endif
4044 #ifndef DEBUG_LOCLISTS_SECTION
4045 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4046 #endif
4047 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4048 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4049 #endif
4050 #ifndef DEBUG_PUBNAMES_SECTION
4051 #define DEBUG_PUBNAMES_SECTION \
4052 ((debug_generate_pub_sections == 2) \
4053 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4054 #endif
4055 #ifndef DEBUG_PUBTYPES_SECTION
4056 #define DEBUG_PUBTYPES_SECTION \
4057 ((debug_generate_pub_sections == 2) \
4058 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4059 #endif
4060 #ifndef DEBUG_STR_OFFSETS_SECTION
4061 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4062 #endif
4063 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4064 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4065 #endif
4066 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4067 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4068 #endif
4069 #ifndef DEBUG_STR_SECTION
4070 #define DEBUG_STR_SECTION ".debug_str"
4071 #endif
4072 #ifndef DEBUG_LTO_STR_SECTION
4073 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4074 #endif
4075 #ifndef DEBUG_STR_DWO_SECTION
4076 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4077 #endif
4078 #ifndef DEBUG_LTO_STR_DWO_SECTION
4079 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4080 #endif
4081 #ifndef DEBUG_RANGES_SECTION
4082 #define DEBUG_RANGES_SECTION ".debug_ranges"
4083 #endif
4084 #ifndef DEBUG_RNGLISTS_SECTION
4085 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4086 #endif
4087 #ifndef DEBUG_LINE_STR_SECTION
4088 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4089 #endif
4090 #ifndef DEBUG_LTO_LINE_STR_SECTION
4091 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4092 #endif
4093
4094 /* Standard ELF section names for compiled code and data. */
4095 #ifndef TEXT_SECTION_NAME
4096 #define TEXT_SECTION_NAME ".text"
4097 #endif
4098
4099 /* Section flags for .debug_str section. */
4100 #define DEBUG_STR_SECTION_FLAGS \
4101 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4102 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4103 : SECTION_DEBUG)
4104
4105 /* Section flags for .debug_str.dwo section. */
4106 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4107
4108 /* Attribute used to refer to the macro section. */
4109 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4110 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
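/* That is: DWARF 5 and later use DW_AT_macros; below DWARF 5, strict
   DWARF (-gstrict-dwarf) falls back to DW_AT_macro_info, and otherwise
   the GNU extension DW_AT_GNU_macros is used.  */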
4111
4112 /* Labels we insert at the beginning of sections, which we can reference
4113 instead of the section names themselves. */
4114
4115 #ifndef TEXT_SECTION_LABEL
4116 #define TEXT_SECTION_LABEL "Ltext"
4117 #endif
4118 #ifndef COLD_TEXT_SECTION_LABEL
4119 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4120 #endif
4121 #ifndef DEBUG_LINE_SECTION_LABEL
4122 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4123 #endif
4124 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4125 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4126 #endif
4127 #ifndef DEBUG_INFO_SECTION_LABEL
4128 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4129 #endif
4130 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4131 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4132 #endif
4133 #ifndef DEBUG_ABBREV_SECTION_LABEL
4134 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4135 #endif
4136 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4137 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4138 #endif
4139 #ifndef DEBUG_ADDR_SECTION_LABEL
4140 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4141 #endif
4142 #ifndef DEBUG_LOC_SECTION_LABEL
4143 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4144 #endif
4145 #ifndef DEBUG_RANGES_SECTION_LABEL
4146 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4147 #endif
4148 #ifndef DEBUG_MACINFO_SECTION_LABEL
4149 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4150 #endif
4151 #ifndef DEBUG_MACRO_SECTION_LABEL
4152 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4153 #endif
4154 #define SKELETON_COMP_DIE_ABBREV 1
4155 #define SKELETON_TYPE_DIE_ABBREV 2
4156
4157 /* Definitions of defaults for formats and names of various special
4158 (artificial) labels which may be generated within this file (when the -g
4159 option is used and DWARF2_DEBUGGING_INFO is in effect).
4160 If necessary, these may be overridden from within the tm.h file, but
4161 typically, overriding these defaults is unnecessary. */
4162
4163 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4164 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4165 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4166 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4167 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4168 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4169 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4170 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4171 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4172 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4173 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4174 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4175 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4176 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4177 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4178
4179 #ifndef TEXT_END_LABEL
4180 #define TEXT_END_LABEL "Letext"
4181 #endif
4182 #ifndef COLD_END_LABEL
4183 #define COLD_END_LABEL "Letext_cold"
4184 #endif
4185 #ifndef BLOCK_BEGIN_LABEL
4186 #define BLOCK_BEGIN_LABEL "LBB"
4187 #endif
4188 #ifndef BLOCK_INLINE_ENTRY_LABEL
4189 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4190 #endif
4191 #ifndef BLOCK_END_LABEL
4192 #define BLOCK_END_LABEL "LBE"
4193 #endif
4194 #ifndef LINE_CODE_LABEL
4195 #define LINE_CODE_LABEL "LM"
4196 #endif
4197
4198 \f
4199 /* Return the root of the DIE's built for the current compilation unit. */
4200 static dw_die_ref
4201 comp_unit_die (void)
4202 {
4203 if (!single_comp_unit_die)
4204 single_comp_unit_die = gen_compile_unit_die (NULL);
4205 return single_comp_unit_die;
4206 }
4207
4208 /* We allow a language front-end to designate a function that is to be
4209 called to "demangle" any name before it is put into a DIE. */
4210
4211 static const char *(*demangle_name_func) (const char *);
4212
4213 void
4214 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4215 {
4216 demangle_name_func = func;
4217 }
4218
4219 /* Test if rtl node points to a pseudo register. */
4220
4221 static inline int
4222 is_pseudo_reg (const_rtx rtl)
4223 {
4224 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4225 || (GET_CODE (rtl) == SUBREG
4226 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4227 }
4228
4229 /* Return a reference to a type, with its const and volatile qualifiers
4230 removed. */
4231
4232 static inline tree
4233 type_main_variant (tree type)
4234 {
4235 type = TYPE_MAIN_VARIANT (type);
4236
4237 /* ??? There really should be only one main variant among any group of
4238 variants of a given type (and all of the MAIN_VARIANT values for all
4239 members of the group should point to that one type) but sometimes the C
4240 front-end messes this up for array types, so we work around that bug
4241 here. */
4242 if (TREE_CODE (type) == ARRAY_TYPE)
4243 while (type != TYPE_MAIN_VARIANT (type))
4244 type = TYPE_MAIN_VARIANT (type);
4245
4246 return type;
4247 }
4248
4249 /* Return nonzero if the given type node represents a tagged type. */
4250
4251 static inline int
4252 is_tagged_type (const_tree type)
4253 {
4254 enum tree_code code = TREE_CODE (type);
4255
4256 return (code == RECORD_TYPE || code == UNION_TYPE
4257 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4258 }
4259
4260 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4261
4262 static void
4263 get_ref_die_offset_label (char *label, dw_die_ref ref)
4264 {
4265 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4266 }
4267
4268 /* Return die_offset of a DIE reference to a base type. */
4269
4270 static unsigned long int
4271 get_base_type_offset (dw_die_ref ref)
4272 {
4273 if (ref->die_offset)
4274 return ref->die_offset;
4275 if (comp_unit_die ()->die_abbrev)
4276 {
4277 calc_base_type_die_sizes ();
4278 gcc_assert (ref->die_offset);
4279 }
4280 return ref->die_offset;
4281 }
4282
4283 /* Return die_offset of a DIE reference other than base type. */
4284
4285 static unsigned long int
4286 get_ref_die_offset (dw_die_ref ref)
4287 {
4288 gcc_assert (ref->die_offset);
4289 return ref->die_offset;
4290 }
4291
4292 /* Convert a DIE tag into its string name. */
4293
4294 static const char *
4295 dwarf_tag_name (unsigned int tag)
4296 {
4297 const char *name = get_DW_TAG_name (tag);
4298
4299 if (name != NULL)
4300 return name;
4301
4302 return "DW_TAG_<unknown>";
4303 }
4304
4305 /* Convert a DWARF attribute code into its string name. */
4306
4307 static const char *
4308 dwarf_attr_name (unsigned int attr)
4309 {
4310 const char *name;
4311
4312 switch (attr)
4313 {
4314 #if VMS_DEBUGGING_INFO
4315 case DW_AT_HP_prologue:
4316 return "DW_AT_HP_prologue";
4317 #else
4318 case DW_AT_MIPS_loop_unroll_factor:
4319 return "DW_AT_MIPS_loop_unroll_factor";
4320 #endif
4321
4322 #if VMS_DEBUGGING_INFO
4323 case DW_AT_HP_epilogue:
4324 return "DW_AT_HP_epilogue";
4325 #else
4326 case DW_AT_MIPS_stride:
4327 return "DW_AT_MIPS_stride";
4328 #endif
4329 }
4330
4331 name = get_DW_AT_name (attr);
4332
4333 if (name != NULL)
4334 return name;
4335
4336 return "DW_AT_<unknown>";
4337 }
4338
4339 /* Convert a DWARF value form code into its string name. */
4340
4341 static const char *
4342 dwarf_form_name (unsigned int form)
4343 {
4344 const char *name = get_DW_FORM_name (form);
4345
4346 if (name != NULL)
4347 return name;
4348
4349 return "DW_FORM_<unknown>";
4350 }
4351 \f
4352 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4353 instance of an inlined instance of a decl which is local to an inline
4354 function, so we have to trace all of the way back through the origin chain
4355 to find out what sort of node actually served as the original seed for the
4356 given block. */
4357
4358 static tree
4359 decl_ultimate_origin (const_tree decl)
4360 {
4361 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4362 return NULL_TREE;
4363
4364 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4365 we're trying to output the abstract instance of this function. */
4366 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4367 return NULL_TREE;
4368
4369 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4370 most distant ancestor, this should never happen. */
4371 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4372
4373 return DECL_ABSTRACT_ORIGIN (decl);
4374 }
4375
4376 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4377 of a virtual function may refer to a base class, so we check the 'this'
4378 parameter. */
4379
4380 static tree
4381 decl_class_context (tree decl)
4382 {
4383 tree context = NULL_TREE;
4384
4385 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4386 context = DECL_CONTEXT (decl);
4387 else
4388 context = TYPE_MAIN_VARIANT
4389 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4390
4391 if (context && !TYPE_P (context))
4392 context = NULL_TREE;
4393
4394 return context;
4395 }
4396 \f
4397 /* Add an attribute/value pair to a DIE. */
4398
4399 static inline void
4400 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4401 {
4402 /* Maybe this should be an assert? */
4403 if (die == NULL)
4404 return;
4405
4406 if (flag_checking)
4407 {
4408 /* Check we do not add duplicate attrs. Can't use get_AT here
4409 because that recurses to the specification/abstract origin DIE. */
4410 dw_attr_node *a;
4411 unsigned ix;
4412 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4413 gcc_assert (a->dw_attr != attr->dw_attr);
4414 }
4415
4416 vec_safe_reserve (die->die_attr, 1);
4417 vec_safe_push (die->die_attr, *attr);
4418 }
4419
4420 static inline enum dw_val_class
4421 AT_class (dw_attr_node *a)
4422 {
4423 return a->dw_attr_val.val_class;
4424 }
4425
4426 /* Return the index for any attribute that will be referenced with a
4427 DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String
4428 indices are stored in dw_attr_val.v.val_str so that the reference
4429 count can be used for pruning. */
4430
4431 static inline unsigned int
4432 AT_index (dw_attr_node *a)
4433 {
4434 if (AT_class (a) == dw_val_class_str)
4435 return a->dw_attr_val.v.val_str->index;
4436 else if (a->dw_attr_val.val_entry != NULL)
4437 return a->dw_attr_val.val_entry->index;
4438 return NOT_INDEXED;
4439 }
4440
4441 /* Add a flag value attribute to a DIE. */
4442
4443 static inline void
4444 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4445 {
4446 dw_attr_node attr;
4447
4448 attr.dw_attr = attr_kind;
4449 attr.dw_attr_val.val_class = dw_val_class_flag;
4450 attr.dw_attr_val.val_entry = NULL;
4451 attr.dw_attr_val.v.val_flag = flag;
4452 add_dwarf_attr (die, &attr);
4453 }
4454
4455 static inline unsigned
4456 AT_flag (dw_attr_node *a)
4457 {
4458 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4459 return a->dw_attr_val.v.val_flag;
4460 }
4461
4462 /* Add a signed integer attribute value to a DIE. */
4463
4464 static inline void
4465 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4466 {
4467 dw_attr_node attr;
4468
4469 attr.dw_attr = attr_kind;
4470 attr.dw_attr_val.val_class = dw_val_class_const;
4471 attr.dw_attr_val.val_entry = NULL;
4472 attr.dw_attr_val.v.val_int = int_val;
4473 add_dwarf_attr (die, &attr);
4474 }
4475
4476 static inline HOST_WIDE_INT
4477 AT_int (dw_attr_node *a)
4478 {
4479 gcc_assert (a && (AT_class (a) == dw_val_class_const
4480 || AT_class (a) == dw_val_class_const_implicit));
4481 return a->dw_attr_val.v.val_int;
4482 }
4483
4484 /* Add an unsigned integer attribute value to a DIE. */
4485
4486 static inline void
4487 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4488 unsigned HOST_WIDE_INT unsigned_val)
4489 {
4490 dw_attr_node attr;
4491
4492 attr.dw_attr = attr_kind;
4493 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4494 attr.dw_attr_val.val_entry = NULL;
4495 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4496 add_dwarf_attr (die, &attr);
4497 }
4498
4499 static inline unsigned HOST_WIDE_INT
4500 AT_unsigned (dw_attr_node *a)
4501 {
4502 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4503 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4504 return a->dw_attr_val.v.val_unsigned;
4505 }
4506
4507 /* Add an unsigned wide integer attribute value to a DIE. */
4508
4509 static inline void
4510 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4511 const wide_int& w)
4512 {
4513 dw_attr_node attr;
4514
4515 attr.dw_attr = attr_kind;
4516 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4517 attr.dw_attr_val.val_entry = NULL;
4518 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4519 *attr.dw_attr_val.v.val_wide = w;
4520 add_dwarf_attr (die, &attr);
4521 }
4522
4523 /* Add an unsigned double integer attribute value to a DIE. */
4524
4525 static inline void
4526 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4527 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4528 {
4529 dw_attr_node attr;
4530
4531 attr.dw_attr = attr_kind;
4532 attr.dw_attr_val.val_class = dw_val_class_const_double;
4533 attr.dw_attr_val.val_entry = NULL;
4534 attr.dw_attr_val.v.val_double.high = high;
4535 attr.dw_attr_val.v.val_double.low = low;
4536 add_dwarf_attr (die, &attr);
4537 }
4538
4539 /* Add a vector attribute value (LENGTH elements of ELT_SIZE bytes each) to a DIE. */
4540
4541 static inline void
4542 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4543 unsigned int length, unsigned int elt_size, unsigned char *array)
4544 {
4545 dw_attr_node attr;
4546
4547 attr.dw_attr = attr_kind;
4548 attr.dw_attr_val.val_class = dw_val_class_vec;
4549 attr.dw_attr_val.val_entry = NULL;
4550 attr.dw_attr_val.v.val_vec.length = length;
4551 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4552 attr.dw_attr_val.v.val_vec.array = array;
4553 add_dwarf_attr (die, &attr);
4554 }
4555
4556 /* Add an 8-byte data attribute value to a DIE. */
4557
4558 static inline void
4559 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4560 unsigned char data8[8])
4561 {
4562 dw_attr_node attr;
4563
4564 attr.dw_attr = attr_kind;
4565 attr.dw_attr_val.val_class = dw_val_class_data8;
4566 attr.dw_attr_val.val_entry = NULL;
4567 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4568 add_dwarf_attr (die, &attr);
4569 }
4570
4571 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4572 dwarf_split_debug_info, address attributes in dies destined for the
4573 final executable have force_direct set to avoid using indexed
4574 references. */
4575
4576 static inline void
4577 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4578 bool force_direct)
4579 {
4580 dw_attr_node attr;
4581 char * lbl_id;
4582
4583 lbl_id = xstrdup (lbl_low);
4584 attr.dw_attr = DW_AT_low_pc;
4585 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4586 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4587 if (dwarf_split_debug_info && !force_direct)
4588 attr.dw_attr_val.val_entry
4589 = add_addr_table_entry (lbl_id, ate_kind_label);
4590 else
4591 attr.dw_attr_val.val_entry = NULL;
4592 add_dwarf_attr (die, &attr);
4593
4594 attr.dw_attr = DW_AT_high_pc;
4595 if (dwarf_version < 4)
4596 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4597 else
4598 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4599 lbl_id = xstrdup (lbl_high);
4600 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4601 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4602 && dwarf_split_debug_info && !force_direct)
4603 attr.dw_attr_val.val_entry
4604 = add_addr_table_entry (lbl_id, ate_kind_label);
4605 else
4606 attr.dw_attr_val.val_entry = NULL;
4607 add_dwarf_attr (die, &attr);
4608 }
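/* A hedged usage sketch; SUBPROGRAM_DIE, LOW_LABEL and HIGH_LABEL are
   hypothetical names (the labels would typically come from
   ASM_GENERATE_INTERNAL_LABEL):

     add_AT_low_high_pc (SUBPROGRAM_DIE, LOW_LABEL, HIGH_LABEL, false);

   The final argument is force_direct.  For DWARF 4 and later the high
   PC gets class dw_val_class_high_pc, which is later output as an
   offset from DW_AT_low_pc rather than as an absolute address.  */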
4609
4610 /* Hash and equality functions for debug_str_hash. */
4611
4612 hashval_t
4613 indirect_string_hasher::hash (indirect_string_node *x)
4614 {
4615 return htab_hash_string (x->str);
4616 }
4617
4618 bool
4619 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4620 {
4621 return strcmp (x1->str, x2) == 0;
4622 }
4623
4624 /* Add STR to the given string hash table. */
4625
4626 static struct indirect_string_node *
4627 find_AT_string_in_table (const char *str,
4628 hash_table<indirect_string_hasher> *table,
4629 enum insert_option insert = INSERT)
4630 {
4631 struct indirect_string_node *node;
4632
4633 indirect_string_node **slot
4634 = table->find_slot_with_hash (str, htab_hash_string (str), insert);
4635 if (*slot == NULL)
4636 {
4637 node = ggc_cleared_alloc<indirect_string_node> ();
4638 node->str = ggc_strdup (str);
4639 *slot = node;
4640 }
4641 else
4642 node = *slot;
4643
4644 node->refcount++;
4645 return node;
4646 }
4647
4648 /* Add STR to the indirect string hash table. */
4649
4650 static struct indirect_string_node *
4651 find_AT_string (const char *str, enum insert_option insert = INSERT)
4652 {
4653 if (! debug_str_hash)
4654 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4655
4656 return find_AT_string_in_table (str, debug_str_hash, insert);
4657 }
4658
4659 /* Add a string attribute value to a DIE. */
4660
4661 static inline void
4662 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4663 {
4664 dw_attr_node attr;
4665 struct indirect_string_node *node;
4666
4667 node = find_AT_string (str);
4668
4669 attr.dw_attr = attr_kind;
4670 attr.dw_attr_val.val_class = dw_val_class_str;
4671 attr.dw_attr_val.val_entry = NULL;
4672 attr.dw_attr_val.v.val_str = node;
4673 add_dwarf_attr (die, &attr);
4674 }
4675
4676 static inline const char *
4677 AT_string (dw_attr_node *a)
4678 {
4679 gcc_assert (a && AT_class (a) == dw_val_class_str);
4680 return a->dw_attr_val.v.val_str->str;
4681 }
4682
4683 /* Call this function directly to bypass AT_string_form's logic to put
4684 the string inline in the die. */
4685
4686 static void
4687 set_indirect_string (struct indirect_string_node *node)
4688 {
4689 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4690 /* Already indirect is a no op. */
4691 if (node->form == DW_FORM_strp
4692 || node->form == DW_FORM_line_strp
4693 || node->form == dwarf_FORM (DW_FORM_strx))
4694 {
4695 gcc_assert (node->label);
4696 return;
4697 }
4698 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4699 ++dw2_string_counter;
4700 node->label = xstrdup (label);
4701
4702 if (!dwarf_split_debug_info)
4703 {
4704 node->form = DW_FORM_strp;
4705 node->index = NOT_INDEXED;
4706 }
4707 else
4708 {
4709 node->form = dwarf_FORM (DW_FORM_strx);
4710 node->index = NO_INDEX_ASSIGNED;
4711 }
4712 }
4713
4714 /* A helper function for dwarf2out_finish, called to reset indirect
4715 string decisions done for early LTO dwarf output before fat object
4716 dwarf output. */
4717
4718 int
4719 reset_indirect_string (indirect_string_node **h, void *)
4720 {
4721 struct indirect_string_node *node = *h;
4722 if (node->form == DW_FORM_strp || node->form == dwarf_FORM (DW_FORM_strx))
4723 {
4724 free (node->label);
4725 node->label = NULL;
4726 node->form = (dwarf_form) 0;
4727 node->index = 0;
4728 }
4729 return 1;
4730 }
4731
4732 /* Find out whether a string should be output inline in DIE
4733 or out-of-line in .debug_str section. */
4734
4735 static enum dwarf_form
4736 find_string_form (struct indirect_string_node *node)
4737 {
4738 unsigned int len;
4739
4740 if (node->form)
4741 return node->form;
4742
4743 len = strlen (node->str) + 1;
4744
4745 /* If the string is no longer than the size of the reference, it is
4746 always better to put it inline. */
4747 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4748 return node->form = DW_FORM_string;
4749
4750 /* If we cannot expect the linker to merge strings in the .debug_str
4751 section, only put it into .debug_str if it is worthwhile even within
4752 this single module. */
4753 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4754 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4755 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4756 return node->form = DW_FORM_string;
4757
4758 set_indirect_string (node);
4759
4760 return node->form;
4761 }
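/* A worked example of the heuristic above, using hypothetical numbers:
   with 4-byte offsets (DWARF_OFFSET_SIZE == 4) the string "abc" has
   len == 4 <= 4 and is always emitted inline as DW_FORM_string.  A
   21-byte string referenced once on a target without a merging
   .debug_str section saves only (21 - 4) * 1 == 17 <= 21 bytes, so it
   also stays inline; it is moved out of line only when the savings
   exceed the string length (more references) or when the linker can
   merge .debug_str contents.  */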
4762
4763 /* Find out whether the string referenced from the attribute should be
4764 output inline in DIE or out-of-line in .debug_str section. */
4765
4766 static enum dwarf_form
4767 AT_string_form (dw_attr_node *a)
4768 {
4769 gcc_assert (a && AT_class (a) == dw_val_class_str);
4770 return find_string_form (a->dw_attr_val.v.val_str);
4771 }
4772
4773 /* Add a DIE reference attribute value to a DIE. */
4774
4775 static inline void
4776 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4777 {
4778 dw_attr_node attr;
4779 gcc_checking_assert (targ_die != NULL);
4780
4781 /* With LTO we can end up trying to reference something we didn't create
4782 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4783 if (targ_die == NULL)
4784 return;
4785
4786 attr.dw_attr = attr_kind;
4787 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4788 attr.dw_attr_val.val_entry = NULL;
4789 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4790 attr.dw_attr_val.v.val_die_ref.external = 0;
4791 add_dwarf_attr (die, &attr);
4792 }
4793
4794 /* Change DIE reference REF to point to NEW_DIE instead. */
4795
4796 static inline void
4797 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4798 {
4799 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4800 ref->dw_attr_val.v.val_die_ref.die = new_die;
4801 ref->dw_attr_val.v.val_die_ref.external = 0;
4802 }
4803
4804 /* Add an AT_specification attribute to a DIE, and also make the back
4805 pointer from the specification to the definition. */
4806
4807 static inline void
4808 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4809 {
4810 add_AT_die_ref (die, DW_AT_specification, targ_die);
4811 gcc_assert (!targ_die->die_definition);
4812 targ_die->die_definition = die;
4813 }
4814
4815 static inline dw_die_ref
4816 AT_ref (dw_attr_node *a)
4817 {
4818 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4819 return a->dw_attr_val.v.val_die_ref.die;
4820 }
4821
4822 static inline int
4823 AT_ref_external (dw_attr_node *a)
4824 {
4825 if (a && AT_class (a) == dw_val_class_die_ref)
4826 return a->dw_attr_val.v.val_die_ref.external;
4827
4828 return 0;
4829 }
4830
4831 static inline void
4832 set_AT_ref_external (dw_attr_node *a, int i)
4833 {
4834 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4835 a->dw_attr_val.v.val_die_ref.external = i;
4836 }
4837
4838 /* Add a location description attribute value to a DIE. */
4839
4840 static inline void
4841 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4842 {
4843 dw_attr_node attr;
4844
4845 attr.dw_attr = attr_kind;
4846 attr.dw_attr_val.val_class = dw_val_class_loc;
4847 attr.dw_attr_val.val_entry = NULL;
4848 attr.dw_attr_val.v.val_loc = loc;
4849 add_dwarf_attr (die, &attr);
4850 }
4851
4852 static inline dw_loc_descr_ref
4853 AT_loc (dw_attr_node *a)
4854 {
4855 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4856 return a->dw_attr_val.v.val_loc;
4857 }
4858
4859 static inline void
4860 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4861 {
4862 dw_attr_node attr;
4863
4864 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4865 return;
4866
4867 attr.dw_attr = attr_kind;
4868 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4869 attr.dw_attr_val.val_entry = NULL;
4870 attr.dw_attr_val.v.val_loc_list = loc_list;
4871 add_dwarf_attr (die, &attr);
4872 have_location_lists = true;
4873 }
4874
4875 static inline dw_loc_list_ref
4876 AT_loc_list (dw_attr_node *a)
4877 {
4878 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4879 return a->dw_attr_val.v.val_loc_list;
4880 }
4881
4882 /* Add a view list attribute to DIE. It must have a DW_AT_location
4883 attribute, because the view list complements the location list. */
4884
4885 static inline void
4886 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4887 {
4888 dw_attr_node attr;
4889
4890 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4891 return;
4892
4893 attr.dw_attr = attr_kind;
4894 attr.dw_attr_val.val_class = dw_val_class_view_list;
4895 attr.dw_attr_val.val_entry = NULL;
4896 attr.dw_attr_val.v.val_view_list = die;
4897 add_dwarf_attr (die, &attr);
4898 gcc_checking_assert (get_AT (die, DW_AT_location));
4899 gcc_assert (have_location_lists);
4900 }
4901
4902 /* Return a pointer to the location list referenced by the attribute.
4903 If the named attribute is a view list, look up the corresponding
4904 DW_AT_location attribute and return its location list. */
4905
4906 static inline dw_loc_list_ref *
4907 AT_loc_list_ptr (dw_attr_node *a)
4908 {
4909 gcc_assert (a);
4910 switch (AT_class (a))
4911 {
4912 case dw_val_class_loc_list:
4913 return &a->dw_attr_val.v.val_loc_list;
4914 case dw_val_class_view_list:
4915 {
4916 dw_attr_node *l;
4917 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4918 if (!l)
4919 return NULL;
4920 gcc_checking_assert (l + 1 == a);
4921 return AT_loc_list_ptr (l);
4922 }
4923 default:
4924 gcc_unreachable ();
4925 }
4926 }
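/* Note that the gcc_checking_assert above relies on the view-list
   attribute sitting immediately after DW_AT_location in the DIE's
   attribute vector, i.e. add_AT_view_list must be called right after
   the location list attribute is added.  */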
4927
4928 /* Return the location attribute value associated with a view list
4929 attribute value. */
4930
4931 static inline dw_val_node *
4932 view_list_to_loc_list_val_node (dw_val_node *val)
4933 {
4934 gcc_assert (val->val_class == dw_val_class_view_list);
4935 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4936 if (!loc)
4937 return NULL;
4938 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4939 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4940 return &loc->dw_attr_val;
4941 }
4942
4943 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4944 {
4945 static hashval_t hash (addr_table_entry *);
4946 static bool equal (addr_table_entry *, addr_table_entry *);
4947 };
4948
4949 /* Table of entries into the .debug_addr section. */
4950
4951 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4952
4953 /* Hash an address_table_entry. */
4954
4955 hashval_t
4956 addr_hasher::hash (addr_table_entry *a)
4957 {
4958 inchash::hash hstate;
4959 switch (a->kind)
4960 {
4961 case ate_kind_rtx:
4962 hstate.add_int (0);
4963 break;
4964 case ate_kind_rtx_dtprel:
4965 hstate.add_int (1);
4966 break;
4967 case ate_kind_label:
4968 return htab_hash_string (a->addr.label);
4969 default:
4970 gcc_unreachable ();
4971 }
4972 inchash::add_rtx (a->addr.rtl, hstate);
4973 return hstate.end ();
4974 }
4975
4976 /* Determine equality for two address_table_entries. */
4977
4978 bool
4979 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4980 {
4981 if (a1->kind != a2->kind)
4982 return 0;
4983 switch (a1->kind)
4984 {
4985 case ate_kind_rtx:
4986 case ate_kind_rtx_dtprel:
4987 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4988 case ate_kind_label:
4989 return strcmp (a1->addr.label, a2->addr.label) == 0;
4990 default:
4991 gcc_unreachable ();
4992 }
4993 }
4994
4995 /* Initialize an addr_table_entry. */
4996
4997 void
4998 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4999 {
5000 e->kind = kind;
5001 switch (kind)
5002 {
5003 case ate_kind_rtx:
5004 case ate_kind_rtx_dtprel:
5005 e->addr.rtl = (rtx) addr;
5006 break;
5007 case ate_kind_label:
5008 e->addr.label = (char *) addr;
5009 break;
5010 }
5011 e->refcount = 0;
5012 e->index = NO_INDEX_ASSIGNED;
5013 }
5014
5015 /* Add an entry for ADDR, of the given KIND, to the address table. Defer
5016 setting an index until output time. */
5017
5018 static addr_table_entry *
5019 add_addr_table_entry (void *addr, enum ate_kind kind)
5020 {
5021 addr_table_entry *node;
5022 addr_table_entry finder;
5023
5024 gcc_assert (dwarf_split_debug_info);
5025 if (! addr_index_table)
5026 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5027 init_addr_table_entry (&finder, kind, addr);
5028 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5029
5030 if (*slot == HTAB_EMPTY_ENTRY)
5031 {
5032 node = ggc_cleared_alloc<addr_table_entry> ();
5033 init_addr_table_entry (node, kind, addr);
5034 *slot = node;
5035 }
5036 else
5037 node = *slot;
5038
5039 node->refcount++;
5040 return node;
5041 }
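/* Sketch of how a label ends up in the address table (hypothetical;
   LBL is assumed to be a label string that stays live, since the table
   stores the pointer rather than a copy):

     addr_table_entry *ate = add_addr_table_entry (xstrdup (LBL),
                                                   ate_kind_label);

   The entry's index remains NO_INDEX_ASSIGNED until the whole table is
   walked at output time, see index_addr_table_entry below.  */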
5042
5043 /* Remove an entry from the addr table by decrementing its refcount.
5044 Strictly, decrementing the refcount would be enough, but the
5045 assertion that the entry is actually in the table has found
5046 bugs. */
5047
5048 static void
5049 remove_addr_table_entry (addr_table_entry *entry)
5050 {
5051 gcc_assert (dwarf_split_debug_info && addr_index_table);
5052 /* After an index is assigned, the table is frozen. */
5053 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5054 entry->refcount--;
5055 }
5056
5057 /* Given a location list, remove all addresses it refers to from the
5058 address_table. */
5059
5060 static void
5061 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5062 {
5063 for (; descr; descr = descr->dw_loc_next)
5064 if (descr->dw_loc_oprnd1.val_entry != NULL)
5065 {
5066 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5067 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5068 }
5069 }
5070
5071 /* A helper function for dwarf2out_finish called through
5072 htab_traverse. Assign an addr_table_entry its index. All entries
5073 must be collected into the table when this function is called,
5074 because the indexing code relies on htab_traverse to traverse nodes
5075 in the same order for each run. */
5076
5077 int
5078 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5079 {
5080 addr_table_entry *node = *h;
5081
5082 /* Don't index unreferenced nodes. */
5083 if (node->refcount == 0)
5084 return 1;
5085
5086 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5087 node->index = *index;
5088 *index += 1;
5089
5090 return 1;
5091 }
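
/* Illustration, not part of the original source: a rough sketch of the
   .debug_addr entry lifecycle under -gsplit-dwarf, using only the helpers
   defined above.  Here ADDR stands for some SYMBOL_REF that an attribute
   wants to reference indirectly:

       addr_table_entry *e = add_addr_table_entry (addr, ate_kind_rtx);
       ...
       remove_addr_table_entry (e);    (only if the referencing DIE is
                                        dropped again before output)

   At output time dwarf2out_finish walks the frozen table and invokes
   index_addr_table_entry on each slot with a running counter, skipping
   entries whose refcount dropped back to zero, so the indices later
   referenced from the split unit are dense in traversal order.  */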
5092
5093 /* Add an address constant attribute value to a DIE. When using
5094 dwarf_split_debug_info, address attributes in dies destined for the
5095 final executable should be direct references--setting the parameter
5096 force_direct ensures this behavior. */
5097
5098 static inline void
5099 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5100 bool force_direct)
5101 {
5102 dw_attr_node attr;
5103
5104 attr.dw_attr = attr_kind;
5105 attr.dw_attr_val.val_class = dw_val_class_addr;
5106 attr.dw_attr_val.v.val_addr = addr;
5107 if (dwarf_split_debug_info && !force_direct)
5108 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5109 else
5110 attr.dw_attr_val.val_entry = NULL;
5111 add_dwarf_attr (die, &attr);
5112 }
5113
5114 /* Get the RTX from an address DIE attribute. */
5115
5116 static inline rtx
5117 AT_addr (dw_attr_node *a)
5118 {
5119 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5120 return a->dw_attr_val.v.val_addr;
5121 }
5122
5123 /* Add a file attribute value to a DIE. */
5124
5125 static inline void
5126 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5127 struct dwarf_file_data *fd)
5128 {
5129 dw_attr_node attr;
5130
5131 attr.dw_attr = attr_kind;
5132 attr.dw_attr_val.val_class = dw_val_class_file;
5133 attr.dw_attr_val.val_entry = NULL;
5134 attr.dw_attr_val.v.val_file = fd;
5135 add_dwarf_attr (die, &attr);
5136 }
5137
5138 /* Get the dwarf_file_data from a file DIE attribute. */
5139
5140 static inline struct dwarf_file_data *
5141 AT_file (dw_attr_node *a)
5142 {
5143 gcc_assert (a && (AT_class (a) == dw_val_class_file
5144 || AT_class (a) == dw_val_class_file_implicit));
5145 return a->dw_attr_val.v.val_file;
5146 }
5147
5148 /* Add a vms delta attribute value to a DIE. */
5149
5150 static inline void
5151 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5152 const char *lbl1, const char *lbl2)
5153 {
5154 dw_attr_node attr;
5155
5156 attr.dw_attr = attr_kind;
5157 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5158 attr.dw_attr_val.val_entry = NULL;
5159 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5160 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5161 add_dwarf_attr (die, &attr);
5162 }
5163
5164 /* Add a symbolic view identifier attribute value to a DIE. */
5165
5166 static inline void
5167 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5168 const char *view_label)
5169 {
5170 dw_attr_node attr;
5171
5172 attr.dw_attr = attr_kind;
5173 attr.dw_attr_val.val_class = dw_val_class_symview;
5174 attr.dw_attr_val.val_entry = NULL;
5175 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5176 add_dwarf_attr (die, &attr);
5177 }
5178
5179 /* Add a label identifier attribute value to a DIE. */
5180
5181 static inline void
5182 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5183 const char *lbl_id)
5184 {
5185 dw_attr_node attr;
5186
5187 attr.dw_attr = attr_kind;
5188 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5189 attr.dw_attr_val.val_entry = NULL;
5190 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5191 if (dwarf_split_debug_info)
5192 attr.dw_attr_val.val_entry
5193 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5194 ate_kind_label);
5195 add_dwarf_attr (die, &attr);
5196 }
5197
5198 /* Add a section offset attribute value to a DIE, an offset into the
5199 debug_line section. */
5200
5201 static inline void
5202 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5203 const char *label)
5204 {
5205 dw_attr_node attr;
5206
5207 attr.dw_attr = attr_kind;
5208 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5209 attr.dw_attr_val.val_entry = NULL;
5210 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5211 add_dwarf_attr (die, &attr);
5212 }
5213
5214 /* Add a section offset attribute value to a DIE, an offset into the
5215 debug_macinfo section. */
5216
5217 static inline void
5218 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5219 const char *label)
5220 {
5221 dw_attr_node attr;
5222
5223 attr.dw_attr = attr_kind;
5224 attr.dw_attr_val.val_class = dw_val_class_macptr;
5225 attr.dw_attr_val.val_entry = NULL;
5226 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5227 add_dwarf_attr (die, &attr);
5228 }
5229
5230 /* Add a range_list attribute value to a DIE. When using
5231 dwarf_split_debug_info, address attributes in dies destined for the
5232 final executable should be direct references--setting the parameter
5233 force_direct ensures this behavior. */
5234
5235 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5236 #define RELOCATED_OFFSET (NULL)
5237
5238 static void
5239 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5240 long unsigned int offset, bool force_direct)
5241 {
5242 dw_attr_node attr;
5243
5244 attr.dw_attr = attr_kind;
5245 attr.dw_attr_val.val_class = dw_val_class_range_list;
5246 /* For the range_list attribute, use val_entry to store whether the
5247 offset should follow split-debug-info or normal semantics. This
5248 value is read in output_range_list_offset. */
5249 if (dwarf_split_debug_info && !force_direct)
5250 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5251 else
5252 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5253 attr.dw_attr_val.v.val_offset = offset;
5254 add_dwarf_attr (die, &attr);
5255 }
5256
5257 /* Return the start label of a delta attribute. */
5258
5259 static inline const char *
5260 AT_vms_delta1 (dw_attr_node *a)
5261 {
5262 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5263 return a->dw_attr_val.v.val_vms_delta.lbl1;
5264 }
5265
5266 /* Return the end label of a delta attribute. */
5267
5268 static inline const char *
5269 AT_vms_delta2 (dw_attr_node *a)
5270 {
5271 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5272 return a->dw_attr_val.v.val_vms_delta.lbl2;
5273 }
5274
5275 static inline const char *
5276 AT_lbl (dw_attr_node *a)
5277 {
5278 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5279 || AT_class (a) == dw_val_class_lineptr
5280 || AT_class (a) == dw_val_class_macptr
5281 || AT_class (a) == dw_val_class_loclistsptr
5282 || AT_class (a) == dw_val_class_high_pc));
5283 return a->dw_attr_val.v.val_lbl_id;
5284 }
5285
5286 /* Get the attribute of type attr_kind. */
5287
5288 static dw_attr_node *
5289 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5290 {
5291 dw_attr_node *a;
5292 unsigned ix;
5293 dw_die_ref spec = NULL;
5294
5295 if (! die)
5296 return NULL;
5297
5298 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5299 if (a->dw_attr == attr_kind)
5300 return a;
5301 else if (a->dw_attr == DW_AT_specification
5302 || a->dw_attr == DW_AT_abstract_origin)
5303 spec = AT_ref (a);
5304
5305 if (spec)
5306 return get_AT (spec, attr_kind);
5307
5308 return NULL;
5309 }
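
/* Illustration, not part of the original source: why get_AT chases
   DW_AT_specification / DW_AT_abstract_origin.  For an out-of-class C++
   member definition such as

       struct S { void f (); };
       void S::f () { }

   the DW_TAG_subprogram DIE for the definition typically carries
   DW_AT_specification plus code-range attributes, while DW_AT_name,
   DW_AT_decl_file etc. live on the in-class declaration DIE.  A call
   like get_AT (def_die, DW_AT_name) finds nothing locally and so
   recurses into the specification DIE, returning the attribute found
   there.  */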
5310
5311 /* Returns the parent of the declaration of DIE. */
5312
5313 static dw_die_ref
5314 get_die_parent (dw_die_ref die)
5315 {
5316 dw_die_ref t;
5317
5318 if (!die)
5319 return NULL;
5320
5321 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5322 || (t = get_AT_ref (die, DW_AT_specification)))
5323 die = t;
5324
5325 return die->die_parent;
5326 }
5327
5328 /* Return the "low pc" attribute value, typically associated with a subprogram
5329 DIE. Return NULL if the "low pc" attribute is not present or if it
5330 cannot be represented as an assembler label identifier. */
5331
5332 static inline const char *
5333 get_AT_low_pc (dw_die_ref die)
5334 {
5335 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5336
5337 return a ? AT_lbl (a) : NULL;
5338 }
5339
5340 /* Return the value of the string attribute designated by ATTR_KIND, or
5341 NULL if it is not present. */
5342
5343 static inline const char *
5344 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5345 {
5346 dw_attr_node *a = get_AT (die, attr_kind);
5347
5348 return a ? AT_string (a) : NULL;
5349 }
5350
5351 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5352 if it is not present. */
5353
5354 static inline int
5355 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5356 {
5357 dw_attr_node *a = get_AT (die, attr_kind);
5358
5359 return a ? AT_flag (a) : 0;
5360 }
5361
5362 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5363 if it is not present. */
5364
5365 static inline unsigned
5366 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5367 {
5368 dw_attr_node *a = get_AT (die, attr_kind);
5369
5370 return a ? AT_unsigned (a) : 0;
5371 }
5372
5373 static inline dw_die_ref
5374 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5375 {
5376 dw_attr_node *a = get_AT (die, attr_kind);
5377
5378 return a ? AT_ref (a) : NULL;
5379 }
5380
5381 static inline struct dwarf_file_data *
5382 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5383 {
5384 dw_attr_node *a = get_AT (die, attr_kind);
5385
5386 return a ? AT_file (a) : NULL;
5387 }
5388
5389 /* Return TRUE if the language is C. */
5390
5391 static inline bool
5392 is_c (void)
5393 {
5394 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5395
5396 return (lang == DW_LANG_C || lang == DW_LANG_C89 || lang == DW_LANG_C99
5397 || lang == DW_LANG_C11 || lang == DW_LANG_ObjC);
5398
5399
5400 }
5401
5402 /* Return TRUE if the language is C++. */
5403
5404 static inline bool
5405 is_cxx (void)
5406 {
5407 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5408
5409 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5410 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5411 }
5412
5413 /* Return TRUE if DECL was created by the C++ frontend. */
5414
5415 static bool
5416 is_cxx (const_tree decl)
5417 {
5418 if (in_lto_p)
5419 {
5420 const_tree context = get_ultimate_context (decl);
5421 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5422 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5423 }
5424 return is_cxx ();
5425 }
5426
5427 /* Return TRUE if the language is Fortran. */
5428
5429 static inline bool
5430 is_fortran (void)
5431 {
5432 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5433
5434 return (lang == DW_LANG_Fortran77
5435 || lang == DW_LANG_Fortran90
5436 || lang == DW_LANG_Fortran95
5437 || lang == DW_LANG_Fortran03
5438 || lang == DW_LANG_Fortran08);
5439 }
5440
5441 static inline bool
5442 is_fortran (const_tree decl)
5443 {
5444 if (in_lto_p)
5445 {
5446 const_tree context = get_ultimate_context (decl);
5447 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5448 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5449 "GNU Fortran", 11) == 0
5450 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5451 "GNU F77") == 0);
5452 }
5453 return is_fortran ();
5454 }
5455
5456 /* Return TRUE if the language is Ada. */
5457
5458 static inline bool
5459 is_ada (void)
5460 {
5461 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5462
5463 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5464 }
5465
5466 /* Return TRUE if the language is D. */
5467
5468 static inline bool
5469 is_dlang (void)
5470 {
5471 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5472
5473 return lang == DW_LANG_D;
5474 }
5475
5476 /* Remove the specified attribute if present. Return TRUE if removal
5477 was successful. */
5478
5479 static bool
5480 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5481 {
5482 dw_attr_node *a;
5483 unsigned ix;
5484
5485 if (! die)
5486 return false;
5487
5488 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5489 if (a->dw_attr == attr_kind)
5490 {
5491 if (AT_class (a) == dw_val_class_str)
5492 if (a->dw_attr_val.v.val_str->refcount)
5493 a->dw_attr_val.v.val_str->refcount--;
5494
5495 /* vec::ordered_remove should help reduce the number of abbrevs
5496 that are needed. */
5497 die->die_attr->ordered_remove (ix);
5498 return true;
5499 }
5500 return false;
5501 }
5502
5503 /* Remove CHILD from its parent. PREV must have the property that
5504 PREV->DIE_SIB == CHILD. Clears CHILD's die_sib but not its parent link. */
5505
5506 static void
5507 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5508 {
5509 gcc_assert (child->die_parent == prev->die_parent);
5510 gcc_assert (prev->die_sib == child);
5511 if (prev == child)
5512 {
5513 gcc_assert (child->die_parent->die_child == child);
5514 prev = NULL;
5515 }
5516 else
5517 prev->die_sib = child->die_sib;
5518 if (child->die_parent->die_child == child)
5519 child->die_parent->die_child = prev;
5520 child->die_sib = NULL;
5521 }
5522
5523 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5524 PREV->DIE_SIB == OLD_CHILD. Clears OLD_CHILD's die_sib but nothing else. */
5525
5526 static void
5527 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5528 {
5529 dw_die_ref parent = old_child->die_parent;
5530
5531 gcc_assert (parent == prev->die_parent);
5532 gcc_assert (prev->die_sib == old_child);
5533
5534 new_child->die_parent = parent;
5535 if (prev == old_child)
5536 {
5537 gcc_assert (parent->die_child == old_child);
5538 new_child->die_sib = new_child;
5539 }
5540 else
5541 {
5542 prev->die_sib = new_child;
5543 new_child->die_sib = old_child->die_sib;
5544 }
5545 if (old_child->die_parent->die_child == old_child)
5546 old_child->die_parent->die_child = new_child;
5547 old_child->die_sib = NULL;
5548 }
5549
5550 /* Move all children from OLD_PARENT to NEW_PARENT. */
5551
5552 static void
5553 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5554 {
5555 dw_die_ref c;
5556 new_parent->die_child = old_parent->die_child;
5557 old_parent->die_child = NULL;
5558 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5559 }
5560
5561 /* Remove child DIE whose die_tag is TAG. Do nothing if no child
5562 matches TAG. */
5563
5564 static void
5565 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5566 {
5567 dw_die_ref c;
5568
5569 c = die->die_child;
5570 if (c) do {
5571 dw_die_ref prev = c;
5572 c = c->die_sib;
5573 while (c->die_tag == tag)
5574 {
5575 remove_child_with_prev (c, prev);
5576 c->die_parent = NULL;
5577 /* Might have removed every child. */
5578 if (die->die_child == NULL)
5579 return;
5580 c = prev->die_sib;
5581 }
5582 } while (c != die->die_child);
5583 }
5584
5585 /* Add a CHILD_DIE as the last child of DIE. */
5586
5587 static void
5588 add_child_die (dw_die_ref die, dw_die_ref child_die)
5589 {
5590 /* FIXME this should probably be an assert. */
5591 if (! die || ! child_die)
5592 return;
5593 gcc_assert (die != child_die);
5594
5595 child_die->die_parent = die;
5596 if (die->die_child)
5597 {
5598 child_die->die_sib = die->die_child->die_sib;
5599 die->die_child->die_sib = child_die;
5600 }
5601 else
5602 child_die->die_sib = child_die;
5603 die->die_child = child_die;
5604 }
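
/* Illustration, not part of the original source: the child list built by
   add_child_die is a circular singly-linked list in which
   PARENT->die_child points at the most recently added child and the
   die_sib links wrap around to the first child.  After

       add_child_die (parent, a);
       add_child_die (parent, b);
       add_child_die (parent, c);

   the links are

       parent->die_child == c,   c->die_sib == a,
       a->die_sib == b,          b->die_sib == c,

   so walking from parent->die_child->die_sib until die_child itself has
   been processed (which is what FOR_EACH_CHILD does) visits a, b, c in
   insertion order.  */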
5605
5606 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5607
5608 static void
5609 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5610 dw_die_ref after_die)
5611 {
5612 gcc_assert (die
5613 && child_die
5614 && after_die
5615 && die->die_child
5616 && die != child_die);
5617
5618 child_die->die_parent = die;
5619 child_die->die_sib = after_die->die_sib;
5620 after_die->die_sib = child_die;
5621 if (die->die_child == after_die)
5622 die->die_child = child_die;
5623 }
5624
5625 /* Unassociate CHILD from its parent, and make its parent be
5626 NEW_PARENT. */
5627
5628 static void
5629 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5630 {
5631 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5632 if (p->die_sib == child)
5633 {
5634 remove_child_with_prev (child, p);
5635 break;
5636 }
5637 add_child_die (new_parent, child);
5638 }
5639
5640 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5641 is the specification, to the end of PARENT's list of children.
5642 This is done by removing and re-adding it. */
5643
5644 static void
5645 splice_child_die (dw_die_ref parent, dw_die_ref child)
5646 {
5647 /* We want the declaration DIE from inside the class, not the
5648 specification DIE at toplevel. */
5649 if (child->die_parent != parent)
5650 {
5651 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5652
5653 if (tmp)
5654 child = tmp;
5655 }
5656
5657 gcc_assert (child->die_parent == parent
5658 || (child->die_parent
5659 == get_AT_ref (parent, DW_AT_specification)));
5660
5661 reparent_child (child, parent);
5662 }
5663
5664 /* Create and return a new die with TAG_VALUE as tag. */
5665
5666 static inline dw_die_ref
5667 new_die_raw (enum dwarf_tag tag_value)
5668 {
5669 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5670 die->die_tag = tag_value;
5671 return die;
5672 }
5673
5674 /* Create and return a new die with a parent of PARENT_DIE. If
5675 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5676 associated tree T must be supplied to determine parenthood
5677 later. */
5678
5679 static inline dw_die_ref
5680 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5681 {
5682 dw_die_ref die = new_die_raw (tag_value);
5683
5684 if (parent_die != NULL)
5685 add_child_die (parent_die, die);
5686 else
5687 {
5688 limbo_die_node *limbo_node;
5689
5690 /* No DIEs created after early dwarf should end up in limbo,
5691 because the limbo list should not persist past LTO
5692 streaming. */
5693 if (tag_value != DW_TAG_compile_unit
5694 /* These are allowed because they're generated while
5695 breaking out COMDAT units late. */
5696 && tag_value != DW_TAG_type_unit
5697 && tag_value != DW_TAG_skeleton_unit
5698 && !early_dwarf
5699 /* Allow nested functions to live in limbo because they will
5700 only temporarily live there, as decls_for_scope will fix
5701 them up. */
5702 && (TREE_CODE (t) != FUNCTION_DECL
5703 || !decl_function_context (t))
5704 /* Same as nested functions above but for types. Types that
5705 are local to a function will be fixed in
5706 decls_for_scope. */
5707 && (!RECORD_OR_UNION_TYPE_P (t)
5708 || !TYPE_CONTEXT (t)
5709 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5710 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5711 especially in the ltrans stage, but once we implement LTO
5712 dwarf streaming, we should remove this exception. */
5713 && !in_lto_p)
5714 {
5715 fprintf (stderr, "symbol ended up in limbo too late:");
5716 debug_generic_stmt (t);
5717 gcc_unreachable ();
5718 }
5719
5720 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5721 limbo_node->die = die;
5722 limbo_node->created_for = t;
5723 limbo_node->next = limbo_die_list;
5724 limbo_die_list = limbo_node;
5725 }
5726
5727 return die;
5728 }
5729
5730 /* Return the DIE associated with the given type specifier. */
5731
5732 static inline dw_die_ref
5733 lookup_type_die (tree type)
5734 {
5735 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5736 if (die && die->removed)
5737 {
5738 TYPE_SYMTAB_DIE (type) = NULL;
5739 return NULL;
5740 }
5741 return die;
5742 }
5743
5744 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5745 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5746 anonymous type instead of the one of the naming typedef. */
5747
5748 static inline dw_die_ref
5749 strip_naming_typedef (tree type, dw_die_ref type_die)
5750 {
5751 if (type
5752 && TREE_CODE (type) == RECORD_TYPE
5753 && type_die
5754 && type_die->die_tag == DW_TAG_typedef
5755 && is_naming_typedef_decl (TYPE_NAME (type)))
5756 type_die = get_AT_ref (type_die, DW_AT_type);
5757 return type_die;
5758 }
5759
5760 /* Like lookup_type_die, but if type is an anonymous type named by a
5761 typedef[1], return the DIE of the anonymous type instead of the one of
5762 the naming typedef. This is because in gen_typedef_die, we did
5763 equate the anonymous struct named by the typedef with the DIE of
5764 the naming typedef. So by default, lookup_type_die on an anonymous
5765 struct yields the DIE of the naming typedef.
5766
5767 [1]: Read the comment of is_naming_typedef_decl to learn about what
5768 a naming typedef is. */
5769
5770 static inline dw_die_ref
5771 lookup_type_die_strip_naming_typedef (tree type)
5772 {
5773 dw_die_ref die = lookup_type_die (type);
5774 return strip_naming_typedef (type, die);
5775 }
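
/* Illustration, not part of the original source: the "naming typedef"
   case the two helpers above handle.  For

       typedef struct { int i; } T;

   the structure has no tag of its own and is named only through the
   typedef, so gen_typedef_die equates the anonymous structure type with
   the DW_TAG_typedef DIE for T.  lookup_type_die on that structure
   therefore yields the typedef DIE, and strip_naming_typedef follows
   its DW_AT_type attribute to recover the DIE of the anonymous
   structure itself.  */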
5776
5777 /* Equate a DIE to a given type specifier. */
5778
5779 static inline void
5780 equate_type_number_to_die (tree type, dw_die_ref type_die)
5781 {
5782 TYPE_SYMTAB_DIE (type) = type_die;
5783 }
5784
5785 static dw_die_ref maybe_create_die_with_external_ref (tree);
5786 struct GTY(()) sym_off_pair
5787 {
5788 const char * GTY((skip)) sym;
5789 unsigned HOST_WIDE_INT off;
5790 };
5791 static GTY(()) hash_map<tree, sym_off_pair> *external_die_map;
5792
5793 /* Returns a hash value for X (which really is a die_struct). */
5794
5795 inline hashval_t
5796 decl_die_hasher::hash (die_node *x)
5797 {
5798 return (hashval_t) x->decl_id;
5799 }
5800
5801 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5802
5803 inline bool
5804 decl_die_hasher::equal (die_node *x, tree y)
5805 {
5806 return (x->decl_id == DECL_UID (y));
5807 }
5808
5809 /* Return the DIE associated with a given declaration. */
5810
5811 static inline dw_die_ref
5812 lookup_decl_die (tree decl)
5813 {
5814 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5815 NO_INSERT);
5816 if (!die)
5817 {
5818 if (in_lto_p)
5819 return maybe_create_die_with_external_ref (decl);
5820 return NULL;
5821 }
5822 if ((*die)->removed)
5823 {
5824 decl_die_table->clear_slot (die);
5825 return NULL;
5826 }
5827 return *die;
5828 }
5829
5830
5831 /* Return the DIE associated with BLOCK. */
5832
5833 static inline dw_die_ref
5834 lookup_block_die (tree block)
5835 {
5836 dw_die_ref die = BLOCK_DIE (block);
5837 if (!die && in_lto_p)
5838 return maybe_create_die_with_external_ref (block);
5839 return die;
5840 }
5841
5842 /* Associate DIE with BLOCK. */
5843
5844 static inline void
5845 equate_block_to_die (tree block, dw_die_ref die)
5846 {
5847 BLOCK_DIE (block) = die;
5848 }
5849 #undef BLOCK_DIE
5850
5851
5852 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5853 style reference. Return true if we found one referring to a DIE for
5854 DECL, otherwise return false. */
5855
5856 static bool
5857 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5858 unsigned HOST_WIDE_INT *off)
5859 {
5860 dw_die_ref die;
5861
5862 if (in_lto_p)
5863 {
5864 /* During WPA stage and incremental linking we use a hash-map
5865 to store the decl <-> label + offset map. */
5866 if (!external_die_map)
5867 return false;
5868 sym_off_pair *desc = external_die_map->get (decl);
5869 if (!desc)
5870 return false;
5871 *sym = desc->sym;
5872 *off = desc->off;
5873 return true;
5874 }
5875
5876 if (TREE_CODE (decl) == BLOCK)
5877 die = lookup_block_die (decl);
5878 else
5879 die = lookup_decl_die (decl);
5880 if (!die)
5881 return false;
5882
5883 /* Similar to get_ref_die_offset_label, but using the "correct"
5884 label. */
5885 *off = die->die_offset;
5886 while (die->die_parent)
5887 die = die->die_parent;
5888 /* For the containing CU DIE we compute a die_symbol in
5889 compute_comp_unit_symbol. */
5890 gcc_assert (die->die_tag == DW_TAG_compile_unit
5891 && die->die_id.die_symbol != NULL);
5892 *sym = die->die_id.die_symbol;
5893 return true;
5894 }
5895
5896 /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */
5897
5898 static void
5899 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5900 const char *symbol, HOST_WIDE_INT offset)
5901 {
5902 /* Create a fake DIE that contains the reference. Don't use
5903 new_die because we don't want to end up in the limbo list. */
5904 /* ??? We probably want to share these, thus put a ref to the DIE
5905 we create here to the external_die_map entry. */
5906 dw_die_ref ref = new_die_raw (die->die_tag);
5907 ref->die_id.die_symbol = symbol;
5908 ref->die_offset = offset;
5909 ref->with_offset = 1;
5910 add_AT_die_ref (die, attr_kind, ref);
5911 }
5912
5913 /* Create a DIE for DECL if required and add a reference to a DIE
5914 at SYMBOL + OFFSET which contains attributes dumped early. */
5915
5916 static void
5917 dwarf2out_register_external_die (tree decl, const char *sym,
5918 unsigned HOST_WIDE_INT off)
5919 {
5920 if (debug_info_level == DINFO_LEVEL_NONE)
5921 return;
5922
5923 if (!external_die_map)
5924 external_die_map = hash_map<tree, sym_off_pair>::create_ggc (1000);
5925 gcc_checking_assert (!external_die_map->get (decl));
5926 sym_off_pair p = { IDENTIFIER_POINTER (get_identifier (sym)), off };
5927 external_die_map->put (decl, p);
5928 }
5929
5930 /* If we have a registered external DIE for DECL return a new DIE for
5931 the concrete instance with an appropriate abstract origin. */
5932
5933 static dw_die_ref
5934 maybe_create_die_with_external_ref (tree decl)
5935 {
5936 if (!external_die_map)
5937 return NULL;
5938 sym_off_pair *desc = external_die_map->get (decl);
5939 if (!desc)
5940 return NULL;
5941
5942 const char *sym = desc->sym;
5943 unsigned HOST_WIDE_INT off = desc->off;
5944
5945 in_lto_p = false;
5946 dw_die_ref die = (TREE_CODE (decl) == BLOCK
5947 ? lookup_block_die (decl) : lookup_decl_die (decl));
5948 gcc_assert (!die);
5949 in_lto_p = true;
5950
5951 tree ctx;
5952 dw_die_ref parent = NULL;
5953 /* Need to look up a DIE for the decl's context - the containing
5954 function or translation unit. */
5955 if (TREE_CODE (decl) == BLOCK)
5956 {
5957 ctx = BLOCK_SUPERCONTEXT (decl);
5958 /* ??? We do not output DIEs for all scopes thus skip as
5959 many DIEs as needed. */
5960 while (TREE_CODE (ctx) == BLOCK
5961 && !lookup_block_die (ctx))
5962 ctx = BLOCK_SUPERCONTEXT (ctx);
5963 }
5964 else
5965 ctx = DECL_CONTEXT (decl);
5966 /* Peel types in the context stack. */
5967 while (ctx && TYPE_P (ctx))
5968 ctx = TYPE_CONTEXT (ctx);
5969 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5970 if (debug_info_level <= DINFO_LEVEL_TERSE)
5971 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5972 ctx = DECL_CONTEXT (ctx);
5973 if (ctx)
5974 {
5975 if (TREE_CODE (ctx) == BLOCK)
5976 parent = lookup_block_die (ctx);
5977 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5978 /* Keep the 1:1 association during WPA. */
5979 && !flag_wpa
5980 && flag_incremental_link != INCREMENTAL_LINK_LTO)
5981 /* Otherwise all late annotations go to the main CU which
5982 imports the original CUs. */
5983 parent = comp_unit_die ();
5984 else if (TREE_CODE (ctx) == FUNCTION_DECL
5985 && TREE_CODE (decl) != FUNCTION_DECL
5986 && TREE_CODE (decl) != PARM_DECL
5987 && TREE_CODE (decl) != RESULT_DECL
5988 && TREE_CODE (decl) != BLOCK)
5989 /* Leave function local entities parent determination to when
5990 we process scope vars. */
5991 ;
5992 else
5993 parent = lookup_decl_die (ctx);
5994 }
5995 else
5996 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5997 Handle this case gracefully by globalizing stuff. */
5998 parent = comp_unit_die ();
5999 /* Create a DIE "stub". */
6000 switch (TREE_CODE (decl))
6001 {
6002 case TRANSLATION_UNIT_DECL:
6003 {
6004 die = comp_unit_die ();
6005 /* We re-target all CU decls to the LTRANS CU DIE, so no need
6006 to create a DIE for the original CUs. */
6007 return die;
6008 }
6009 case NAMESPACE_DECL:
6010 if (is_fortran (decl))
6011 die = new_die (DW_TAG_module, parent, decl);
6012 else
6013 die = new_die (DW_TAG_namespace, parent, decl);
6014 break;
6015 case FUNCTION_DECL:
6016 die = new_die (DW_TAG_subprogram, parent, decl);
6017 break;
6018 case VAR_DECL:
6019 die = new_die (DW_TAG_variable, parent, decl);
6020 break;
6021 case RESULT_DECL:
6022 die = new_die (DW_TAG_variable, parent, decl);
6023 break;
6024 case PARM_DECL:
6025 die = new_die (DW_TAG_formal_parameter, parent, decl);
6026 break;
6027 case CONST_DECL:
6028 die = new_die (DW_TAG_constant, parent, decl);
6029 break;
6030 case LABEL_DECL:
6031 die = new_die (DW_TAG_label, parent, decl);
6032 break;
6033 case BLOCK:
6034 die = new_die (DW_TAG_lexical_block, parent, decl);
6035 break;
6036 default:
6037 gcc_unreachable ();
6038 }
6039 if (TREE_CODE (decl) == BLOCK)
6040 equate_block_to_die (decl, die);
6041 else
6042 equate_decl_number_to_die (decl, die);
6043
6044 add_desc_attribute (die, decl);
6045
6046 /* Add a reference to the DIE providing early debug at $sym + off. */
6047 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6048
6049 return die;
6050 }
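
/* Illustration, not part of the original source: a rough sketch of how
   the pieces above cooperate for LTO early debug.  When the early DWARF
   for a decl is read back in, the streamer registers it with something
   like

       dwarf2out_register_external_die (decl, early_cu_symbol, offset);

   (early_cu_symbol/offset being whatever was recorded at compile time).
   Later, typically in the LTRANS stage, lookup_decl_die or
   lookup_block_die falls back to maybe_create_die_with_external_ref,
   which builds a stub DIE in a suitable parent context and attaches a
   DW_AT_abstract_origin emitted as a symbol + offset reference to the
   early DIE.  Late annotations such as locations then attach to the
   stub while the full description stays in the early debug info.  */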
6051
6052 /* Returns a hash value for X (which really is a var_loc_list). */
6053
6054 inline hashval_t
6055 decl_loc_hasher::hash (var_loc_list *x)
6056 {
6057 return (hashval_t) x->decl_id;
6058 }
6059
6060 /* Return nonzero if decl_id of var_loc_list X is the same as
6061 UID of decl *Y. */
6062
6063 inline bool
6064 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6065 {
6066 return (x->decl_id == DECL_UID (y));
6067 }
6068
6069 /* Return the var_loc list associated with a given declaration. */
6070
6071 static inline var_loc_list *
6072 lookup_decl_loc (const_tree decl)
6073 {
6074 if (!decl_loc_table)
6075 return NULL;
6076 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6077 }
6078
6079 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
6080
6081 inline hashval_t
6082 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6083 {
6084 return (hashval_t) x->decl_id;
6085 }
6086
6087 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6088 UID of decl *Y. */
6089
6090 inline bool
6091 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6092 {
6093 return (x->decl_id == DECL_UID (y));
6094 }
6095
6096 /* Equate a DIE to a particular declaration. */
6097
6098 static void
6099 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6100 {
6101 unsigned int decl_id = DECL_UID (decl);
6102
6103 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6104 decl_die->decl_id = decl_id;
6105 }
6106
6107 /* Return how many bits the PIECE EXPR_LIST covers. */
6108
6109 static HOST_WIDE_INT
6110 decl_piece_bitsize (rtx piece)
6111 {
6112 int ret = (int) GET_MODE (piece);
6113 if (ret)
6114 return ret;
6115 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6116 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6117 return INTVAL (XEXP (XEXP (piece, 0), 0));
6118 }
6119
6120 /* Return a pointer to where the location note is stored in the PIECE EXPR_LIST. */
6121
6122 static rtx *
6123 decl_piece_varloc_ptr (rtx piece)
6124 {
6125 if ((int) GET_MODE (piece))
6126 return &XEXP (piece, 0);
6127 else
6128 return &XEXP (XEXP (piece, 0), 1);
6129 }
6130
6131 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6132 NEXT is the chain of following piece nodes. */
6133
6134 static rtx_expr_list *
6135 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6136 {
6137 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6138 return alloc_EXPR_LIST (bitsize, loc_note, next);
6139 else
6140 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6141 GEN_INT (bitsize),
6142 loc_note), next);
6143 }
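
/* Illustration, not part of the original source: the encoding used by
   decl_piece_node and unpacked by decl_piece_bitsize /
   decl_piece_varloc_ptr.  A piece whose bit count is positive and no
   larger than MAX_MACHINE_MODE stores that count directly in the
   machine-mode field of the EXPR_LIST node, so e.g.

       decl_piece_node (note, 32, next)

   yields a node with mode 32 whose XEXP (x, 0) is the location note
   itself.  A larger bit count instead produces a VOIDmode node whose
   XEXP (x, 0) is a CONCAT of (CONST_INT bitsize, note), which is the
   shape the CONCAT branches of the two accessors above expect.  */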
6144
6145 /* Return rtx that should be stored into loc field for
6146 LOC_NOTE and BITPOS/BITSIZE. */
6147
6148 static rtx
6149 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6150 HOST_WIDE_INT bitsize)
6151 {
6152 if (bitsize != -1)
6153 {
6154 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6155 if (bitpos != 0)
6156 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6157 }
6158 return loc_note;
6159 }
6160
6161 /* This function either modifies the location piece list *DEST in
6162 place (if SRC and INNER are NULL), or copies the location piece list
6163 *SRC to *DEST while modifying it. The location at BITPOS is changed
6164 to contain LOC_NOTE; any pieces overlapping it are removed (or, when
6165 copying, not copied) and if needed some padding around it is added.
6166 When modifying in place, DEST should point to the EXPR_LIST where
6167 earlier pieces cover PIECE_BITPOS bits; when copying, SRC points
6168 to the start of the whole list and INNER points to the EXPR_LIST
6169 where earlier pieces cover PIECE_BITPOS bits. */
6170
6171 static void
6172 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6173 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6174 HOST_WIDE_INT bitsize, rtx loc_note)
6175 {
6176 HOST_WIDE_INT diff;
6177 bool copy = inner != NULL;
6178
6179 if (copy)
6180 {
6181 /* First copy all nodes preceding the current bitpos. */
6182 while (src != inner)
6183 {
6184 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6185 decl_piece_bitsize (*src), NULL_RTX);
6186 dest = &XEXP (*dest, 1);
6187 src = &XEXP (*src, 1);
6188 }
6189 }
6190 /* Add padding if needed. */
6191 if (bitpos != piece_bitpos)
6192 {
6193 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6194 copy ? NULL_RTX : *dest);
6195 dest = &XEXP (*dest, 1);
6196 }
6197 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6198 {
6199 gcc_assert (!copy);
6200 /* A piece with the correct bitpos and bitsize already exists;
6201 just update the location for it and return. */
6202 *decl_piece_varloc_ptr (*dest) = loc_note;
6203 return;
6204 }
6205 /* Add the piece that changed. */
6206 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6207 dest = &XEXP (*dest, 1);
6208 /* Skip over pieces that overlap it. */
6209 diff = bitpos - piece_bitpos + bitsize;
6210 if (!copy)
6211 src = dest;
6212 while (diff > 0 && *src)
6213 {
6214 rtx piece = *src;
6215 diff -= decl_piece_bitsize (piece);
6216 if (copy)
6217 src = &XEXP (piece, 1);
6218 else
6219 {
6220 *src = XEXP (piece, 1);
6221 free_EXPR_LIST_node (piece);
6222 }
6223 }
6224 /* Add padding if needed. */
6225 if (diff < 0 && *src)
6226 {
6227 if (!copy)
6228 dest = src;
6229 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6230 dest = &XEXP (*dest, 1);
6231 }
6232 if (!copy)
6233 return;
6234 /* Finally copy all nodes following it. */
6235 while (*src)
6236 {
6237 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6238 decl_piece_bitsize (*src), NULL_RTX);
6239 dest = &XEXP (*dest, 1);
6240 src = &XEXP (*src, 1);
6241 }
6242 }
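
/* Illustration, not part of the original source: the simplest in-place
   use of adjust_piece_list.  Suppose the current piece list describes
   bits [0,32) and [32,64) of a variable and a new location note arrives
   for exactly bits [32,64).  The caller has walked DEST to the second
   node, so PIECE_BITPOS == BITPOS == 32, and because that node already
   has bitsize 32 the early-return branch above just overwrites its note
   via decl_piece_varloc_ptr; nothing is added or freed.  If the new
   note instead covered bits [40,64), an 8-bit padding piece would be
   inserted before it and the overlapped old [32,64) piece dropped.  */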
6243
6244 /* Add a variable location node to the linked list for DECL. */
6245
6246 static struct var_loc_node *
6247 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6248 {
6249 unsigned int decl_id;
6250 var_loc_list *temp;
6251 struct var_loc_node *loc = NULL;
6252 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6253
6254 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6255 {
6256 tree realdecl = DECL_DEBUG_EXPR (decl);
6257 if (handled_component_p (realdecl)
6258 || (TREE_CODE (realdecl) == MEM_REF
6259 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6260 {
6261 bool reverse;
6262 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6263 &bitsize, &reverse);
6264 if (!innerdecl
6265 || !DECL_P (innerdecl)
6266 || DECL_IGNORED_P (innerdecl)
6267 || TREE_STATIC (innerdecl)
6268 || bitsize == 0
6269 || bitpos + bitsize > 256)
6270 return NULL;
6271 decl = innerdecl;
6272 }
6273 }
6274
6275 decl_id = DECL_UID (decl);
6276 var_loc_list **slot
6277 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6278 if (*slot == NULL)
6279 {
6280 temp = ggc_cleared_alloc<var_loc_list> ();
6281 temp->decl_id = decl_id;
6282 *slot = temp;
6283 }
6284 else
6285 temp = *slot;
6286
6287 /* For PARM_DECLs try to keep around the original incoming value,
6288 even if that means we'll emit a zero-range .debug_loc entry. */
6289 if (temp->last
6290 && temp->first == temp->last
6291 && TREE_CODE (decl) == PARM_DECL
6292 && NOTE_P (temp->first->loc)
6293 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6294 && DECL_INCOMING_RTL (decl)
6295 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6296 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6297 == GET_CODE (DECL_INCOMING_RTL (decl))
6298 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6299 && (bitsize != -1
6300 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6301 NOTE_VAR_LOCATION_LOC (loc_note))
6302 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6303 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6304 {
6305 loc = ggc_cleared_alloc<var_loc_node> ();
6306 temp->first->next = loc;
6307 temp->last = loc;
6308 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6309 }
6310 else if (temp->last)
6311 {
6312 struct var_loc_node *last = temp->last, *unused = NULL;
6313 rtx *piece_loc = NULL, last_loc_note;
6314 HOST_WIDE_INT piece_bitpos = 0;
6315 if (last->next)
6316 {
6317 last = last->next;
6318 gcc_assert (last->next == NULL);
6319 }
6320 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6321 {
6322 piece_loc = &last->loc;
6323 do
6324 {
6325 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6326 if (piece_bitpos + cur_bitsize > bitpos)
6327 break;
6328 piece_bitpos += cur_bitsize;
6329 piece_loc = &XEXP (*piece_loc, 1);
6330 }
6331 while (*piece_loc);
6332 }
6333 /* TEMP->LAST here points either to the last element of the
6334 chained list or to the one before it; LAST points to the
6335 last element. */
6336 if (label && strcmp (last->label, label) == 0 && last->view == view)
6337 {
6338 /* For SRA-optimized variables, if there weren't any real
6339 insns since the last note, just modify the last node. */
6340 if (piece_loc != NULL)
6341 {
6342 adjust_piece_list (piece_loc, NULL, NULL,
6343 bitpos, piece_bitpos, bitsize, loc_note);
6344 return NULL;
6345 }
6346 /* If the last note doesn't cover any instructions, remove it. */
6347 if (temp->last != last)
6348 {
6349 temp->last->next = NULL;
6350 unused = last;
6351 last = temp->last;
6352 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6353 }
6354 else
6355 {
6356 gcc_assert (temp->first == temp->last
6357 || (temp->first->next == temp->last
6358 && TREE_CODE (decl) == PARM_DECL));
6359 memset (temp->last, '\0', sizeof (*temp->last));
6360 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6361 return temp->last;
6362 }
6363 }
6364 if (bitsize == -1 && NOTE_P (last->loc))
6365 last_loc_note = last->loc;
6366 else if (piece_loc != NULL
6367 && *piece_loc != NULL_RTX
6368 && piece_bitpos == bitpos
6369 && decl_piece_bitsize (*piece_loc) == bitsize)
6370 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6371 else
6372 last_loc_note = NULL_RTX;
6373 /* If the current location is the same as the end of the list,
6374 and either both or neither of the locations is uninitialized,
6375 we have nothing to do. */
6376 if (last_loc_note == NULL_RTX
6377 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6378 NOTE_VAR_LOCATION_LOC (loc_note)))
6379 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6380 != NOTE_VAR_LOCATION_STATUS (loc_note))
6381 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6382 == VAR_INIT_STATUS_UNINITIALIZED)
6383 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6384 == VAR_INIT_STATUS_UNINITIALIZED))))
6385 {
6386 /* Add LOC to the end of list and update LAST. If the last
6387 element of the list has been removed above, reuse its
6388 memory for the new node, otherwise allocate a new one. */
6389 if (unused)
6390 {
6391 loc = unused;
6392 memset (loc, '\0', sizeof (*loc));
6393 }
6394 else
6395 loc = ggc_cleared_alloc<var_loc_node> ();
6396 if (bitsize == -1 || piece_loc == NULL)
6397 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6398 else
6399 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6400 bitpos, piece_bitpos, bitsize, loc_note);
6401 last->next = loc;
6402 /* Ensure TEMP->LAST will point either to the new last but one
6403 element of the chain, or to the last element in it. */
6404 if (last != temp->last)
6405 temp->last = last;
6406 }
6407 else if (unused)
6408 ggc_free (unused);
6409 }
6410 else
6411 {
6412 loc = ggc_cleared_alloc<var_loc_node> ();
6413 temp->first = loc;
6414 temp->last = loc;
6415 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6416 }
6417 return loc;
6418 }
6419 \f
6420 /* Keep track of the number of spaces used to indent the
6421 output of the debugging routines that print the structure of
6422 the DIE internal representation. */
6423 static int print_indent;
6424
6425 /* Indent the line the number of spaces given by print_indent. */
6426
6427 static inline void
6428 print_spaces (FILE *outfile)
6429 {
6430 fprintf (outfile, "%*s", print_indent, "");
6431 }
6432
6433 /* Print a type signature in hex. */
6434
6435 static inline void
6436 print_signature (FILE *outfile, char *sig)
6437 {
6438 int i;
6439
6440 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6441 fprintf (outfile, "%02x", sig[i] & 0xff);
6442 }
6443
6444 static inline void
6445 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6446 {
6447 if (discr_value->pos)
6448 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6449 else
6450 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6451 }
6452
6453 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6454
6455 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6456 RECURSE, output location descriptor operations. */
6457
6458 static void
6459 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6460 {
6461 switch (val->val_class)
6462 {
6463 case dw_val_class_addr:
6464 fprintf (outfile, "address");
6465 break;
6466 case dw_val_class_offset:
6467 fprintf (outfile, "offset");
6468 break;
6469 case dw_val_class_loc:
6470 fprintf (outfile, "location descriptor");
6471 if (val->v.val_loc == NULL)
6472 fprintf (outfile, " -> <null>\n");
6473 else if (recurse)
6474 {
6475 fprintf (outfile, ":\n");
6476 print_indent += 4;
6477 print_loc_descr (val->v.val_loc, outfile);
6478 print_indent -= 4;
6479 }
6480 else
6481 {
6482 if (flag_dump_noaddr || flag_dump_unnumbered)
6483 fprintf (outfile, " #\n");
6484 else
6485 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6486 }
6487 break;
6488 case dw_val_class_loc_list:
6489 fprintf (outfile, "location list -> label:%s",
6490 val->v.val_loc_list->ll_symbol);
6491 break;
6492 case dw_val_class_view_list:
6493 val = view_list_to_loc_list_val_node (val);
6494 fprintf (outfile, "location list with views -> labels:%s and %s",
6495 val->v.val_loc_list->ll_symbol,
6496 val->v.val_loc_list->vl_symbol);
6497 break;
6498 case dw_val_class_range_list:
6499 fprintf (outfile, "range list");
6500 break;
6501 case dw_val_class_const:
6502 case dw_val_class_const_implicit:
6503 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6504 break;
6505 case dw_val_class_unsigned_const:
6506 case dw_val_class_unsigned_const_implicit:
6507 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6508 break;
6509 case dw_val_class_const_double:
6510 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6511 HOST_WIDE_INT_PRINT_UNSIGNED")",
6512 val->v.val_double.high,
6513 val->v.val_double.low);
6514 break;
6515 case dw_val_class_wide_int:
6516 {
6517 int i = val->v.val_wide->get_len ();
6518 fprintf (outfile, "constant (");
6519 gcc_assert (i > 0);
6520 if (val->v.val_wide->elt (i - 1) == 0)
6521 fprintf (outfile, "0x");
6522 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6523 val->v.val_wide->elt (--i));
6524 while (--i >= 0)
6525 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6526 val->v.val_wide->elt (i));
6527 fprintf (outfile, ")");
6528 break;
6529 }
6530 case dw_val_class_vec:
6531 fprintf (outfile, "floating-point or vector constant");
6532 break;
6533 case dw_val_class_flag:
6534 fprintf (outfile, "%u", val->v.val_flag);
6535 break;
6536 case dw_val_class_die_ref:
6537 if (val->v.val_die_ref.die != NULL)
6538 {
6539 dw_die_ref die = val->v.val_die_ref.die;
6540
6541 if (die->comdat_type_p)
6542 {
6543 fprintf (outfile, "die -> signature: ");
6544 print_signature (outfile,
6545 die->die_id.die_type_node->signature);
6546 }
6547 else if (die->die_id.die_symbol)
6548 {
6549 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6550 if (die->with_offset)
6551 fprintf (outfile, " + %ld", die->die_offset);
6552 }
6553 else
6554 fprintf (outfile, "die -> %ld", die->die_offset);
6555 if (flag_dump_noaddr || flag_dump_unnumbered)
6556 fprintf (outfile, " #");
6557 else
6558 fprintf (outfile, " (%p)", (void *) die);
6559 }
6560 else
6561 fprintf (outfile, "die -> <null>");
6562 break;
6563 case dw_val_class_vms_delta:
6564 fprintf (outfile, "delta: @slotcount(%s-%s)",
6565 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6566 break;
6567 case dw_val_class_symview:
6568 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6569 break;
6570 case dw_val_class_lbl_id:
6571 case dw_val_class_lineptr:
6572 case dw_val_class_macptr:
6573 case dw_val_class_loclistsptr:
6574 case dw_val_class_high_pc:
6575 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6576 break;
6577 case dw_val_class_str:
6578 if (val->v.val_str->str != NULL)
6579 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6580 else
6581 fprintf (outfile, "<null>");
6582 break;
6583 case dw_val_class_file:
6584 case dw_val_class_file_implicit:
6585 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6586 val->v.val_file->emitted_number);
6587 break;
6588 case dw_val_class_data8:
6589 {
6590 int i;
6591
6592 for (i = 0; i < 8; i++)
6593 fprintf (outfile, "%02x", val->v.val_data8[i]);
6594 break;
6595 }
6596 case dw_val_class_discr_value:
6597 print_discr_value (outfile, &val->v.val_discr_value);
6598 break;
6599 case dw_val_class_discr_list:
6600 for (dw_discr_list_ref node = val->v.val_discr_list;
6601 node != NULL;
6602 node = node->dw_discr_next)
6603 {
6604 if (node->dw_discr_range)
6605 {
6606 fprintf (outfile, " .. ");
6607 print_discr_value (outfile, &node->dw_discr_lower_bound);
6608 print_discr_value (outfile, &node->dw_discr_upper_bound);
6609 }
6610 else
6611 print_discr_value (outfile, &node->dw_discr_lower_bound);
6612
6613 if (node->dw_discr_next != NULL)
6614 fprintf (outfile, " | ");
6615 }
6616 default:
6617 break;
6618 }
6619 }
6620
6621 /* Likewise, for a DIE attribute. */
6622
6623 static void
6624 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6625 {
6626 print_dw_val (&a->dw_attr_val, recurse, outfile);
6627 }
6628
6629
6630 /* Print the list of operations in the LOC location description to OUTFILE.
6631 This routine is a debugging aid only. */
6632
6633 static void
6634 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6635 {
6636 dw_loc_descr_ref l = loc;
6637
6638 if (loc == NULL)
6639 {
6640 print_spaces (outfile);
6641 fprintf (outfile, "<null>\n");
6642 return;
6643 }
6644
6645 for (l = loc; l != NULL; l = l->dw_loc_next)
6646 {
6647 print_spaces (outfile);
6648 if (flag_dump_noaddr || flag_dump_unnumbered)
6649 fprintf (outfile, "#");
6650 else
6651 fprintf (outfile, "(%p)", (void *) l);
6652 fprintf (outfile, " %s",
6653 dwarf_stack_op_name (l->dw_loc_opc));
6654 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6655 {
6656 fprintf (outfile, " ");
6657 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6658 }
6659 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6660 {
6661 fprintf (outfile, ", ");
6662 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6663 }
6664 fprintf (outfile, "\n");
6665 }
6666 }
6667
6668 /* Print the information associated with a given DIE, and its children.
6669 This routine is a debugging aid only. */
6670
6671 static void
6672 print_die (dw_die_ref die, FILE *outfile)
6673 {
6674 dw_attr_node *a;
6675 dw_die_ref c;
6676 unsigned ix;
6677
6678 print_spaces (outfile);
6679 fprintf (outfile, "DIE %4ld: %s ",
6680 die->die_offset, dwarf_tag_name (die->die_tag));
6681 if (flag_dump_noaddr || flag_dump_unnumbered)
6682 fprintf (outfile, "#\n");
6683 else
6684 fprintf (outfile, "(%p)\n", (void*) die);
6685 print_spaces (outfile);
6686 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6687 fprintf (outfile, " offset: %ld", die->die_offset);
6688 fprintf (outfile, " mark: %d\n", die->die_mark);
6689
6690 if (die->comdat_type_p)
6691 {
6692 print_spaces (outfile);
6693 fprintf (outfile, " signature: ");
6694 print_signature (outfile, die->die_id.die_type_node->signature);
6695 fprintf (outfile, "\n");
6696 }
6697
6698 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6699 {
6700 print_spaces (outfile);
6701 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6702
6703 print_attribute (a, true, outfile);
6704 fprintf (outfile, "\n");
6705 }
6706
6707 if (die->die_child != NULL)
6708 {
6709 print_indent += 4;
6710 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6711 print_indent -= 4;
6712 }
6713 if (print_indent == 0)
6714 fprintf (outfile, "\n");
6715 }
6716
6717 /* Print the list of operations in the LOC location description. */
6718
6719 DEBUG_FUNCTION void
6720 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6721 {
6722 print_loc_descr (loc, stderr);
6723 }
6724
6725 /* Print the information collected for a given DIE. */
6726
6727 DEBUG_FUNCTION void
6728 debug_dwarf_die (dw_die_ref die)
6729 {
6730 print_die (die, stderr);
6731 }
6732
6733 DEBUG_FUNCTION void
6734 debug (die_struct &ref)
6735 {
6736 print_die (&ref, stderr);
6737 }
6738
6739 DEBUG_FUNCTION void
6740 debug (die_struct *ptr)
6741 {
6742 if (ptr)
6743 debug (*ptr);
6744 else
6745 fprintf (stderr, "<nil>\n");
6746 }
6747
6748
6749 /* Print all DWARF information collected for the compilation unit.
6750 This routine is a debugging aid only. */
6751
6752 DEBUG_FUNCTION void
6753 debug_dwarf (void)
6754 {
6755 print_indent = 0;
6756 print_die (comp_unit_die (), stderr);
6757 }
6758
6759 /* Verify the DIE tree structure. */
6760
6761 DEBUG_FUNCTION void
6762 verify_die (dw_die_ref die)
6763 {
6764 gcc_assert (!die->die_mark);
6765 if (die->die_parent == NULL
6766 && die->die_sib == NULL)
6767 return;
6768 /* Verify the die_sib list is cyclic. */
6769 dw_die_ref x = die;
6770 do
6771 {
6772 x->die_mark = 1;
6773 x = x->die_sib;
6774 }
6775 while (x && !x->die_mark);
6776 gcc_assert (x == die);
6777 x = die;
6778 do
6779 {
6780 /* Verify all dies have the same parent. */
6781 gcc_assert (x->die_parent == die->die_parent);
6782 if (x->die_child)
6783 {
6784 /* Verify the child has the proper parent and recurse. */
6785 gcc_assert (x->die_child->die_parent == x);
6786 verify_die (x->die_child);
6787 }
6788 x->die_mark = 0;
6789 x = x->die_sib;
6790 }
6791 while (x && x->die_mark);
6792 }
6793
6794 /* Sanity checks on DIEs. */
6795
6796 static void
6797 check_die (dw_die_ref die)
6798 {
6799 unsigned ix;
6800 dw_attr_node *a;
6801 bool inline_found = false;
6802 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6803 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6804 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6805 {
6806 switch (a->dw_attr)
6807 {
6808 case DW_AT_inline:
6809 if (a->dw_attr_val.v.val_unsigned)
6810 inline_found = true;
6811 break;
6812 case DW_AT_location:
6813 ++n_location;
6814 break;
6815 case DW_AT_low_pc:
6816 ++n_low_pc;
6817 break;
6818 case DW_AT_high_pc:
6819 ++n_high_pc;
6820 break;
6821 case DW_AT_artificial:
6822 ++n_artificial;
6823 break;
6824 case DW_AT_decl_column:
6825 ++n_decl_column;
6826 break;
6827 case DW_AT_decl_line:
6828 ++n_decl_line;
6829 break;
6830 case DW_AT_decl_file:
6831 ++n_decl_file;
6832 break;
6833 default:
6834 break;
6835 }
6836 }
6837 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6838 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6839 {
6840 fprintf (stderr, "Duplicate attributes in DIE:\n");
6841 debug_dwarf_die (die);
6842 gcc_unreachable ();
6843 }
6844 if (inline_found)
6845 {
6846 /* A debugging information entry that is a member of an abstract
6847 instance tree [that has DW_AT_inline] should not contain any
6848 attributes which describe aspects of the subroutine which vary
6849 between distinct inlined expansions or distinct out-of-line
6850 expansions. */
6851 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6852 gcc_assert (a->dw_attr != DW_AT_low_pc
6853 && a->dw_attr != DW_AT_high_pc
6854 && a->dw_attr != DW_AT_location
6855 && a->dw_attr != DW_AT_frame_base
6856 && a->dw_attr != DW_AT_call_all_calls
6857 && a->dw_attr != DW_AT_GNU_all_call_sites);
6858 }
6859 }
6860 \f
6861 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6862 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6863 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6864
6865 /* Calculate the checksum of a location expression. */
6866
6867 static inline void
6868 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6869 {
6870 int tem;
6871 inchash::hash hstate;
6872 hashval_t hash;
6873
6874 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6875 CHECKSUM (tem);
6876 hash_loc_operands (loc, hstate);
6877 hash = hstate.end();
6878 CHECKSUM (hash);
6879 }
6880
6881 /* Calculate the checksum of an attribute. */
6882
6883 static void
6884 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6885 {
6886 dw_loc_descr_ref loc;
6887 rtx r;
6888
6889 CHECKSUM (at->dw_attr);
6890
6891 /* We don't care that this was compiled with a different compiler
6892 snapshot; if the output is the same, that's what matters. */
6893 if (at->dw_attr == DW_AT_producer)
6894 return;
6895
6896 switch (AT_class (at))
6897 {
6898 case dw_val_class_const:
6899 case dw_val_class_const_implicit:
6900 CHECKSUM (at->dw_attr_val.v.val_int);
6901 break;
6902 case dw_val_class_unsigned_const:
6903 case dw_val_class_unsigned_const_implicit:
6904 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6905 break;
6906 case dw_val_class_const_double:
6907 CHECKSUM (at->dw_attr_val.v.val_double);
6908 break;
6909 case dw_val_class_wide_int:
6910 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6911 get_full_len (*at->dw_attr_val.v.val_wide)
6912 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6913 break;
6914 case dw_val_class_vec:
6915 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6916 (at->dw_attr_val.v.val_vec.length
6917 * at->dw_attr_val.v.val_vec.elt_size));
6918 break;
6919 case dw_val_class_flag:
6920 CHECKSUM (at->dw_attr_val.v.val_flag);
6921 break;
6922 case dw_val_class_str:
6923 CHECKSUM_STRING (AT_string (at));
6924 break;
6925
6926 case dw_val_class_addr:
6927 r = AT_addr (at);
6928 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6929 CHECKSUM_STRING (XSTR (r, 0));
6930 break;
6931
6932 case dw_val_class_offset:
6933 CHECKSUM (at->dw_attr_val.v.val_offset);
6934 break;
6935
6936 case dw_val_class_loc:
6937 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6938 loc_checksum (loc, ctx);
6939 break;
6940
6941 case dw_val_class_die_ref:
6942 die_checksum (AT_ref (at), ctx, mark);
6943 break;
6944
6945 case dw_val_class_fde_ref:
6946 case dw_val_class_vms_delta:
6947 case dw_val_class_symview:
6948 case dw_val_class_lbl_id:
6949 case dw_val_class_lineptr:
6950 case dw_val_class_macptr:
6951 case dw_val_class_loclistsptr:
6952 case dw_val_class_high_pc:
6953 break;
6954
6955 case dw_val_class_file:
6956 case dw_val_class_file_implicit:
6957 CHECKSUM_STRING (AT_file (at)->filename);
6958 break;
6959
6960 case dw_val_class_data8:
6961 CHECKSUM (at->dw_attr_val.v.val_data8);
6962 break;
6963
6964 default:
6965 break;
6966 }
6967 }
6968
6969 /* Calculate the checksum of a DIE. */
6970
6971 static void
6972 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6973 {
6974 dw_die_ref c;
6975 dw_attr_node *a;
6976 unsigned ix;
6977
6978 /* To avoid infinite recursion. */
6979 if (die->die_mark)
6980 {
6981 CHECKSUM (die->die_mark);
6982 return;
6983 }
6984 die->die_mark = ++(*mark);
6985
6986 CHECKSUM (die->die_tag);
6987
6988 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6989 attr_checksum (a, ctx, mark);
6990
6991 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6992 }
6993
6994 #undef CHECKSUM
6995 #undef CHECKSUM_BLOCK
6996 #undef CHECKSUM_STRING
6997
6998 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6999 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
7000 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
7001 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
7002 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
7003 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
7004 #define CHECKSUM_ATTR(FOO) \
7005 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
7006
7007 /* Calculate the checksum of a number in signed LEB128 format. */
7008
7009 static void
7010 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
7011 {
7012 unsigned char byte;
7013 bool more;
7014
7015 while (1)
7016 {
7017 byte = (value & 0x7f);
7018 value >>= 7;
7019 more = !((value == 0 && (byte & 0x40) == 0)
7020 || (value == -1 && (byte & 0x40) != 0));
7021 if (more)
7022 byte |= 0x80;
7023 CHECKSUM (byte);
7024 if (!more)
7025 break;
7026 }
7027 }
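
/* Worked example of the signed encoding above: -2 encodes as the single byte
   0x7e (the remaining value becomes -1 and the sign bit 0x40 is set, so no
   continuation byte is needed), while +127 needs two bytes, 0xff 0x00,
   because the sign bit of the first byte would otherwise make the value
   read back as negative.  */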
7028
7029 /* Calculate the checksum of a number in unsigned LEB128 format. */
7030
7031 static void
7032 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
7033 {
7034 while (1)
7035 {
7036 unsigned char byte = (value & 0x7f);
7037 value >>= 7;
7038 if (value != 0)
7039 /* More bytes to follow. */
7040 byte |= 0x80;
7041 CHECKSUM (byte);
7042 if (value == 0)
7043 break;
7044 }
7045 }
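
/* Worked example of the unsigned encoding above: 300 (0x12c) encodes as the
   two bytes 0xac 0x02 -- the low seven bits 0x2c with the continuation bit
   0x80 set, followed by the remaining value 0x02.  */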
7046
7047 /* Checksum the context of the DIE. This adds the names of any
7048 surrounding namespaces or structures to the checksum. */
7049
7050 static void
7051 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
7052 {
7053 const char *name;
7054 dw_die_ref spec;
7055 int tag = die->die_tag;
7056
7057 if (tag != DW_TAG_namespace
7058 && tag != DW_TAG_structure_type
7059 && tag != DW_TAG_class_type)
7060 return;
7061
7062 name = get_AT_string (die, DW_AT_name);
7063
7064 spec = get_AT_ref (die, DW_AT_specification);
7065 if (spec != NULL)
7066 die = spec;
7067
7068 if (die->die_parent != NULL)
7069 checksum_die_context (die->die_parent, ctx);
7070
7071 CHECKSUM_ULEB128 ('C');
7072 CHECKSUM_ULEB128 (tag);
7073 if (name != NULL)
7074 CHECKSUM_STRING (name);
7075 }
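
/* For example, a member of struct S inside namespace N contributes the
   context records 'C' DW_TAG_namespace "N" and 'C' DW_TAG_structure_type "S",
   outermost scope first, with each marker and tag emitted as a ULEB128 and
   each name checksummed with its trailing NUL.  */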
7076
7077 /* Calculate the checksum of a location expression. */
7078
7079 static inline void
7080 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7081 {
7082 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7083 were emitted as a DW_FORM_sdata instead of a location expression. */
7084 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7085 {
7086 CHECKSUM_ULEB128 (DW_FORM_sdata);
7087 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7088 return;
7089 }
7090
7091 /* Otherwise, just checksum the raw location expression. */
7092 while (loc != NULL)
7093 {
7094 inchash::hash hstate;
7095 hashval_t hash;
7096
7097 CHECKSUM_ULEB128 (loc->dtprel);
7098 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7099 hash_loc_operands (loc, hstate);
7100 hash = hstate.end ();
7101 CHECKSUM (hash);
7102 loc = loc->dw_loc_next;
7103 }
7104 }
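
/* The DW_OP_plus_uconst special case keeps the signature stable for
   attributes such as DW_AT_data_member_location, which may be emitted either
   as a plain constant or as a one-operator location expression; both forms
   checksum identically, matching the DWARF 4 type-signature rules.  */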
7105
7106 /* Calculate the checksum of an attribute. */
7107
7108 static void
7109 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7110 struct md5_ctx *ctx, int *mark)
7111 {
7112 dw_loc_descr_ref loc;
7113 rtx r;
7114
7115 if (AT_class (at) == dw_val_class_die_ref)
7116 {
7117 dw_die_ref target_die = AT_ref (at);
7118
7119 /* For pointer and reference types, we checksum only the (qualified)
7120 name of the target type (if there is a name). For friend entries,
7121 we checksum only the (qualified) name of the target type or function.
7122 This allows the checksum to remain the same whether the target type
7123 is complete or not. */
7124 if ((at->dw_attr == DW_AT_type
7125 && (tag == DW_TAG_pointer_type
7126 || tag == DW_TAG_reference_type
7127 || tag == DW_TAG_rvalue_reference_type
7128 || tag == DW_TAG_ptr_to_member_type))
7129 || (at->dw_attr == DW_AT_friend
7130 && tag == DW_TAG_friend))
7131 {
7132 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7133
7134 if (name_attr != NULL)
7135 {
7136 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7137
7138 if (decl == NULL)
7139 decl = target_die;
7140 CHECKSUM_ULEB128 ('N');
7141 CHECKSUM_ULEB128 (at->dw_attr);
7142 if (decl->die_parent != NULL)
7143 checksum_die_context (decl->die_parent, ctx);
7144 CHECKSUM_ULEB128 ('E');
7145 CHECKSUM_STRING (AT_string (name_attr));
7146 return;
7147 }
7148 }
7149
7150 /* For all other references to another DIE, we check to see if the
7151 target DIE has already been visited. If it has, we emit a
7152 backward reference; if not, we descend recursively. */
7153 if (target_die->die_mark > 0)
7154 {
7155 CHECKSUM_ULEB128 ('R');
7156 CHECKSUM_ULEB128 (at->dw_attr);
7157 CHECKSUM_ULEB128 (target_die->die_mark);
7158 }
7159 else
7160 {
7161 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7162
7163 if (decl == NULL)
7164 decl = target_die;
7165 target_die->die_mark = ++(*mark);
7166 CHECKSUM_ULEB128 ('T');
7167 CHECKSUM_ULEB128 (at->dw_attr);
7168 if (decl->die_parent != NULL)
7169 checksum_die_context (decl->die_parent, ctx);
7170 die_checksum_ordered (target_die, ctx, mark);
7171 }
7172 return;
7173 }
7174
7175 CHECKSUM_ULEB128 ('A');
7176 CHECKSUM_ULEB128 (at->dw_attr);
7177
7178 switch (AT_class (at))
7179 {
7180 case dw_val_class_const:
7181 case dw_val_class_const_implicit:
7182 CHECKSUM_ULEB128 (DW_FORM_sdata);
7183 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7184 break;
7185
7186 case dw_val_class_unsigned_const:
7187 case dw_val_class_unsigned_const_implicit:
7188 CHECKSUM_ULEB128 (DW_FORM_sdata);
7189 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7190 break;
7191
7192 case dw_val_class_const_double:
7193 CHECKSUM_ULEB128 (DW_FORM_block);
7194 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7195 CHECKSUM (at->dw_attr_val.v.val_double);
7196 break;
7197
7198 case dw_val_class_wide_int:
7199 CHECKSUM_ULEB128 (DW_FORM_block);
7200 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7201 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
7202 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7203 get_full_len (*at->dw_attr_val.v.val_wide)
7204 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7205 break;
7206
7207 case dw_val_class_vec:
7208 CHECKSUM_ULEB128 (DW_FORM_block);
7209 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7210 * at->dw_attr_val.v.val_vec.elt_size);
7211 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7212 (at->dw_attr_val.v.val_vec.length
7213 * at->dw_attr_val.v.val_vec.elt_size));
7214 break;
7215
7216 case dw_val_class_flag:
7217 CHECKSUM_ULEB128 (DW_FORM_flag);
7218 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7219 break;
7220
7221 case dw_val_class_str:
7222 CHECKSUM_ULEB128 (DW_FORM_string);
7223 CHECKSUM_STRING (AT_string (at));
7224 break;
7225
7226 case dw_val_class_addr:
7227 r = AT_addr (at);
7228 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7229 CHECKSUM_ULEB128 (DW_FORM_string);
7230 CHECKSUM_STRING (XSTR (r, 0));
7231 break;
7232
7233 case dw_val_class_offset:
7234 CHECKSUM_ULEB128 (DW_FORM_sdata);
7235 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7236 break;
7237
7238 case dw_val_class_loc:
7239 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7240 loc_checksum_ordered (loc, ctx);
7241 break;
7242
7243 case dw_val_class_fde_ref:
7244 case dw_val_class_symview:
7245 case dw_val_class_lbl_id:
7246 case dw_val_class_lineptr:
7247 case dw_val_class_macptr:
7248 case dw_val_class_loclistsptr:
7249 case dw_val_class_high_pc:
7250 break;
7251
7252 case dw_val_class_file:
7253 case dw_val_class_file_implicit:
7254 CHECKSUM_ULEB128 (DW_FORM_string);
7255 CHECKSUM_STRING (AT_file (at)->filename);
7256 break;
7257
7258 case dw_val_class_data8:
7259 CHECKSUM (at->dw_attr_val.v.val_data8);
7260 break;
7261
7262 default:
7263 break;
7264 }
7265 }
7266
7267 struct checksum_attributes
7268 {
7269 dw_attr_node *at_name;
7270 dw_attr_node *at_type;
7271 dw_attr_node *at_friend;
7272 dw_attr_node *at_accessibility;
7273 dw_attr_node *at_address_class;
7274 dw_attr_node *at_alignment;
7275 dw_attr_node *at_allocated;
7276 dw_attr_node *at_artificial;
7277 dw_attr_node *at_associated;
7278 dw_attr_node *at_binary_scale;
7279 dw_attr_node *at_bit_offset;
7280 dw_attr_node *at_bit_size;
7281 dw_attr_node *at_bit_stride;
7282 dw_attr_node *at_byte_size;
7283 dw_attr_node *at_byte_stride;
7284 dw_attr_node *at_const_value;
7285 dw_attr_node *at_containing_type;
7286 dw_attr_node *at_count;
7287 dw_attr_node *at_data_location;
7288 dw_attr_node *at_data_member_location;
7289 dw_attr_node *at_decimal_scale;
7290 dw_attr_node *at_decimal_sign;
7291 dw_attr_node *at_default_value;
7292 dw_attr_node *at_digit_count;
7293 dw_attr_node *at_discr;
7294 dw_attr_node *at_discr_list;
7295 dw_attr_node *at_discr_value;
7296 dw_attr_node *at_encoding;
7297 dw_attr_node *at_endianity;
7298 dw_attr_node *at_explicit;
7299 dw_attr_node *at_is_optional;
7300 dw_attr_node *at_location;
7301 dw_attr_node *at_lower_bound;
7302 dw_attr_node *at_mutable;
7303 dw_attr_node *at_ordering;
7304 dw_attr_node *at_picture_string;
7305 dw_attr_node *at_prototyped;
7306 dw_attr_node *at_small;
7307 dw_attr_node *at_segment;
7308 dw_attr_node *at_string_length;
7309 dw_attr_node *at_string_length_bit_size;
7310 dw_attr_node *at_string_length_byte_size;
7311 dw_attr_node *at_threads_scaled;
7312 dw_attr_node *at_upper_bound;
7313 dw_attr_node *at_use_location;
7314 dw_attr_node *at_use_UTF8;
7315 dw_attr_node *at_variable_parameter;
7316 dw_attr_node *at_virtuality;
7317 dw_attr_node *at_visibility;
7318 dw_attr_node *at_vtable_elem_location;
7319 };
7320
7321 /* Collect the attributes that we will want to use for the checksum. */
7322
7323 static void
7324 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7325 {
7326 dw_attr_node *a;
7327 unsigned ix;
7328
7329 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7330 {
7331 switch (a->dw_attr)
7332 {
7333 case DW_AT_name:
7334 attrs->at_name = a;
7335 break;
7336 case DW_AT_type:
7337 attrs->at_type = a;
7338 break;
7339 case DW_AT_friend:
7340 attrs->at_friend = a;
7341 break;
7342 case DW_AT_accessibility:
7343 attrs->at_accessibility = a;
7344 break;
7345 case DW_AT_address_class:
7346 attrs->at_address_class = a;
7347 break;
7348 case DW_AT_alignment:
7349 attrs->at_alignment = a;
7350 break;
7351 case DW_AT_allocated:
7352 attrs->at_allocated = a;
7353 break;
7354 case DW_AT_artificial:
7355 attrs->at_artificial = a;
7356 break;
7357 case DW_AT_associated:
7358 attrs->at_associated = a;
7359 break;
7360 case DW_AT_binary_scale:
7361 attrs->at_binary_scale = a;
7362 break;
7363 case DW_AT_bit_offset:
7364 attrs->at_bit_offset = a;
7365 break;
7366 case DW_AT_bit_size:
7367 attrs->at_bit_size = a;
7368 break;
7369 case DW_AT_bit_stride:
7370 attrs->at_bit_stride = a;
7371 break;
7372 case DW_AT_byte_size:
7373 attrs->at_byte_size = a;
7374 break;
7375 case DW_AT_byte_stride:
7376 attrs->at_byte_stride = a;
7377 break;
7378 case DW_AT_const_value:
7379 attrs->at_const_value = a;
7380 break;
7381 case DW_AT_containing_type:
7382 attrs->at_containing_type = a;
7383 break;
7384 case DW_AT_count:
7385 attrs->at_count = a;
7386 break;
7387 case DW_AT_data_location:
7388 attrs->at_data_location = a;
7389 break;
7390 case DW_AT_data_member_location:
7391 attrs->at_data_member_location = a;
7392 break;
7393 case DW_AT_decimal_scale:
7394 attrs->at_decimal_scale = a;
7395 break;
7396 case DW_AT_decimal_sign:
7397 attrs->at_decimal_sign = a;
7398 break;
7399 case DW_AT_default_value:
7400 attrs->at_default_value = a;
7401 break;
7402 case DW_AT_digit_count:
7403 attrs->at_digit_count = a;
7404 break;
7405 case DW_AT_discr:
7406 attrs->at_discr = a;
7407 break;
7408 case DW_AT_discr_list:
7409 attrs->at_discr_list = a;
7410 break;
7411 case DW_AT_discr_value:
7412 attrs->at_discr_value = a;
7413 break;
7414 case DW_AT_encoding:
7415 attrs->at_encoding = a;
7416 break;
7417 case DW_AT_endianity:
7418 attrs->at_endianity = a;
7419 break;
7420 case DW_AT_explicit:
7421 attrs->at_explicit = a;
7422 break;
7423 case DW_AT_is_optional:
7424 attrs->at_is_optional = a;
7425 break;
7426 case DW_AT_location:
7427 attrs->at_location = a;
7428 break;
7429 case DW_AT_lower_bound:
7430 attrs->at_lower_bound = a;
7431 break;
7432 case DW_AT_mutable:
7433 attrs->at_mutable = a;
7434 break;
7435 case DW_AT_ordering:
7436 attrs->at_ordering = a;
7437 break;
7438 case DW_AT_picture_string:
7439 attrs->at_picture_string = a;
7440 break;
7441 case DW_AT_prototyped:
7442 attrs->at_prototyped = a;
7443 break;
7444 case DW_AT_small:
7445 attrs->at_small = a;
7446 break;
7447 case DW_AT_segment:
7448 attrs->at_segment = a;
7449 break;
7450 case DW_AT_string_length:
7451 attrs->at_string_length = a;
7452 break;
7453 case DW_AT_string_length_bit_size:
7454 attrs->at_string_length_bit_size = a;
7455 break;
7456 case DW_AT_string_length_byte_size:
7457 attrs->at_string_length_byte_size = a;
7458 break;
7459 case DW_AT_threads_scaled:
7460 attrs->at_threads_scaled = a;
7461 break;
7462 case DW_AT_upper_bound:
7463 attrs->at_upper_bound = a;
7464 break;
7465 case DW_AT_use_location:
7466 attrs->at_use_location = a;
7467 break;
7468 case DW_AT_use_UTF8:
7469 attrs->at_use_UTF8 = a;
7470 break;
7471 case DW_AT_variable_parameter:
7472 attrs->at_variable_parameter = a;
7473 break;
7474 case DW_AT_virtuality:
7475 attrs->at_virtuality = a;
7476 break;
7477 case DW_AT_visibility:
7478 attrs->at_visibility = a;
7479 break;
7480 case DW_AT_vtable_elem_location:
7481 attrs->at_vtable_elem_location = a;
7482 break;
7483 default:
7484 break;
7485 }
7486 }
7487 }
7488
7489 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7490
7491 static void
7492 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7493 {
7494 dw_die_ref c;
7495 dw_die_ref decl;
7496 struct checksum_attributes attrs;
7497
7498 CHECKSUM_ULEB128 ('D');
7499 CHECKSUM_ULEB128 (die->die_tag);
7500
7501 memset (&attrs, 0, sizeof (attrs));
7502
7503 decl = get_AT_ref (die, DW_AT_specification);
7504 if (decl != NULL)
7505 collect_checksum_attributes (&attrs, decl);
7506 collect_checksum_attributes (&attrs, die);
7507
7508 CHECKSUM_ATTR (attrs.at_name);
7509 CHECKSUM_ATTR (attrs.at_accessibility);
7510 CHECKSUM_ATTR (attrs.at_address_class);
7511 CHECKSUM_ATTR (attrs.at_allocated);
7512 CHECKSUM_ATTR (attrs.at_artificial);
7513 CHECKSUM_ATTR (attrs.at_associated);
7514 CHECKSUM_ATTR (attrs.at_binary_scale);
7515 CHECKSUM_ATTR (attrs.at_bit_offset);
7516 CHECKSUM_ATTR (attrs.at_bit_size);
7517 CHECKSUM_ATTR (attrs.at_bit_stride);
7518 CHECKSUM_ATTR (attrs.at_byte_size);
7519 CHECKSUM_ATTR (attrs.at_byte_stride);
7520 CHECKSUM_ATTR (attrs.at_const_value);
7521 CHECKSUM_ATTR (attrs.at_containing_type);
7522 CHECKSUM_ATTR (attrs.at_count);
7523 CHECKSUM_ATTR (attrs.at_data_location);
7524 CHECKSUM_ATTR (attrs.at_data_member_location);
7525 CHECKSUM_ATTR (attrs.at_decimal_scale);
7526 CHECKSUM_ATTR (attrs.at_decimal_sign);
7527 CHECKSUM_ATTR (attrs.at_default_value);
7528 CHECKSUM_ATTR (attrs.at_digit_count);
7529 CHECKSUM_ATTR (attrs.at_discr);
7530 CHECKSUM_ATTR (attrs.at_discr_list);
7531 CHECKSUM_ATTR (attrs.at_discr_value);
7532 CHECKSUM_ATTR (attrs.at_encoding);
7533 CHECKSUM_ATTR (attrs.at_endianity);
7534 CHECKSUM_ATTR (attrs.at_explicit);
7535 CHECKSUM_ATTR (attrs.at_is_optional);
7536 CHECKSUM_ATTR (attrs.at_location);
7537 CHECKSUM_ATTR (attrs.at_lower_bound);
7538 CHECKSUM_ATTR (attrs.at_mutable);
7539 CHECKSUM_ATTR (attrs.at_ordering);
7540 CHECKSUM_ATTR (attrs.at_picture_string);
7541 CHECKSUM_ATTR (attrs.at_prototyped);
7542 CHECKSUM_ATTR (attrs.at_small);
7543 CHECKSUM_ATTR (attrs.at_segment);
7544 CHECKSUM_ATTR (attrs.at_string_length);
7545 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7546 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7547 CHECKSUM_ATTR (attrs.at_threads_scaled);
7548 CHECKSUM_ATTR (attrs.at_upper_bound);
7549 CHECKSUM_ATTR (attrs.at_use_location);
7550 CHECKSUM_ATTR (attrs.at_use_UTF8);
7551 CHECKSUM_ATTR (attrs.at_variable_parameter);
7552 CHECKSUM_ATTR (attrs.at_virtuality);
7553 CHECKSUM_ATTR (attrs.at_visibility);
7554 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7555 CHECKSUM_ATTR (attrs.at_type);
7556 CHECKSUM_ATTR (attrs.at_friend);
7557 CHECKSUM_ATTR (attrs.at_alignment);
7558
7559 /* Checksum the child DIEs. */
7560 c = die->die_child;
7561 if (c) do {
7562 dw_attr_node *name_attr;
7563
7564 c = c->die_sib;
7565 name_attr = get_AT (c, DW_AT_name);
7566 if (is_template_instantiation (c))
7567 {
7568 /* Ignore instantiations of member type and function templates. */
7569 }
7570 else if (name_attr != NULL
7571 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7572 {
7573 /* Use a shallow checksum for named nested types and member
7574 functions. */
7575 CHECKSUM_ULEB128 ('S');
7576 CHECKSUM_ULEB128 (c->die_tag);
7577 CHECKSUM_STRING (AT_string (name_attr));
7578 }
7579 else
7580 {
7581 /* Use a deep checksum for other children. */
7582 /* Mark this DIE so it gets processed when unmarking. */
7583 if (c->die_mark == 0)
7584 c->die_mark = -1;
7585 die_checksum_ordered (c, ctx, mark);
7586 }
7587 } while (c != die->die_child);
7588
7589 CHECKSUM_ULEB128 (0);
7590 }
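
/* The ordered checksum therefore sees a stream of the form
     'D' <tag>
       'A' <attr> <form> <value>          for ordinary attributes,
       'N' <attr> <context> 'E' <name>    for references checksummed by name,
       'R' <attr> <back-reference>        for already-visited DIEs,
       'T' <attr> <context> <recursion>   for other DIE references,
       'S' <tag> <name>                   for named nested types and members,
     0
   with every marker and tag emitted as a ULEB128.  */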
7591
7592 /* Add a type name and tag to a hash. */
7593 static void
7594 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7595 {
7596 CHECKSUM_ULEB128 (tag);
7597 CHECKSUM_STRING (name);
7598 }
7599
7600 #undef CHECKSUM
7601 #undef CHECKSUM_STRING
7602 #undef CHECKSUM_ATTR
7603 #undef CHECKSUM_SLEB128
7604 #undef CHECKSUM_ULEB128
7605
7606 /* Generate the type signature for DIE. This is computed by generating an
7607 MD5 checksum over the DIE's tag, its relevant attributes, and its
7608 children. Attributes that are references to other DIEs are processed
7609 by recursion, using the MARK field to prevent infinite recursion.
7610 If the DIE is nested inside a namespace or another type, we also
7611 need to include that context in the signature. The lower 64 bits
7612 of the resulting MD5 checksum comprise the signature. */
7613
7614 static void
7615 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7616 {
7617 int mark;
7618 const char *name;
7619 unsigned char checksum[16];
7620 struct md5_ctx ctx;
7621 dw_die_ref decl;
7622 dw_die_ref parent;
7623
7624 name = get_AT_string (die, DW_AT_name);
7625 decl = get_AT_ref (die, DW_AT_specification);
7626 parent = get_die_parent (die);
7627
7628 /* First, compute a signature for just the type name (and its surrounding
7629 context, if any). This is stored in the type unit DIE for link-time
7630 ODR (one-definition rule) checking. */
7631
7632 if (is_cxx () && name != NULL)
7633 {
7634 md5_init_ctx (&ctx);
7635
7636 /* Checksum the names of surrounding namespaces and structures. */
7637 if (parent != NULL)
7638 checksum_die_context (parent, &ctx);
7639
7640 /* Checksum the current DIE. */
7641 die_odr_checksum (die->die_tag, name, &ctx);
7642 md5_finish_ctx (&ctx, checksum);
7643
7644 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7645 }
7646
7647 /* Next, compute the complete type signature. */
7648
7649 md5_init_ctx (&ctx);
7650 mark = 1;
7651 die->die_mark = mark;
7652
7653 /* Checksum the names of surrounding namespaces and structures. */
7654 if (parent != NULL)
7655 checksum_die_context (parent, &ctx);
7656
7657 /* Checksum the DIE and its children. */
7658 die_checksum_ordered (die, &ctx, &mark);
7659 unmark_all_dies (die);
7660 md5_finish_ctx (&ctx, checksum);
7661
7662 /* Store the signature in the type node and link the type DIE and the
7663 type node together. */
7664 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7665 DWARF_TYPE_SIGNATURE_SIZE);
7666 die->comdat_type_p = true;
7667 die->die_id.die_type_node = type_node;
7668 type_node->type_die = die;
7669
7670 /* If the DIE is a specification, link its declaration to the type node
7671 as well. */
7672 if (decl != NULL)
7673 {
7674 decl->comdat_type_p = true;
7675 decl->die_id.die_type_node = type_node;
7676 }
7677 }
7678
7679 /* Do the location expressions look the same? */
7680 static inline int
7681 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7682 {
7683 return loc1->dw_loc_opc == loc2->dw_loc_opc
7684 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7685 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7686 }
7687
7688 /* Do the values look the same? */
7689 static int
7690 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7691 {
7692 dw_loc_descr_ref loc1, loc2;
7693 rtx r1, r2;
7694
7695 if (v1->val_class != v2->val_class)
7696 return 0;
7697
7698 switch (v1->val_class)
7699 {
7700 case dw_val_class_const:
7701 case dw_val_class_const_implicit:
7702 return v1->v.val_int == v2->v.val_int;
7703 case dw_val_class_unsigned_const:
7704 case dw_val_class_unsigned_const_implicit:
7705 return v1->v.val_unsigned == v2->v.val_unsigned;
7706 case dw_val_class_const_double:
7707 return v1->v.val_double.high == v2->v.val_double.high
7708 && v1->v.val_double.low == v2->v.val_double.low;
7709 case dw_val_class_wide_int:
7710 return *v1->v.val_wide == *v2->v.val_wide;
7711 case dw_val_class_vec:
7712 if (v1->v.val_vec.length != v2->v.val_vec.length
7713 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7714 return 0;
7715 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7716 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7717 return 0;
7718 return 1;
7719 case dw_val_class_flag:
7720 return v1->v.val_flag == v2->v.val_flag;
7721 case dw_val_class_str:
7722 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7723
7724 case dw_val_class_addr:
7725 r1 = v1->v.val_addr;
7726 r2 = v2->v.val_addr;
7727 if (GET_CODE (r1) != GET_CODE (r2))
7728 return 0;
7729 return rtx_equal_p (r1, r2);
7730
7731 case dw_val_class_offset:
7732 return v1->v.val_offset == v2->v.val_offset;
7733
7734 case dw_val_class_loc:
7735 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7736 loc1 && loc2;
7737 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7738 if (!same_loc_p (loc1, loc2, mark))
7739 return 0;
7740 return !loc1 && !loc2;
7741
7742 case dw_val_class_die_ref:
7743 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7744
7745 case dw_val_class_symview:
7746 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7747
7748 case dw_val_class_fde_ref:
7749 case dw_val_class_vms_delta:
7750 case dw_val_class_lbl_id:
7751 case dw_val_class_lineptr:
7752 case dw_val_class_macptr:
7753 case dw_val_class_loclistsptr:
7754 case dw_val_class_high_pc:
7755 return 1;
7756
7757 case dw_val_class_file:
7758 case dw_val_class_file_implicit:
7759 return v1->v.val_file == v2->v.val_file;
7760
7761 case dw_val_class_data8:
7762 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7763
7764 default:
7765 return 1;
7766 }
7767 }
7768
7769 /* Do the attributes look the same? */
7770
7771 static int
7772 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7773 {
7774 if (at1->dw_attr != at2->dw_attr)
7775 return 0;
7776
7777 /* We don't care that this was compiled with a different compiler
7778 snapshot; if the output is the same, that's what matters. */
7779 if (at1->dw_attr == DW_AT_producer)
7780 return 1;
7781
7782 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7783 }
7784
7785 /* Do the DIEs look the same? */
7786
7787 static int
7788 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7789 {
7790 dw_die_ref c1, c2;
7791 dw_attr_node *a1;
7792 unsigned ix;
7793
7794 /* To avoid infinite recursion. */
7795 if (die1->die_mark)
7796 return die1->die_mark == die2->die_mark;
7797 die1->die_mark = die2->die_mark = ++(*mark);
7798
7799 if (die1->die_tag != die2->die_tag)
7800 return 0;
7801
7802 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7803 return 0;
7804
7805 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7806 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7807 return 0;
7808
7809 c1 = die1->die_child;
7810 c2 = die2->die_child;
7811 if (! c1)
7812 {
7813 if (c2)
7814 return 0;
7815 }
7816 else
7817 for (;;)
7818 {
7819 if (!same_die_p (c1, c2, mark))
7820 return 0;
7821 c1 = c1->die_sib;
7822 c2 = c2->die_sib;
7823 if (c1 == die1->die_child)
7824 {
7825 if (c2 == die2->die_child)
7826 break;
7827 else
7828 return 0;
7829 }
7830 }
7831
7832 return 1;
7833 }
7834
7835 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7836 children, and set die_symbol. */
7837
7838 static void
7839 compute_comp_unit_symbol (dw_die_ref unit_die)
7840 {
7841 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7842 const char *base = die_name ? lbasename (die_name) : "anonymous";
7843 char *name = XALLOCAVEC (char, strlen (base) + 64);
7844 char *p;
7845 int i, mark;
7846 unsigned char checksum[16];
7847 struct md5_ctx ctx;
7848
7849 /* Compute the checksum of the DIE, then append part of it as hex digits to
7850 the filename of the unit. */
7851
7852 md5_init_ctx (&ctx);
7853 mark = 0;
7854 die_checksum (unit_die, &ctx, &mark);
7855 unmark_all_dies (unit_die);
7856 md5_finish_ctx (&ctx, checksum);
7857
7858 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7859 not start with a letter but with anything valid for filenames, and
7860 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7861 character is not a letter. */
7862 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7863 clean_symbol_name (name);
7864
7865 p = name + strlen (name);
7866 for (i = 0; i < 4; i++)
7867 {
7868 sprintf (p, "%.2x", checksum[i]);
7869 p += 2;
7870 }
7871
7872 unit_die->die_id.die_symbol = xstrdup (name);
7873 }
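
/* For example, a unit whose DW_AT_name has basename "foo.c" gets a die_symbol
   of roughly the form "foo.c.xxxxxxxx", where xxxxxxxx are the first four
   checksum bytes in hex, modulo whatever character rewriting
   clean_symbol_name applies.  */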
7874
7875 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7876
7877 static int
7878 is_type_die (dw_die_ref die)
7879 {
7880 switch (die->die_tag)
7881 {
7882 case DW_TAG_array_type:
7883 case DW_TAG_class_type:
7884 case DW_TAG_interface_type:
7885 case DW_TAG_enumeration_type:
7886 case DW_TAG_pointer_type:
7887 case DW_TAG_reference_type:
7888 case DW_TAG_rvalue_reference_type:
7889 case DW_TAG_string_type:
7890 case DW_TAG_structure_type:
7891 case DW_TAG_subroutine_type:
7892 case DW_TAG_union_type:
7893 case DW_TAG_ptr_to_member_type:
7894 case DW_TAG_set_type:
7895 case DW_TAG_subrange_type:
7896 case DW_TAG_base_type:
7897 case DW_TAG_const_type:
7898 case DW_TAG_file_type:
7899 case DW_TAG_packed_type:
7900 case DW_TAG_volatile_type:
7901 case DW_TAG_typedef:
7902 return 1;
7903 default:
7904 return 0;
7905 }
7906 }
7907
7908 /* Returns true iff C is a compile-unit DIE. */
7909
7910 static inline bool
7911 is_cu_die (dw_die_ref c)
7912 {
7913 return c && (c->die_tag == DW_TAG_compile_unit
7914 || c->die_tag == DW_TAG_skeleton_unit);
7915 }
7916
7917 /* Returns true iff C is a unit DIE of some sort. */
7918
7919 static inline bool
7920 is_unit_die (dw_die_ref c)
7921 {
7922 return c && (c->die_tag == DW_TAG_compile_unit
7923 || c->die_tag == DW_TAG_partial_unit
7924 || c->die_tag == DW_TAG_type_unit
7925 || c->die_tag == DW_TAG_skeleton_unit);
7926 }
7927
7928 /* Returns true iff C is a namespace DIE. */
7929
7930 static inline bool
7931 is_namespace_die (dw_die_ref c)
7932 {
7933 return c && c->die_tag == DW_TAG_namespace;
7934 }
7935
7936 /* Return non-zero if this DIE is a template parameter. */
7937
7938 static inline bool
7939 is_template_parameter (dw_die_ref die)
7940 {
7941 switch (die->die_tag)
7942 {
7943 case DW_TAG_template_type_param:
7944 case DW_TAG_template_value_param:
7945 case DW_TAG_GNU_template_template_param:
7946 case DW_TAG_GNU_template_parameter_pack:
7947 return true;
7948 default:
7949 return false;
7950 }
7951 }
7952
7953 /* Return non-zero if this DIE represents a template instantiation. */
7954
7955 static inline bool
7956 is_template_instantiation (dw_die_ref die)
7957 {
7958 dw_die_ref c;
7959
7960 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7961 return false;
7962 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7963 return false;
7964 }
7965
7966 static char *
7967 gen_internal_sym (const char *prefix)
7968 {
7969 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7970
7971 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7972 return xstrdup (buf);
7973 }
7974
7975 /* Return non-zero if this DIE is a declaration. */
7976
7977 static int
7978 is_declaration_die (dw_die_ref die)
7979 {
7980 dw_attr_node *a;
7981 unsigned ix;
7982
7983 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7984 if (a->dw_attr == DW_AT_declaration)
7985 return 1;
7986
7987 return 0;
7988 }
7989
7990 /* Return non-zero if this DIE is nested inside a subprogram. */
7991
7992 static int
7993 is_nested_in_subprogram (dw_die_ref die)
7994 {
7995 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7996
7997 if (decl == NULL)
7998 decl = die;
7999 return local_scope_p (decl);
8000 }
8001
8002 /* Return non-zero if this DIE contains a defining declaration of a
8003 subprogram. */
8004
8005 static int
8006 contains_subprogram_definition (dw_die_ref die)
8007 {
8008 dw_die_ref c;
8009
8010 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
8011 return 1;
8012 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
8013 return 0;
8014 }
8015
8016 /* Return non-zero if this is a type DIE that should be moved to a
8017 COMDAT .debug_types section or .debug_info section with DW_UT_*type
8018 unit type. */
8019
8020 static int
8021 should_move_die_to_comdat (dw_die_ref die)
8022 {
8023 switch (die->die_tag)
8024 {
8025 case DW_TAG_class_type:
8026 case DW_TAG_structure_type:
8027 case DW_TAG_enumeration_type:
8028 case DW_TAG_union_type:
8029 /* Don't move declarations, inlined instances, types nested in a
8030 subprogram, or types that contain subprogram definitions. */
8031 if (is_declaration_die (die)
8032 || get_AT (die, DW_AT_abstract_origin)
8033 || is_nested_in_subprogram (die)
8034 || contains_subprogram_definition (die))
8035 return 0;
8036 return 1;
8037 case DW_TAG_array_type:
8038 case DW_TAG_interface_type:
8039 case DW_TAG_pointer_type:
8040 case DW_TAG_reference_type:
8041 case DW_TAG_rvalue_reference_type:
8042 case DW_TAG_string_type:
8043 case DW_TAG_subroutine_type:
8044 case DW_TAG_ptr_to_member_type:
8045 case DW_TAG_set_type:
8046 case DW_TAG_subrange_type:
8047 case DW_TAG_base_type:
8048 case DW_TAG_const_type:
8049 case DW_TAG_file_type:
8050 case DW_TAG_packed_type:
8051 case DW_TAG_volatile_type:
8052 case DW_TAG_typedef:
8053 default:
8054 return 0;
8055 }
8056 }
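
/* In practice this means a class, struct, enum, or union definition at
   namespace scope is broken out into its own type unit, while forward
   declarations, inlined instances, types local to a subprogram, types that
   contain subprogram definitions, and all other kinds of type DIEs
   (pointers, typedefs, base types, ...) stay in the main compilation unit.  */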
8057
8058 /* Make a clone of DIE. */
8059
8060 static dw_die_ref
8061 clone_die (dw_die_ref die)
8062 {
8063 dw_die_ref clone = new_die_raw (die->die_tag);
8064 dw_attr_node *a;
8065 unsigned ix;
8066
8067 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8068 add_dwarf_attr (clone, a);
8069
8070 return clone;
8071 }
8072
8073 /* Make a clone of the tree rooted at DIE. */
8074
8075 static dw_die_ref
8076 clone_tree (dw_die_ref die)
8077 {
8078 dw_die_ref c;
8079 dw_die_ref clone = clone_die (die);
8080
8081 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8082
8083 return clone;
8084 }
8085
8086 /* Make a clone of DIE as a declaration. */
8087
8088 static dw_die_ref
8089 clone_as_declaration (dw_die_ref die)
8090 {
8091 dw_die_ref clone;
8092 dw_die_ref decl;
8093 dw_attr_node *a;
8094 unsigned ix;
8095
8096 /* If the DIE is already a declaration, just clone it. */
8097 if (is_declaration_die (die))
8098 return clone_die (die);
8099
8100 /* If the DIE is a specification, just clone its declaration DIE. */
8101 decl = get_AT_ref (die, DW_AT_specification);
8102 if (decl != NULL)
8103 {
8104 clone = clone_die (decl);
8105 if (die->comdat_type_p)
8106 add_AT_die_ref (clone, DW_AT_signature, die);
8107 return clone;
8108 }
8109
8110 clone = new_die_raw (die->die_tag);
8111
8112 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8113 {
8114 /* We don't want to copy over all attributes.
8115 For example we don't want DW_AT_byte_size because otherwise we will no
8116 longer have a declaration and GDB will treat it as a definition. */
8117
8118 switch (a->dw_attr)
8119 {
8120 case DW_AT_abstract_origin:
8121 case DW_AT_artificial:
8122 case DW_AT_containing_type:
8123 case DW_AT_external:
8124 case DW_AT_name:
8125 case DW_AT_type:
8126 case DW_AT_virtuality:
8127 case DW_AT_linkage_name:
8128 case DW_AT_MIPS_linkage_name:
8129 add_dwarf_attr (clone, a);
8130 break;
8131 case DW_AT_byte_size:
8132 case DW_AT_alignment:
8133 default:
8134 break;
8135 }
8136 }
8137
8138 if (die->comdat_type_p)
8139 add_AT_die_ref (clone, DW_AT_signature, die);
8140
8141 add_AT_flag (clone, DW_AT_declaration, 1);
8142 return clone;
8143 }
8144
8145
8146 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8147
8148 struct decl_table_entry
8149 {
8150 dw_die_ref orig;
8151 dw_die_ref copy;
8152 };
8153
8154 /* Helpers to manipulate hash table of copied declarations. */
8155
8156 /* Hashtable helpers. */
8157
8158 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8159 {
8160 typedef die_struct *compare_type;
8161 static inline hashval_t hash (const decl_table_entry *);
8162 static inline bool equal (const decl_table_entry *, const die_struct *);
8163 };
8164
8165 inline hashval_t
8166 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8167 {
8168 return htab_hash_pointer (entry->orig);
8169 }
8170
8171 inline bool
8172 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8173 const die_struct *entry2)
8174 {
8175 return entry1->orig == entry2;
8176 }
8177
8178 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8179
8180 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8181 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8182 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8183 to check if the ancestor has already been copied into UNIT. */
8184
8185 static dw_die_ref
8186 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8187 decl_hash_type *decl_table)
8188 {
8189 dw_die_ref parent = die->die_parent;
8190 dw_die_ref new_parent = unit;
8191 dw_die_ref copy;
8192 decl_table_entry **slot = NULL;
8193 struct decl_table_entry *entry = NULL;
8194
8195 /* If DIE refers to a stub, unfold it so we get the appropriate
8196 DIE registered as orig in decl_table. */
8197 if (dw_die_ref c = get_AT_ref (die, DW_AT_signature))
8198 die = c;
8199
8200 if (decl_table)
8201 {
8202 /* Check if the entry has already been copied to UNIT. */
8203 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8204 INSERT);
8205 if (*slot != HTAB_EMPTY_ENTRY)
8206 {
8207 entry = *slot;
8208 return entry->copy;
8209 }
8210
8211 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8212 entry = XCNEW (struct decl_table_entry);
8213 entry->orig = die;
8214 entry->copy = NULL;
8215 *slot = entry;
8216 }
8217
8218 if (parent != NULL)
8219 {
8220 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8221 if (spec != NULL)
8222 parent = spec;
8223 if (!is_unit_die (parent))
8224 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8225 }
8226
8227 copy = clone_as_declaration (die);
8228 add_child_die (new_parent, copy);
8229
8230 if (decl_table)
8231 {
8232 /* Record the pointer to the copy. */
8233 entry->copy = copy;
8234 }
8235
8236 return copy;
8237 }
8238 /* Copy the declaration context to the new type unit DIE. This includes
8239 any surrounding namespace or type declarations. If the DIE has an
8240 AT_specification attribute, it also includes attributes and children
8241 attached to the specification, and returns a pointer to the original
8242 parent of the declaration DIE. Returns NULL otherwise. */
8243
8244 static dw_die_ref
8245 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8246 {
8247 dw_die_ref decl;
8248 dw_die_ref new_decl;
8249 dw_die_ref orig_parent = NULL;
8250
8251 decl = get_AT_ref (die, DW_AT_specification);
8252 if (decl == NULL)
8253 decl = die;
8254 else
8255 {
8256 unsigned ix;
8257 dw_die_ref c;
8258 dw_attr_node *a;
8259
8260 /* The original DIE will be changed to a declaration, and must
8261 be moved to be a child of the original declaration DIE. */
8262 orig_parent = decl->die_parent;
8263
8264 /* Copy the type node pointer from the new DIE to the original
8265 declaration DIE so we can forward references later. */
8266 decl->comdat_type_p = true;
8267 decl->die_id.die_type_node = die->die_id.die_type_node;
8268
8269 remove_AT (die, DW_AT_specification);
8270
8271 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8272 {
8273 if (a->dw_attr != DW_AT_name
8274 && a->dw_attr != DW_AT_declaration
8275 && a->dw_attr != DW_AT_external)
8276 add_dwarf_attr (die, a);
8277 }
8278
8279 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8280 }
8281
8282 if (decl->die_parent != NULL
8283 && !is_unit_die (decl->die_parent))
8284 {
8285 new_decl = copy_ancestor_tree (unit, decl, NULL);
8286 if (new_decl != NULL)
8287 {
8288 remove_AT (new_decl, DW_AT_signature);
8289 add_AT_specification (die, new_decl);
8290 }
8291 }
8292
8293 return orig_parent;
8294 }
8295
8296 /* Generate the skeleton ancestor tree for the given NODE, then clone
8297 the DIE and add the clone into the tree. */
8298
8299 static void
8300 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8301 {
8302 if (node->new_die != NULL)
8303 return;
8304
8305 node->new_die = clone_as_declaration (node->old_die);
8306
8307 if (node->parent != NULL)
8308 {
8309 generate_skeleton_ancestor_tree (node->parent);
8310 add_child_die (node->parent->new_die, node->new_die);
8311 }
8312 }
8313
8314 /* Generate a skeleton tree of DIEs containing any declarations that are
8315 found in the original tree. We traverse the tree looking for declaration
8316 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8317
8318 static void
8319 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8320 {
8321 skeleton_chain_node node;
8322 dw_die_ref c;
8323 dw_die_ref first;
8324 dw_die_ref prev = NULL;
8325 dw_die_ref next = NULL;
8326
8327 node.parent = parent;
8328
8329 first = c = parent->old_die->die_child;
8330 if (c)
8331 next = c->die_sib;
8332 if (c) do {
8333 if (prev == NULL || prev->die_sib == c)
8334 prev = c;
8335 c = next;
8336 next = (c == first ? NULL : c->die_sib);
8337 node.old_die = c;
8338 node.new_die = NULL;
8339 if (is_declaration_die (c))
8340 {
8341 if (is_template_instantiation (c))
8342 {
8343 /* Instantiated templates do not need to be cloned into the
8344 type unit. Just move the DIE and its children back to
8345 the skeleton tree (in the main CU). */
8346 remove_child_with_prev (c, prev);
8347 add_child_die (parent->new_die, c);
8348 c = prev;
8349 }
8350 else if (c->comdat_type_p)
8351 {
8352 /* This is the skeleton of a type broken out earlier by
8353 break_out_comdat_types. Clone the existing DIE, but keep the children
8354 under the original (which is in the main CU). */
8355 dw_die_ref clone = clone_die (c);
8356
8357 replace_child (c, clone, prev);
8358 generate_skeleton_ancestor_tree (parent);
8359 add_child_die (parent->new_die, c);
8360 c = clone;
8361 continue;
8362 }
8363 else
8364 {
8365 /* Clone the existing DIE, move the original to the skeleton
8366 tree (which is in the main CU), and put the clone, with
8367 all the original's children, where the original came from
8368 (which is about to be moved to the type unit). */
8369 dw_die_ref clone = clone_die (c);
8370 move_all_children (c, clone);
8371
8372 /* If the original has a DW_AT_object_pointer attribute,
8373 it would now point to a child DIE just moved to the
8374 cloned tree, so we need to remove that attribute from
8375 the original. */
8376 remove_AT (c, DW_AT_object_pointer);
8377
8378 replace_child (c, clone, prev);
8379 generate_skeleton_ancestor_tree (parent);
8380 add_child_die (parent->new_die, c);
8381 node.old_die = clone;
8382 node.new_die = c;
8383 c = clone;
8384 }
8385 }
8386 generate_skeleton_bottom_up (&node);
8387 } while (next != NULL);
8388 }
8389
8390 /* Wrapper function for generate_skeleton_bottom_up. */
8391
8392 static dw_die_ref
8393 generate_skeleton (dw_die_ref die)
8394 {
8395 skeleton_chain_node node;
8396
8397 node.old_die = die;
8398 node.new_die = NULL;
8399 node.parent = NULL;
8400
8401 /* If this type definition is nested inside another type,
8402 and is not an instantiation of a template, always leave
8403 at least a declaration in its place. */
8404 if (die->die_parent != NULL
8405 && is_type_die (die->die_parent)
8406 && !is_template_instantiation (die))
8407 node.new_die = clone_as_declaration (die);
8408
8409 generate_skeleton_bottom_up (&node);
8410 return node.new_die;
8411 }
8412
8413 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8414 declaration. The original DIE is moved to a new compile unit so that
8415 existing references to it follow it to the new location. If any of the
8416 original DIE's descendants is a declaration, we need to replace the
8417 original DIE with a skeleton tree and move the declarations back into the
8418 skeleton tree. */
8419
8420 static dw_die_ref
8421 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8422 dw_die_ref prev)
8423 {
8424 dw_die_ref skeleton, orig_parent;
8425
8426 /* Copy the declaration context to the type unit DIE. If the returned
8427 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8428 that DIE. */
8429 orig_parent = copy_declaration_context (unit, child);
8430
8431 skeleton = generate_skeleton (child);
8432 if (skeleton == NULL)
8433 remove_child_with_prev (child, prev);
8434 else
8435 {
8436 skeleton->comdat_type_p = true;
8437 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8438
8439 /* If the original DIE was a specification, we need to put
8440 the skeleton under the parent DIE of the declaration.
8441 This leaves the original declaration in the tree, but
8442 it will be pruned later since there are no longer any
8443 references to it. */
8444 if (orig_parent != NULL)
8445 {
8446 remove_child_with_prev (child, prev);
8447 add_child_die (orig_parent, skeleton);
8448 }
8449 else
8450 replace_child (child, skeleton, prev);
8451 }
8452
8453 return skeleton;
8454 }
8455
8456 static void
8457 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8458 comdat_type_node *type_node,
8459 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8460
8461 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8462 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8463 DWARF procedure references in the DW_AT_location attribute. */
8464
8465 static dw_die_ref
8466 copy_dwarf_procedure (dw_die_ref die,
8467 comdat_type_node *type_node,
8468 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8469 {
8470 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8471
8472 /* DWARF procedures are not supposed to have children... */
8473 gcc_assert (die->die_child == NULL);
8474
8475 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8476 gcc_assert (vec_safe_length (die->die_attr) == 1
8477 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8478
8479 /* Do not copy DWARF procedures more than once. */
8480 bool existed;
8481 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8482 if (existed)
8483 return die_copy;
8484
8485 die_copy = clone_die (die);
8486 add_child_die (type_node->root_die, die_copy);
8487 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8488 return die_copy;
8489 }
8490
8491 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8492 procedures in DIE's attributes. */
8493
8494 static void
8495 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8496 comdat_type_node *type_node,
8497 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8498 {
8499 dw_attr_node *a;
8500 unsigned i;
8501
8502 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8503 {
8504 dw_loc_descr_ref loc;
8505
8506 if (a->dw_attr_val.val_class != dw_val_class_loc)
8507 continue;
8508
8509 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8510 {
8511 switch (loc->dw_loc_opc)
8512 {
8513 case DW_OP_call2:
8514 case DW_OP_call4:
8515 case DW_OP_call_ref:
8516 gcc_assert (loc->dw_loc_oprnd1.val_class
8517 == dw_val_class_die_ref);
8518 loc->dw_loc_oprnd1.v.val_die_ref.die
8519 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8520 type_node,
8521 copied_dwarf_procs);
8522
8523 default:
8524 break;
8525 }
8526 }
8527 }
8528 }
8529
8530 /* Copy DWARF procedures that are referenced by the DIE tree to TYPE_NODE and
8531 rewrite references to point to the copies.
8532
8533 References are looked for in DIE's attributes and recursively in all its
8534 children's attributes that are location descriptions. COPIED_DWARF_PROCS is a
8535 mapping from old DWARF procedures to their copies. It is used to avoid
8536 copying the same DWARF procedure twice under TYPE_NODE. */
8537
8538 static void
8539 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8540 comdat_type_node *type_node,
8541 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8542 {
8543 dw_die_ref c;
8544
8545 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8546 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8547 type_node,
8548 copied_dwarf_procs));
8549 }
8550
8551 /* Traverse the DIE and set up additional .debug_types or .debug_info
8552 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8553 section. */
8554
8555 static void
8556 break_out_comdat_types (dw_die_ref die)
8557 {
8558 dw_die_ref c;
8559 dw_die_ref first;
8560 dw_die_ref prev = NULL;
8561 dw_die_ref next = NULL;
8562 dw_die_ref unit = NULL;
8563
8564 first = c = die->die_child;
8565 if (c)
8566 next = c->die_sib;
8567 if (c) do {
8568 if (prev == NULL || prev->die_sib == c)
8569 prev = c;
8570 c = next;
8571 next = (c == first ? NULL : c->die_sib);
8572 if (should_move_die_to_comdat (c))
8573 {
8574 dw_die_ref replacement;
8575 comdat_type_node *type_node;
8576
8577 /* Break out nested types into their own type units. */
8578 break_out_comdat_types (c);
8579
8580 /* Create a new type unit DIE as the root for the new tree. */
8581 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8582 add_AT_unsigned (unit, DW_AT_language,
8583 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8584
8585 /* Add the new unit's type DIE into the comdat type list. */
8586 type_node = ggc_cleared_alloc<comdat_type_node> ();
8587 type_node->root_die = unit;
8588 type_node->next = comdat_type_list;
8589 comdat_type_list = type_node;
8590
8591 /* Generate the type signature. */
8592 generate_type_signature (c, type_node);
8593
8594 /* Copy the declaration context, attributes, and children of the
8595 declaration into the new type unit DIE, then remove this DIE
8596 from the main CU (or replace it with a skeleton if necessary). */
8597 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8598 type_node->skeleton_die = replacement;
8599
8600 /* Add the DIE to the new compunit. */
8601 add_child_die (unit, c);
8602
8603 /* Types can reference DWARF procedures for type size or data location
8604 expressions. Calls in DWARF expressions cannot target procedures
8605 that are not in the same section. So we must copy DWARF procedures
8606 along with this type and then rewrite references to them. */
8607 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8608 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8609
8610 if (replacement != NULL)
8611 c = replacement;
8612 }
8613 else if (c->die_tag == DW_TAG_namespace
8614 || c->die_tag == DW_TAG_class_type
8615 || c->die_tag == DW_TAG_structure_type
8616 || c->die_tag == DW_TAG_union_type)
8617 {
8618 /* Look for nested types that can be broken out. */
8619 break_out_comdat_types (c);
8620 }
8621 } while (next != NULL);
8622 }
8623
8624 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8625 Enter all the cloned children into the hash table decl_table. */
8626
8627 static dw_die_ref
8628 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8629 {
8630 dw_die_ref c;
8631 dw_die_ref clone;
8632 struct decl_table_entry *entry;
8633 decl_table_entry **slot;
8634
8635 if (die->die_tag == DW_TAG_subprogram)
8636 clone = clone_as_declaration (die);
8637 else
8638 clone = clone_die (die);
8639
8640 slot = decl_table->find_slot_with_hash (die,
8641 htab_hash_pointer (die), INSERT);
8642
8643 /* Assert that DIE isn't in the hash table yet. If it were already there,
8644 its ancestors would necessarily be there as well, and therefore
8645 clone_tree_partial wouldn't have been called. */
8646 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8647
8648 entry = XCNEW (struct decl_table_entry);
8649 entry->orig = die;
8650 entry->copy = clone;
8651 *slot = entry;
8652
8653 if (die->die_tag != DW_TAG_subprogram)
8654 FOR_EACH_CHILD (die, c,
8655 add_child_die (clone, clone_tree_partial (c, decl_table)));
8656
8657 return clone;
8658 }
8659
8660 /* Walk the DIE and its children, looking for references to incomplete
8661 or trivial types that are unmarked (i.e., that are not in the current
8662 type_unit). */
8663
8664 static void
8665 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8666 {
8667 dw_die_ref c;
8668 dw_attr_node *a;
8669 unsigned ix;
8670
8671 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8672 {
8673 if (AT_class (a) == dw_val_class_die_ref)
8674 {
8675 dw_die_ref targ = AT_ref (a);
8676 decl_table_entry **slot;
8677 struct decl_table_entry *entry;
8678
8679 if (targ->die_mark != 0 || targ->comdat_type_p)
8680 continue;
8681
8682 slot = decl_table->find_slot_with_hash (targ,
8683 htab_hash_pointer (targ),
8684 INSERT);
8685
8686 if (*slot != HTAB_EMPTY_ENTRY)
8687 {
8688 /* TARG has already been copied, so we just need to
8689 modify the reference to point to the copy. */
8690 entry = *slot;
8691 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8692 }
8693 else
8694 {
8695 dw_die_ref parent = unit;
8696 dw_die_ref copy = clone_die (targ);
8697
8698 /* Record in DECL_TABLE that TARG has been copied.
8699 Need to do this now, before the recursive call,
8700 because DECL_TABLE may be expanded and SLOT
8701 would no longer be a valid pointer. */
8702 entry = XCNEW (struct decl_table_entry);
8703 entry->orig = targ;
8704 entry->copy = copy;
8705 *slot = entry;
8706
8707 /* If TARG is not a declaration DIE, we need to copy its
8708 children. */
8709 if (!is_declaration_die (targ))
8710 {
8711 FOR_EACH_CHILD (
8712 targ, c,
8713 add_child_die (copy,
8714 clone_tree_partial (c, decl_table)));
8715 }
8716
8717 /* Make sure the cloned tree is marked as part of the
8718 type unit. */
8719 mark_dies (copy);
8720
8721 /* If TARG has surrounding context, copy its ancestor tree
8722 into the new type unit. */
8723 if (targ->die_parent != NULL
8724 && !is_unit_die (targ->die_parent))
8725 parent = copy_ancestor_tree (unit, targ->die_parent,
8726 decl_table);
8727
8728 add_child_die (parent, copy);
8729 a->dw_attr_val.v.val_die_ref.die = copy;
8730
8731 /* Make sure the newly-copied DIE is walked. If it was
8732 installed in a previously-added context, it won't
8733 get visited otherwise. */
8734 if (parent != unit)
8735 {
8736 /* Find the highest point of the newly-added tree,
8737 mark each node along the way, and walk from there. */
8738 parent->die_mark = 1;
8739 while (parent->die_parent
8740 && parent->die_parent->die_mark == 0)
8741 {
8742 parent = parent->die_parent;
8743 parent->die_mark = 1;
8744 }
8745 copy_decls_walk (unit, parent, decl_table);
8746 }
8747 }
8748 }
8749 }
8750
8751 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8752 }
8753
8754 /* Collect the skeleton DIEs already created in DIE by break_out_comdat_types
8755 and record them in DECL_TABLE. */
8756
8757 static void
8758 collect_skeleton_dies (dw_die_ref die, decl_hash_type *decl_table)
8759 {
8760 dw_die_ref c;
8761
8762 if (dw_attr_node *a = get_AT (die, DW_AT_signature))
8763 {
8764 dw_die_ref targ = AT_ref (a);
8765 gcc_assert (targ->die_mark == 0 && targ->comdat_type_p);
8766 decl_table_entry **slot
8767 = decl_table->find_slot_with_hash (targ,
8768 htab_hash_pointer (targ),
8769 INSERT);
8770 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8771 /* Record in DECL_TABLE that TARG has been already copied
8772 by remove_child_or_replace_with_skeleton. */
8773 decl_table_entry *entry = XCNEW (struct decl_table_entry);
8774 entry->orig = targ;
8775 entry->copy = die;
8776 *slot = entry;
8777 }
8778 FOR_EACH_CHILD (die, c, collect_skeleton_dies (c, decl_table));
8779 }
8780
8781 /* Copy declarations for "unworthy" types into the new comdat section.
8782 Incomplete types, modified types, and certain other types aren't broken
8783 out into comdat sections of their own, so they don't have a signature,
8784 and we need to copy the declaration into the same section so that we
8785 don't have an external reference. */
8786
8787 static void
8788 copy_decls_for_unworthy_types (dw_die_ref unit)
8789 {
8790 mark_dies (unit);
8791 decl_hash_type decl_table (10);
8792 collect_skeleton_dies (unit, &decl_table);
8793 copy_decls_walk (unit, unit, &decl_table);
8794 unmark_dies (unit);
8795 }
8796
8797 /* Traverse the DIE and add a sibling attribute if it may have the
8798 effect of speeding up access to siblings. To save some space,
8799 avoid generating sibling attributes for DIEs without children. */
8800
8801 static void
8802 add_sibling_attributes (dw_die_ref die)
8803 {
8804 dw_die_ref c;
8805
8806 if (! die->die_child)
8807 return;
8808
8809 if (die->die_parent && die != die->die_parent->die_child)
8810 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8811
8812 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8813 }
8814
8815 /* Output all location lists for the DIE and its children. */
8816
8817 static void
8818 output_location_lists (dw_die_ref die)
8819 {
8820 dw_die_ref c;
8821 dw_attr_node *a;
8822 unsigned ix;
8823
8824 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8825 if (AT_class (a) == dw_val_class_loc_list)
8826 output_loc_list (AT_loc_list (a));
8827
8828 FOR_EACH_CHILD (die, c, output_location_lists (c));
8829 }
8830
8831 /* During assign_location_list_indexes and output_loclists_offsets this holds
8832 the current index; afterwards it holds the number of assigned indexes (i.e. how
8833 large the .debug_loclists* offset table should be). */
8834 static unsigned int loc_list_idx;
8835
8836 /* Output all location list offsets for the DIE and its children. */
8837
8838 static void
8839 output_loclists_offsets (dw_die_ref die)
8840 {
8841 dw_die_ref c;
8842 dw_attr_node *a;
8843 unsigned ix;
8844
8845 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8846 if (AT_class (a) == dw_val_class_loc_list)
8847 {
8848 dw_loc_list_ref l = AT_loc_list (a);
8849 if (l->offset_emitted)
8850 continue;
8851 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8852 loc_section_label, NULL);
8853 gcc_assert (l->hash == loc_list_idx);
8854 loc_list_idx++;
8855 l->offset_emitted = true;
8856 }
8857
8858 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8859 }
8860
8861 /* Recursively set indexes of location lists. */
8862
8863 static void
8864 assign_location_list_indexes (dw_die_ref die)
8865 {
8866 dw_die_ref c;
8867 dw_attr_node *a;
8868 unsigned ix;
8869
8870 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8871 if (AT_class (a) == dw_val_class_loc_list)
8872 {
8873 dw_loc_list_ref list = AT_loc_list (a);
8874 if (!list->num_assigned)
8875 {
8876 list->num_assigned = true;
8877 list->hash = loc_list_idx++;
8878 }
8879 }
8880
8881 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8882 }
8883
8884 /* We want to limit the number of external references, because they are
8885 larger than local references: a relocation takes multiple words, and
8886 even a sig8 reference is always eight bytes, whereas a local reference
8887 can be as small as one byte (though DW_FORM_ref is usually 4 in GCC).
8888 So if we encounter multiple external references to the same type DIE, we
8889 make a local typedef stub for it and redirect all references there.
8890
8891 This is the element of the hash table for keeping track of these
8892 references. */
8893
8894 struct external_ref
8895 {
8896 dw_die_ref type;
8897 dw_die_ref stub;
8898 unsigned n_refs;
8899 };
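
/* When dwarf2_build_local_stub (below) decides to create a stub, it is either
   a DIE of the referenced type's tag carrying just a DW_AT_signature (for
   comdat types referenced via sig8) or an unnamed DW_TAG_typedef whose
   DW_AT_type points at the external type; local references are then
   redirected to that stub, so each costs a small CU-local reference instead
   of a relocation or an eight-byte signature.  */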
8900
8901 /* Hashtable helpers. */
8902
8903 struct external_ref_hasher : free_ptr_hash <external_ref>
8904 {
8905 static inline hashval_t hash (const external_ref *);
8906 static inline bool equal (const external_ref *, const external_ref *);
8907 };
8908
8909 inline hashval_t
8910 external_ref_hasher::hash (const external_ref *r)
8911 {
8912 dw_die_ref die = r->type;
8913 hashval_t h = 0;
8914
8915 /* We can't use the address of the DIE for hashing, because
8916 that will make the order of the stub DIEs non-deterministic. */
8917 if (! die->comdat_type_p)
8918 /* We have a symbol; use it to compute a hash. */
8919 h = htab_hash_string (die->die_id.die_symbol);
8920 else
8921 {
8922 /* We have a type signature; use a subset of the bits as the hash.
8923 The 8-byte signature is at least as large as hashval_t. */
8924 comdat_type_node *type_node = die->die_id.die_type_node;
8925 memcpy (&h, type_node->signature, sizeof (h));
8926 }
8927 return h;
8928 }
8929
8930 inline bool
8931 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8932 {
8933 return r1->type == r2->type;
8934 }
8935
8936 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8937
8938 /* Return a pointer to the external_ref for references to DIE. */
8939
8940 static struct external_ref *
8941 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8942 {
8943 struct external_ref ref, *ref_p;
8944 external_ref **slot;
8945
8946 ref.type = die;
8947 slot = map->find_slot (&ref, INSERT);
8948 if (*slot != HTAB_EMPTY_ENTRY)
8949 return *slot;
8950
8951 ref_p = XCNEW (struct external_ref);
8952 ref_p->type = die;
8953 *slot = ref_p;
8954 return ref_p;
8955 }
8956
8957 /* Subroutine of optimize_external_refs, below.
8958
8959 If we see a type skeleton, record it as our stub. If we see external
8960 references, remember how many we've seen. */
8961
8962 static void
8963 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8964 {
8965 dw_die_ref c;
8966 dw_attr_node *a;
8967 unsigned ix;
8968 struct external_ref *ref_p;
8969
8970 if (is_type_die (die)
8971 && (c = get_AT_ref (die, DW_AT_signature)))
8972 {
8973 /* This is a local skeleton; use it for local references. */
8974 ref_p = lookup_external_ref (map, c);
8975 ref_p->stub = die;
8976 }
8977
8978 /* Scan the DIE references, and remember any that refer to DIEs from
8979 other CUs (i.e. those which are not marked). */
8980 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8981 if (AT_class (a) == dw_val_class_die_ref
8982 && (c = AT_ref (a))->die_mark == 0
8983 && is_type_die (c))
8984 {
8985 ref_p = lookup_external_ref (map, c);
8986 ref_p->n_refs++;
8987 }
8988
8989 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8990 }
8991
8992 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8993 points to an external_ref, DATA is the CU we're processing. If we don't
8994 already have a local stub, and we have multiple refs, build a stub. */
8995
8996 int
8997 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8998 {
8999 struct external_ref *ref_p = *slot;
9000
9001 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
9002 {
9003 /* We have multiple references to this type, so build a small stub.
9004 Both of these forms are a bit dodgy from the perspective of the
9005 DWARF standard, since technically they should have names. */
9006 dw_die_ref cu = data;
9007 dw_die_ref type = ref_p->type;
9008 dw_die_ref stub = NULL;
9009
9010 if (type->comdat_type_p)
9011 {
9012 /* If we refer to this type via sig8, use AT_signature. */
9013 stub = new_die (type->die_tag, cu, NULL_TREE);
9014 add_AT_die_ref (stub, DW_AT_signature, type);
9015 }
9016 else
9017 {
9018 /* Otherwise, use a typedef with no name. */
9019 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
9020 add_AT_die_ref (stub, DW_AT_type, type);
9021 }
9022
9023 stub->die_mark++;
9024 ref_p->stub = stub;
9025 }
9026 return 1;
9027 }
9028
9029 /* DIE is a unit; look through all the DIE references to see if there are
9030 any external references to types, and if so, create local stubs for
9031 them which will be applied in build_abbrev_table. This is useful because
9032 references to local DIEs are smaller. */
9033
9034 static external_ref_hash_type *
9035 optimize_external_refs (dw_die_ref die)
9036 {
9037 external_ref_hash_type *map = new external_ref_hash_type (10);
9038 optimize_external_refs_1 (die, map);
9039 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
9040 return map;
9041 }
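
/* Illustrative note (not produced by the code above): if a compilation unit
   contains several references to the same external type DIE T, the traversal
   above leaves a single unnamed stub in the CU, roughly

	DW_TAG_typedef
	  DW_AT_type -> T		(one external reference)

   or, for a comdat type, a skeleton DIE carrying DW_AT_signature.  The
   remaining references are redirected to the stub by build_abbrev_table, so
   they can use the short CU-local reference form instead of DW_FORM_ref_addr
   or DW_FORM_ref_sig8.  */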
9042
9043 /* The following 4 variables are temporaries that are computed only during the
9044 build_abbrev_table call and used and released during the following
9045 optimize_abbrev_table call. */
9046
9047 /* First abbrev_id that can be optimized based on usage. */
9048 static unsigned int abbrev_opt_start;
9049
9050 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
9051 abbrev_id smaller than this, because they must be already sized
9052 during build_abbrev_table). */
9053 static unsigned int abbrev_opt_base_type_end;
9054
9055 /* Vector of usage counts during build_abbrev_table. Indexed by
9056 abbrev_id - abbrev_opt_start. */
9057 static vec<unsigned int> abbrev_usage_count;
9058
9059 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
9060 static vec<dw_die_ref> sorted_abbrev_dies;
9061
9062 /* The format of each DIE (and its attribute value pairs) is encoded in an
9063 abbreviation table. This routine builds the abbreviation table and assigns
9064 a unique abbreviation id for each abbreviation entry. The children of each
9065 die are visited recursively. */
9066
9067 static void
9068 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
9069 {
9070 unsigned int abbrev_id = 0;
9071 dw_die_ref c;
9072 dw_attr_node *a;
9073 unsigned ix;
9074 dw_die_ref abbrev;
9075
9076 /* Scan the DIE references, and replace any that refer to
9077 DIEs from other CUs (i.e. those which are not marked) with
9078 the local stubs we built in optimize_external_refs. */
9079 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9080 if (AT_class (a) == dw_val_class_die_ref
9081 && (c = AT_ref (a))->die_mark == 0)
9082 {
9083 struct external_ref *ref_p;
9084 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9085
9086 if (is_type_die (c)
9087 && (ref_p = lookup_external_ref (extern_map, c))
9088 && ref_p->stub && ref_p->stub != die)
9089 {
9090 gcc_assert (a->dw_attr != DW_AT_signature);
9091 change_AT_die_ref (a, ref_p->stub);
9092 }
9093 else
9094 /* We aren't changing this reference, so mark it external. */
9095 set_AT_ref_external (a, 1);
9096 }
9097
9098 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9099 {
9100 dw_attr_node *die_a, *abbrev_a;
9101 unsigned ix;
9102 bool ok = true;
9103
9104 if (abbrev_id == 0)
9105 continue;
9106 if (abbrev->die_tag != die->die_tag)
9107 continue;
9108 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9109 continue;
9110
9111 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9112 continue;
9113
9114 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9115 {
9116 abbrev_a = &(*abbrev->die_attr)[ix];
9117 if ((abbrev_a->dw_attr != die_a->dw_attr)
9118 || (value_format (abbrev_a) != value_format (die_a)))
9119 {
9120 ok = false;
9121 break;
9122 }
9123 }
9124 if (ok)
9125 break;
9126 }
9127
9128 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9129 {
9130 vec_safe_push (abbrev_die_table, die);
9131 if (abbrev_opt_start)
9132 abbrev_usage_count.safe_push (0);
9133 }
9134 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9135 {
9136 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9137 sorted_abbrev_dies.safe_push (die);
9138 }
9139
9140 die->die_abbrev = abbrev_id;
9141 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9142 }
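
/* To recap the matching above: a DIE reuses an existing abbreviation only if
   the tag, the has-children flag, the attribute count and, for each
   attribute, both the attribute code and the form chosen by value_format
   match exactly; otherwise a new entry is pushed onto abbrev_die_table.  */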
9143
9144 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9145 by die_abbrev's usage count, from the most commonly used
9146 abbreviation to the least. */
9147
9148 static int
9149 die_abbrev_cmp (const void *p1, const void *p2)
9150 {
9151 dw_die_ref die1 = *(const dw_die_ref *) p1;
9152 dw_die_ref die2 = *(const dw_die_ref *) p2;
9153
9154 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9155 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9156
9157 if (die1->die_abbrev >= abbrev_opt_base_type_end
9158 && die2->die_abbrev >= abbrev_opt_base_type_end)
9159 {
9160 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9161 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9162 return -1;
9163 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9164 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9165 return 1;
9166 }
9167
9168 /* Stabilize the sort. */
9169 if (die1->die_abbrev < die2->die_abbrev)
9170 return -1;
9171 if (die1->die_abbrev > die2->die_abbrev)
9172 return 1;
9173
9174 return 0;
9175 }
9176
9177 /* Convert dw_val_class_const and dw_val_class_unsigned_const class attributes
9178    of DIEs in between sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1]
9179 into dw_val_class_const_implicit or
9180 dw_val_class_unsigned_const_implicit. */
9181
9182 static void
9183 optimize_implicit_const (unsigned int first_id, unsigned int end,
9184 vec<bool> &implicit_consts)
9185 {
9186 /* It never makes sense if there is just one DIE using the abbreviation. */
9187 if (end < first_id + 2)
9188 return;
9189
9190 dw_attr_node *a;
9191 unsigned ix, i;
9192 dw_die_ref die = sorted_abbrev_dies[first_id];
9193 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9194 if (implicit_consts[ix])
9195 {
9196 enum dw_val_class new_class = dw_val_class_none;
9197 switch (AT_class (a))
9198 {
9199 case dw_val_class_unsigned_const:
9200 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9201 continue;
9202
9203 /* The .debug_abbrev section will grow by
9204 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9205 in all the DIEs using that abbreviation. */
9206 if (constant_size (AT_unsigned (a)) * (end - first_id)
9207 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9208 continue;
9209
9210 new_class = dw_val_class_unsigned_const_implicit;
9211 break;
9212
9213 case dw_val_class_const:
9214 new_class = dw_val_class_const_implicit;
9215 break;
9216
9217 case dw_val_class_file:
9218 new_class = dw_val_class_file_implicit;
9219 break;
9220
9221 default:
9222 continue;
9223 }
9224 for (i = first_id; i < end; i++)
9225 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9226 = new_class;
9227 }
9228 }
9229
9230 /* Attempt to optimize the abbreviation table for abbreviations with id
9231    abbrev_opt_start and above.  */
9232
9233 static void
9234 optimize_abbrev_table (void)
9235 {
9236 if (abbrev_opt_start
9237 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9238 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9239 {
9240 auto_vec<bool, 32> implicit_consts;
9241 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9242
9243 unsigned int abbrev_id = abbrev_opt_start - 1;
9244 unsigned int first_id = ~0U;
9245 unsigned int last_abbrev_id = 0;
9246 unsigned int i;
9247 dw_die_ref die;
9248 if (abbrev_opt_base_type_end > abbrev_opt_start)
9249 abbrev_id = abbrev_opt_base_type_end - 1;
9250 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9251 most commonly used abbreviations come first. */
9252 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9253 {
9254 dw_attr_node *a;
9255 unsigned ix;
9256
9257 /* If calc_base_type_die_sizes has been called, the CU and
9258 base types after it can't be optimized, because we've already
9259 calculated their DIE offsets. We've sorted them first. */
9260 if (die->die_abbrev < abbrev_opt_base_type_end)
9261 continue;
9262 if (die->die_abbrev != last_abbrev_id)
9263 {
9264 last_abbrev_id = die->die_abbrev;
9265 if (dwarf_version >= 5 && first_id != ~0U)
9266 optimize_implicit_const (first_id, i, implicit_consts);
9267 abbrev_id++;
9268 (*abbrev_die_table)[abbrev_id] = die;
9269 if (dwarf_version >= 5)
9270 {
9271 first_id = i;
9272 implicit_consts.truncate (0);
9273
9274 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9275 switch (AT_class (a))
9276 {
9277 case dw_val_class_const:
9278 case dw_val_class_unsigned_const:
9279 case dw_val_class_file:
9280 implicit_consts.safe_push (true);
9281 break;
9282 default:
9283 implicit_consts.safe_push (false);
9284 break;
9285 }
9286 }
9287 }
9288 else if (dwarf_version >= 5)
9289 {
9290 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9291 if (!implicit_consts[ix])
9292 continue;
9293 else
9294 {
9295 dw_attr_node *other_a
9296 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9297 if (!dw_val_equal_p (&a->dw_attr_val,
9298 &other_a->dw_attr_val))
9299 implicit_consts[ix] = false;
9300 }
9301 }
9302 die->die_abbrev = abbrev_id;
9303 }
9304 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9305 if (dwarf_version >= 5 && first_id != ~0U)
9306 optimize_implicit_const (first_id, i, implicit_consts);
9307 }
9308
9309 abbrev_opt_start = 0;
9310 abbrev_opt_base_type_end = 0;
9311 abbrev_usage_count.release ();
9312 sorted_abbrev_dies.release ();
9313 }
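
/* Note on the reordering above: abbreviation codes are emitted as uleb128,
   so codes 1-127 cost a single byte per DIE while larger codes cost two or
   more.  Giving the smallest codes to the most frequently used abbreviations
   therefore shrinks .debug_info, and for DWARF 5 the pass additionally uses
   DW_FORM_implicit_const to move attribute values shared by every user of an
   abbreviation out of .debug_info and into .debug_abbrev.  */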
9314 \f
9315 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9316
9317 static int
9318 constant_size (unsigned HOST_WIDE_INT value)
9319 {
9320 int log;
9321
9322 if (value == 0)
9323 log = 0;
9324 else
9325 log = floor_log2 (value);
9326
9327 log = log / 8;
9328 log = 1 << (floor_log2 (log) + 1);
9329
9330 return log;
9331 }
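
/* A few worked examples of the rounding above, assuming floor_log2 (0)
   returns -1 as in hwint.h: constant_size (0) == 1, constant_size (0x12)
   == 1, constant_size (0x1234) == 2, constant_size (0x12345) == 4 and
   constant_size ((unsigned HOST_WIDE_INT) 1 << 40) == 8.  */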
9332
9333 /* Return the size of a DIE as it is represented in the
9334 .debug_info section. */
9335
9336 static unsigned long
9337 size_of_die (dw_die_ref die)
9338 {
9339 unsigned long size = 0;
9340 dw_attr_node *a;
9341 unsigned ix;
9342 enum dwarf_form form;
9343
9344 size += size_of_uleb128 (die->die_abbrev);
9345 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9346 {
9347 switch (AT_class (a))
9348 {
9349 case dw_val_class_addr:
9350 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9351 {
9352 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9353 size += size_of_uleb128 (AT_index (a));
9354 }
9355 else
9356 size += DWARF2_ADDR_SIZE;
9357 break;
9358 case dw_val_class_offset:
9359 size += DWARF_OFFSET_SIZE;
9360 break;
9361 case dw_val_class_loc:
9362 {
9363 unsigned long lsize = size_of_locs (AT_loc (a));
9364
9365 /* Block length. */
9366 if (dwarf_version >= 4)
9367 size += size_of_uleb128 (lsize);
9368 else
9369 size += constant_size (lsize);
9370 size += lsize;
9371 }
9372 break;
9373 case dw_val_class_loc_list:
9374 if (dwarf_split_debug_info && dwarf_version >= 5)
9375 {
9376 gcc_assert (AT_loc_list (a)->num_assigned);
9377 size += size_of_uleb128 (AT_loc_list (a)->hash);
9378 }
9379 else
9380 size += DWARF_OFFSET_SIZE;
9381 break;
9382 case dw_val_class_view_list:
9383 size += DWARF_OFFSET_SIZE;
9384 break;
9385 case dw_val_class_range_list:
9386 if (value_format (a) == DW_FORM_rnglistx)
9387 {
9388 gcc_assert (rnglist_idx);
9389 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9390 size += size_of_uleb128 (r->idx);
9391 }
9392 else
9393 size += DWARF_OFFSET_SIZE;
9394 break;
9395 case dw_val_class_const:
9396 size += size_of_sleb128 (AT_int (a));
9397 break;
9398 case dw_val_class_unsigned_const:
9399 {
9400 int csize = constant_size (AT_unsigned (a));
9401 if (dwarf_version == 3
9402 && a->dw_attr == DW_AT_data_member_location
9403 && csize >= 4)
9404 size += size_of_uleb128 (AT_unsigned (a));
9405 else
9406 size += csize;
9407 }
9408 break;
9409 case dw_val_class_symview:
9410 if (symview_upper_bound <= 0xff)
9411 size += 1;
9412 else if (symview_upper_bound <= 0xffff)
9413 size += 2;
9414 else if (symview_upper_bound <= 0xffffffff)
9415 size += 4;
9416 else
9417 size += 8;
9418 break;
9419 case dw_val_class_const_implicit:
9420 case dw_val_class_unsigned_const_implicit:
9421 case dw_val_class_file_implicit:
9422 /* These occupy no size in the DIE, just an extra sleb128 in
9423 .debug_abbrev. */
9424 break;
9425 case dw_val_class_const_double:
9426 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9427 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9428 size++; /* block */
9429 break;
9430 case dw_val_class_wide_int:
9431 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9432 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9433 if (get_full_len (*a->dw_attr_val.v.val_wide)
9434 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9435 size++; /* block */
9436 break;
9437 case dw_val_class_vec:
9438 size += constant_size (a->dw_attr_val.v.val_vec.length
9439 * a->dw_attr_val.v.val_vec.elt_size)
9440 + a->dw_attr_val.v.val_vec.length
9441 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9442 break;
9443 case dw_val_class_flag:
9444 if (dwarf_version >= 4)
9445 /* Currently all add_AT_flag calls pass in 1 as last argument,
9446 so DW_FORM_flag_present can be used. If that ever changes,
9447 we'll need to use DW_FORM_flag and have some optimization
9448 in build_abbrev_table that will change those to
9449 DW_FORM_flag_present if it is set to 1 in all DIEs using
9450 the same abbrev entry. */
9451 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9452 else
9453 size += 1;
9454 break;
9455 case dw_val_class_die_ref:
9456 if (AT_ref_external (a))
9457 {
9458 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9459 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9460 is sized by target address length, whereas in DWARF3
9461 it's always sized as an offset. */
9462 if (AT_ref (a)->comdat_type_p)
9463 size += DWARF_TYPE_SIGNATURE_SIZE;
9464 else if (dwarf_version == 2)
9465 size += DWARF2_ADDR_SIZE;
9466 else
9467 size += DWARF_OFFSET_SIZE;
9468 }
9469 else
9470 size += DWARF_OFFSET_SIZE;
9471 break;
9472 case dw_val_class_fde_ref:
9473 size += DWARF_OFFSET_SIZE;
9474 break;
9475 case dw_val_class_lbl_id:
9476 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9477 {
9478 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9479 size += size_of_uleb128 (AT_index (a));
9480 }
9481 else
9482 size += DWARF2_ADDR_SIZE;
9483 break;
9484 case dw_val_class_lineptr:
9485 case dw_val_class_macptr:
9486 case dw_val_class_loclistsptr:
9487 size += DWARF_OFFSET_SIZE;
9488 break;
9489 case dw_val_class_str:
9490 form = AT_string_form (a);
9491 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9492 size += DWARF_OFFSET_SIZE;
9493 else if (form == dwarf_FORM (DW_FORM_strx))
9494 size += size_of_uleb128 (AT_index (a));
9495 else
9496 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9497 break;
9498 case dw_val_class_file:
9499 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9500 break;
9501 case dw_val_class_data8:
9502 size += 8;
9503 break;
9504 case dw_val_class_vms_delta:
9505 size += DWARF_OFFSET_SIZE;
9506 break;
9507 case dw_val_class_high_pc:
9508 size += DWARF2_ADDR_SIZE;
9509 break;
9510 case dw_val_class_discr_value:
9511 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9512 break;
9513 case dw_val_class_discr_list:
9514 {
9515 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9516
9517 /* This is a block, so we have the block length and then its
9518 data. */
9519 size += constant_size (block_size) + block_size;
9520 }
9521 break;
9522 default:
9523 gcc_unreachable ();
9524 }
9525 }
9526
9527 return size;
9528 }
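
/* Note that size_of_die must stay in sync with value_format and output_die
   below: each dw_val_class case has to account for exactly the bytes the
   chosen DW_FORM will occupy when the DIE is emitted.  */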
9529
9530 /* Size the debugging information associated with a given DIE. Visits the
9531    DIE's children recursively.  Updates the global variable next_die_offset
9532    each time through.  Uses the current value of next_die_offset to update the
9533 die_offset field in each DIE. */
9534
9535 static void
9536 calc_die_sizes (dw_die_ref die)
9537 {
9538 dw_die_ref c;
9539
9540 gcc_assert (die->die_offset == 0
9541 || (unsigned long int) die->die_offset == next_die_offset);
9542 die->die_offset = next_die_offset;
9543 next_die_offset += size_of_die (die);
9544
9545 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9546
9547 if (die->die_child != NULL)
9548 /* Count the null byte used to terminate sibling lists. */
9549 next_die_offset += 1;
9550 }
9551
9552 /* Size just the base type children at the start of the CU.
9553    This is needed because build_abbrev_table needs to size locs,
9554    and sizing of type-based stack ops needs to know die_offset
9555 values for the base types. */
9556
9557 static void
9558 calc_base_type_die_sizes (void)
9559 {
9560 unsigned long die_offset = (dwarf_split_debug_info
9561 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9562 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9563 unsigned int i;
9564 dw_die_ref base_type;
9565 #if ENABLE_ASSERT_CHECKING
9566 dw_die_ref prev = comp_unit_die ()->die_child;
9567 #endif
9568
9569 die_offset += size_of_die (comp_unit_die ());
9570 for (i = 0; base_types.iterate (i, &base_type); i++)
9571 {
9572 #if ENABLE_ASSERT_CHECKING
9573 gcc_assert (base_type->die_offset == 0
9574 && prev->die_sib == base_type
9575 && base_type->die_child == NULL
9576 && base_type->die_abbrev);
9577 prev = base_type;
9578 #endif
9579 if (abbrev_opt_start
9580 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9581 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9582 base_type->die_offset = die_offset;
9583 die_offset += size_of_die (base_type);
9584 }
9585 }
9586
9587 /* Set the marks for a die and its children. We do this so
9588    that we know whether or not a reference needs to use DW_FORM_ref_addr; only
9589 DIEs in the same CU will be marked. We used to clear out the offset
9590 and use that as the flag, but ran into ordering problems. */
9591
9592 static void
9593 mark_dies (dw_die_ref die)
9594 {
9595 dw_die_ref c;
9596
9597 gcc_assert (!die->die_mark);
9598
9599 die->die_mark = 1;
9600 FOR_EACH_CHILD (die, c, mark_dies (c));
9601 }
9602
9603 /* Clear the marks for a die and its children. */
9604
9605 static void
9606 unmark_dies (dw_die_ref die)
9607 {
9608 dw_die_ref c;
9609
9610 if (! use_debug_types)
9611 gcc_assert (die->die_mark);
9612
9613 die->die_mark = 0;
9614 FOR_EACH_CHILD (die, c, unmark_dies (c));
9615 }
9616
9617 /* Clear the marks for a die, its children and referred dies. */
9618
9619 static void
9620 unmark_all_dies (dw_die_ref die)
9621 {
9622 dw_die_ref c;
9623 dw_attr_node *a;
9624 unsigned ix;
9625
9626 if (!die->die_mark)
9627 return;
9628 die->die_mark = 0;
9629
9630 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9631
9632 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9633 if (AT_class (a) == dw_val_class_die_ref)
9634 unmark_all_dies (AT_ref (a));
9635 }
9636
9637 /* Calculate if the entry should appear in the final output file. It may be
9638    from a pruned type.  */
9639
9640 static bool
9641 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9642 {
9643 /* By limiting gnu pubnames to definitions only, gold can generate a
9644 gdb index without entries for declarations, which don't include
9645 enough information to be useful. */
9646 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9647 return false;
9648
9649 if (table == pubname_table)
9650 {
9651 /* Enumerator names are part of the pubname table, but the
9652 parent DW_TAG_enumeration_type die may have been pruned.
9653 Don't output them if that is the case. */
9654 if (p->die->die_tag == DW_TAG_enumerator &&
9655 (p->die->die_parent == NULL
9656 || !p->die->die_parent->die_perennial_p))
9657 return false;
9658
9659 /* Everything else in the pubname table is included. */
9660 return true;
9661 }
9662
9663 /* The pubtypes table shouldn't include types that have been
9664 pruned. */
9665 return (p->die->die_offset != 0
9666 || !flag_eliminate_unused_debug_types);
9667 }
9668
9669 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9670 generated for the compilation unit. */
9671
9672 static unsigned long
9673 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9674 {
9675 unsigned long size;
9676 unsigned i;
9677 pubname_entry *p;
9678 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9679
9680 size = DWARF_PUBNAMES_HEADER_SIZE;
9681 FOR_EACH_VEC_ELT (*names, i, p)
9682 if (include_pubname_in_output (names, p))
9683 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9684
9685 size += DWARF_OFFSET_SIZE;
9686 return size;
9687 }
9688
9689 /* Return the size of the information in the .debug_aranges section. */
9690
9691 static unsigned long
9692 size_of_aranges (void)
9693 {
9694 unsigned long size;
9695
9696 size = DWARF_ARANGES_HEADER_SIZE;
9697
9698 /* Count the address/length pair for this compilation unit. */
9699 if (text_section_used)
9700 size += 2 * DWARF2_ADDR_SIZE;
9701 if (cold_text_section_used)
9702 size += 2 * DWARF2_ADDR_SIZE;
9703 if (have_multiple_function_sections)
9704 {
9705 unsigned fde_idx;
9706 dw_fde_ref fde;
9707
9708 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9709 {
9710 if (DECL_IGNORED_P (fde->decl))
9711 continue;
9712 if (!fde->in_std_section)
9713 size += 2 * DWARF2_ADDR_SIZE;
9714 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9715 size += 2 * DWARF2_ADDR_SIZE;
9716 }
9717 }
9718
9719   /* Count the two zero words used to terminate the address range table.  */
9720 size += 2 * DWARF2_ADDR_SIZE;
9721 return size;
9722 }
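
/* For example, a compilation unit whose code lives entirely in .text
   contributes DWARF_ARANGES_HEADER_SIZE plus 2 * DWARF2_ADDR_SIZE for its
   single address/length pair plus another 2 * DWARF2_ADDR_SIZE for the
   terminating zero entry counted above.  */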
9723 \f
9724 /* Select the encoding of an attribute value. */
9725
9726 static enum dwarf_form
9727 value_format (dw_attr_node *a)
9728 {
9729 switch (AT_class (a))
9730 {
9731 case dw_val_class_addr:
9732 /* Only very few attributes allow DW_FORM_addr. */
9733 switch (a->dw_attr)
9734 {
9735 case DW_AT_low_pc:
9736 case DW_AT_high_pc:
9737 case DW_AT_entry_pc:
9738 case DW_AT_trampoline:
9739 return (AT_index (a) == NOT_INDEXED
9740 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9741 default:
9742 break;
9743 }
9744 switch (DWARF2_ADDR_SIZE)
9745 {
9746 case 1:
9747 return DW_FORM_data1;
9748 case 2:
9749 return DW_FORM_data2;
9750 case 4:
9751 return DW_FORM_data4;
9752 case 8:
9753 return DW_FORM_data8;
9754 default:
9755 gcc_unreachable ();
9756 }
9757 case dw_val_class_loc_list:
9758 if (dwarf_split_debug_info
9759 && dwarf_version >= 5
9760 && AT_loc_list (a)->num_assigned)
9761 return DW_FORM_loclistx;
9762 /* FALLTHRU */
9763 case dw_val_class_view_list:
9764 case dw_val_class_range_list:
9765 /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo
9766 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9767 care about sizes of .debug* sections in shared libraries and
9768 executables and don't take into account relocations that affect just
9769 	 relocatable objects - for DW_FORM_rnglistx we'd have to emit an offset
9770 	 table in the .debug_rnglists section.  */
9771 if (dwarf_split_debug_info
9772 && dwarf_version >= 5
9773 && AT_class (a) == dw_val_class_range_list
9774 && rnglist_idx
9775 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9776 return DW_FORM_rnglistx;
9777 if (dwarf_version >= 4)
9778 return DW_FORM_sec_offset;
9779 /* FALLTHRU */
9780 case dw_val_class_vms_delta:
9781 case dw_val_class_offset:
9782 switch (DWARF_OFFSET_SIZE)
9783 {
9784 case 4:
9785 return DW_FORM_data4;
9786 case 8:
9787 return DW_FORM_data8;
9788 default:
9789 gcc_unreachable ();
9790 }
9791 case dw_val_class_loc:
9792 if (dwarf_version >= 4)
9793 return DW_FORM_exprloc;
9794 switch (constant_size (size_of_locs (AT_loc (a))))
9795 {
9796 case 1:
9797 return DW_FORM_block1;
9798 case 2:
9799 return DW_FORM_block2;
9800 case 4:
9801 return DW_FORM_block4;
9802 default:
9803 gcc_unreachable ();
9804 }
9805 case dw_val_class_const:
9806 return DW_FORM_sdata;
9807 case dw_val_class_unsigned_const:
9808 switch (constant_size (AT_unsigned (a)))
9809 {
9810 case 1:
9811 return DW_FORM_data1;
9812 case 2:
9813 return DW_FORM_data2;
9814 case 4:
9815 /* In DWARF3 DW_AT_data_member_location with
9816 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9817 constant, so we need to use DW_FORM_udata if we need
9818 a large constant. */
9819 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9820 return DW_FORM_udata;
9821 return DW_FORM_data4;
9822 case 8:
9823 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9824 return DW_FORM_udata;
9825 return DW_FORM_data8;
9826 default:
9827 gcc_unreachable ();
9828 }
9829 case dw_val_class_const_implicit:
9830 case dw_val_class_unsigned_const_implicit:
9831 case dw_val_class_file_implicit:
9832 return DW_FORM_implicit_const;
9833 case dw_val_class_const_double:
9834 switch (HOST_BITS_PER_WIDE_INT)
9835 {
9836 case 8:
9837 return DW_FORM_data2;
9838 case 16:
9839 return DW_FORM_data4;
9840 case 32:
9841 return DW_FORM_data8;
9842 case 64:
9843 if (dwarf_version >= 5)
9844 return DW_FORM_data16;
9845 /* FALLTHRU */
9846 default:
9847 return DW_FORM_block1;
9848 }
9849 case dw_val_class_wide_int:
9850 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9851 {
9852 case 8:
9853 return DW_FORM_data1;
9854 case 16:
9855 return DW_FORM_data2;
9856 case 32:
9857 return DW_FORM_data4;
9858 case 64:
9859 return DW_FORM_data8;
9860 case 128:
9861 if (dwarf_version >= 5)
9862 return DW_FORM_data16;
9863 /* FALLTHRU */
9864 default:
9865 return DW_FORM_block1;
9866 }
9867 case dw_val_class_symview:
9868 /* ??? We might use uleb128, but then we'd have to compute
9869 .debug_info offsets in the assembler. */
9870 if (symview_upper_bound <= 0xff)
9871 return DW_FORM_data1;
9872 else if (symview_upper_bound <= 0xffff)
9873 return DW_FORM_data2;
9874 else if (symview_upper_bound <= 0xffffffff)
9875 return DW_FORM_data4;
9876 else
9877 return DW_FORM_data8;
9878 case dw_val_class_vec:
9879 switch (constant_size (a->dw_attr_val.v.val_vec.length
9880 * a->dw_attr_val.v.val_vec.elt_size))
9881 {
9882 case 1:
9883 return DW_FORM_block1;
9884 case 2:
9885 return DW_FORM_block2;
9886 case 4:
9887 return DW_FORM_block4;
9888 default:
9889 gcc_unreachable ();
9890 }
9891 case dw_val_class_flag:
9892 if (dwarf_version >= 4)
9893 {
9894 /* Currently all add_AT_flag calls pass in 1 as last argument,
9895 so DW_FORM_flag_present can be used. If that ever changes,
9896 we'll need to use DW_FORM_flag and have some optimization
9897 in build_abbrev_table that will change those to
9898 DW_FORM_flag_present if it is set to 1 in all DIEs using
9899 the same abbrev entry. */
9900 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9901 return DW_FORM_flag_present;
9902 }
9903 return DW_FORM_flag;
9904 case dw_val_class_die_ref:
9905 if (AT_ref_external (a))
9906 {
9907 if (AT_ref (a)->comdat_type_p)
9908 return DW_FORM_ref_sig8;
9909 else
9910 return DW_FORM_ref_addr;
9911 }
9912 else
9913 return DW_FORM_ref;
9914 case dw_val_class_fde_ref:
9915 return DW_FORM_data;
9916 case dw_val_class_lbl_id:
9917 return (AT_index (a) == NOT_INDEXED
9918 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9919 case dw_val_class_lineptr:
9920 case dw_val_class_macptr:
9921 case dw_val_class_loclistsptr:
9922 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9923 case dw_val_class_str:
9924 return AT_string_form (a);
9925 case dw_val_class_file:
9926 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9927 {
9928 case 1:
9929 return DW_FORM_data1;
9930 case 2:
9931 return DW_FORM_data2;
9932 case 4:
9933 return DW_FORM_data4;
9934 default:
9935 gcc_unreachable ();
9936 }
9937
9938 case dw_val_class_data8:
9939 return DW_FORM_data8;
9940
9941 case dw_val_class_high_pc:
9942 switch (DWARF2_ADDR_SIZE)
9943 {
9944 case 1:
9945 return DW_FORM_data1;
9946 case 2:
9947 return DW_FORM_data2;
9948 case 4:
9949 return DW_FORM_data4;
9950 case 8:
9951 return DW_FORM_data8;
9952 default:
9953 gcc_unreachable ();
9954 }
9955
9956 case dw_val_class_discr_value:
9957 return (a->dw_attr_val.v.val_discr_value.pos
9958 ? DW_FORM_udata
9959 : DW_FORM_sdata);
9960 case dw_val_class_discr_list:
9961 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9962 {
9963 case 1:
9964 return DW_FORM_block1;
9965 case 2:
9966 return DW_FORM_block2;
9967 case 4:
9968 return DW_FORM_block4;
9969 default:
9970 gcc_unreachable ();
9971 }
9972
9973 default:
9974 gcc_unreachable ();
9975 }
9976 }
9977
9978 /* Output the encoding of an attribute value. */
9979
9980 static void
9981 output_value_format (dw_attr_node *a)
9982 {
9983 enum dwarf_form form = value_format (a);
9984
9985 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9986 }
9987
9988 /* Given a die and id, produce the appropriate abbreviations. */
9989
9990 static void
9991 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9992 {
9993 unsigned ix;
9994 dw_attr_node *a_attr;
9995
9996 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9997 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9998 dwarf_tag_name (abbrev->die_tag));
9999
10000 if (abbrev->die_child != NULL)
10001 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
10002 else
10003 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
10004
10005 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
10006 {
10007 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
10008 dwarf_attr_name (a_attr->dw_attr));
10009 output_value_format (a_attr);
10010 if (value_format (a_attr) == DW_FORM_implicit_const)
10011 {
10012 if (AT_class (a_attr) == dw_val_class_file_implicit)
10013 {
10014 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
10015 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
10016 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
10017 }
10018 else
10019 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
10020 }
10021 }
10022
10023 dw2_asm_output_data (1, 0, NULL);
10024 dw2_asm_output_data (1, 0, NULL);
10025 }
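
/* For reference, each abbreviation emitted above has the standard
   .debug_abbrev layout:

	uleb128	abbrev code
	uleb128	DW_TAG_*
	ubyte	DW_children_yes / DW_children_no
	uleb128	DW_AT_*, uleb128 DW_FORM_*	(repeated; an sleb128 value
						 follows DW_FORM_implicit_const)
	0, 0					(terminator)  */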
10026
10027
10028 /* Output the .debug_abbrev section which defines the DIE abbreviation
10029 table. */
10030
10031 static void
10032 output_abbrev_section (void)
10033 {
10034 unsigned int abbrev_id;
10035 dw_die_ref abbrev;
10036
10037 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
10038 if (abbrev_id != 0)
10039 output_die_abbrevs (abbrev_id, abbrev);
10040
10041 /* Terminate the table. */
10042 dw2_asm_output_data (1, 0, NULL);
10043 }
10044
10045 /* Return a new location list, given the begin and end range, and the
10046 expression. */
10047
10048 static inline dw_loc_list_ref
10049 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
10050 const char *end, var_loc_view vend,
10051 const char *section)
10052 {
10053 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
10054
10055 retlist->begin = begin;
10056 retlist->begin_entry = NULL;
10057 retlist->end = end;
10058 retlist->expr = expr;
10059 retlist->section = section;
10060 retlist->vbegin = vbegin;
10061 retlist->vend = vend;
10062
10063 return retlist;
10064 }
10065
10066 /* Return true iff there's any nonzero view number in the loc list.
10067
10068 ??? When views are not enabled, we'll often extend a single range
10069 to the entire function, so that we emit a single location
10070 expression rather than a location list. With views, even with a
10071 single range, we'll output a list if start or end have a nonzero
10072 view. If we change this, we may want to stop splitting a single
10073 range in dw_loc_list just because of a nonzero view, even if it
10074 straddles across hot/cold partitions. */
10075
10076 static bool
10077 loc_list_has_views (dw_loc_list_ref list)
10078 {
10079 if (!debug_variable_location_views)
10080 return false;
10081
10082 for (dw_loc_list_ref loc = list;
10083 loc != NULL; loc = loc->dw_loc_next)
10084 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10085 return true;
10086
10087 return false;
10088 }
10089
10090 /* Generate a new internal symbol for this location list node, if it
10091 hasn't got one yet. */
10092
10093 static inline void
10094 gen_llsym (dw_loc_list_ref list)
10095 {
10096 gcc_assert (!list->ll_symbol);
10097 list->ll_symbol = gen_internal_sym ("LLST");
10098
10099 if (!loc_list_has_views (list))
10100 return;
10101
10102 if (dwarf2out_locviews_in_attribute ())
10103 {
10104 /* Use the same label_num for the view list. */
10105 label_num--;
10106 list->vl_symbol = gen_internal_sym ("LVUS");
10107 }
10108 else
10109 list->vl_symbol = list->ll_symbol;
10110 }
10111
10112 /* Generate a symbol for the list, but only if we really want to emit
10113 it as a list. */
10114
10115 static inline void
10116 maybe_gen_llsym (dw_loc_list_ref list)
10117 {
10118 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10119 return;
10120
10121 gen_llsym (list);
10122 }
10123
10124 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10125 NULL, don't consider size of the location expression. If we're not
10126 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10127 representation in *SIZEP. */
10128
10129 static bool
10130 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10131 {
10132 /* Don't output an entry that starts and ends at the same address. */
10133 if (strcmp (curr->begin, curr->end) == 0
10134 && curr->vbegin == curr->vend && !curr->force)
10135 return true;
10136
10137 if (!sizep)
10138 return false;
10139
10140 unsigned long size = size_of_locs (curr->expr);
10141
10142 /* If the expression is too large, drop it on the floor. We could
10143 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10144 in the expression, but >= 64KB expressions for a single value
10145      in a single range are unlikely to be very useful.  */
10146 if (dwarf_version < 5 && size > 0xffff)
10147 return true;
10148
10149 *sizep = size;
10150
10151 return false;
10152 }
10153
10154 /* Output a view pair loclist entry for CURR, if it requires one. */
10155
10156 static void
10157 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10158 {
10159 if (!dwarf2out_locviews_in_loclist ())
10160 return;
10161
10162 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10163 return;
10164
10165 #ifdef DW_LLE_view_pair
10166 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10167
10168 if (dwarf2out_as_locview_support)
10169 {
10170 if (ZERO_VIEW_P (curr->vbegin))
10171 dw2_asm_output_data_uleb128 (0, "Location view begin");
10172 else
10173 {
10174 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10175 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10176 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10177 }
10178
10179 if (ZERO_VIEW_P (curr->vend))
10180 dw2_asm_output_data_uleb128 (0, "Location view end");
10181 else
10182 {
10183 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10184 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10185 dw2_asm_output_symname_uleb128 (label, "Location view end");
10186 }
10187 }
10188 else
10189 {
10190 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10191 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10192 }
10193 #endif /* DW_LLE_view_pair */
10194
10195 return;
10196 }
10197
10198 /* Output the location list given to us. */
10199
10200 static void
10201 output_loc_list (dw_loc_list_ref list_head)
10202 {
10203 int vcount = 0, lcount = 0;
10204
10205 if (list_head->emitted)
10206 return;
10207 list_head->emitted = true;
10208
10209 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10210 {
10211 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10212
10213 for (dw_loc_list_ref curr = list_head; curr != NULL;
10214 curr = curr->dw_loc_next)
10215 {
10216 unsigned long size;
10217
10218 if (skip_loc_list_entry (curr, &size))
10219 continue;
10220
10221 vcount++;
10222
10223 /* ?? dwarf_split_debug_info? */
10224 if (dwarf2out_as_locview_support)
10225 {
10226 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10227
10228 if (!ZERO_VIEW_P (curr->vbegin))
10229 {
10230 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10231 dw2_asm_output_symname_uleb128 (label,
10232 "View list begin (%s)",
10233 list_head->vl_symbol);
10234 }
10235 else
10236 dw2_asm_output_data_uleb128 (0,
10237 "View list begin (%s)",
10238 list_head->vl_symbol);
10239
10240 if (!ZERO_VIEW_P (curr->vend))
10241 {
10242 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10243 dw2_asm_output_symname_uleb128 (label,
10244 "View list end (%s)",
10245 list_head->vl_symbol);
10246 }
10247 else
10248 dw2_asm_output_data_uleb128 (0,
10249 "View list end (%s)",
10250 list_head->vl_symbol);
10251 }
10252 else
10253 {
10254 dw2_asm_output_data_uleb128 (curr->vbegin,
10255 "View list begin (%s)",
10256 list_head->vl_symbol);
10257 dw2_asm_output_data_uleb128 (curr->vend,
10258 "View list end (%s)",
10259 list_head->vl_symbol);
10260 }
10261 }
10262 }
10263
10264 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10265
10266 const char *last_section = NULL;
10267 const char *base_label = NULL;
10268
10269 /* Walk the location list, and output each range + expression. */
10270 for (dw_loc_list_ref curr = list_head; curr != NULL;
10271 curr = curr->dw_loc_next)
10272 {
10273 unsigned long size;
10274
10275 /* Skip this entry? If we skip it here, we must skip it in the
10276 view list above as well. */
10277 if (skip_loc_list_entry (curr, &size))
10278 continue;
10279
10280 lcount++;
10281
10282 if (dwarf_version >= 5)
10283 {
10284 if (dwarf_split_debug_info)
10285 {
10286 dwarf2out_maybe_output_loclist_view_pair (curr);
10287 	      /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
10288 uleb128 index into .debug_addr and uleb128 length. */
10289 dw2_asm_output_data (1, DW_LLE_startx_length,
10290 "DW_LLE_startx_length (%s)",
10291 list_head->ll_symbol);
10292 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10293 "Location list range start index "
10294 "(%s)", curr->begin);
10295 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10296 For that case we probably need to emit DW_LLE_startx_endx,
10297 but we'd need 2 .debug_addr entries rather than just one. */
10298 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10299 "Location list length (%s)",
10300 list_head->ll_symbol);
10301 }
10302 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10303 {
10304 dwarf2out_maybe_output_loclist_view_pair (curr);
10305 /* If all code is in .text section, the base address is
10306 already provided by the CU attributes. Use
10307 DW_LLE_offset_pair where both addresses are uleb128 encoded
10308 offsets against that base. */
10309 dw2_asm_output_data (1, DW_LLE_offset_pair,
10310 "DW_LLE_offset_pair (%s)",
10311 list_head->ll_symbol);
10312 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10313 "Location list begin address (%s)",
10314 list_head->ll_symbol);
10315 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10316 "Location list end address (%s)",
10317 list_head->ll_symbol);
10318 }
10319 else if (HAVE_AS_LEB128)
10320 {
10321 /* Otherwise, find out how many consecutive entries could share
10322 the same base entry. If just one, emit DW_LLE_start_length,
10323 otherwise emit DW_LLE_base_address for the base address
10324 followed by a series of DW_LLE_offset_pair. */
10325 if (last_section == NULL || curr->section != last_section)
10326 {
10327 dw_loc_list_ref curr2;
10328 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10329 curr2 = curr2->dw_loc_next)
10330 {
10331 if (strcmp (curr2->begin, curr2->end) == 0
10332 && !curr2->force)
10333 continue;
10334 break;
10335 }
10336 if (curr2 == NULL || curr->section != curr2->section)
10337 last_section = NULL;
10338 else
10339 {
10340 last_section = curr->section;
10341 base_label = curr->begin;
10342 dw2_asm_output_data (1, DW_LLE_base_address,
10343 "DW_LLE_base_address (%s)",
10344 list_head->ll_symbol);
10345 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10346 "Base address (%s)",
10347 list_head->ll_symbol);
10348 }
10349 }
10350 /* Only one entry with the same base address. Use
10351 DW_LLE_start_length with absolute address and uleb128
10352 length. */
10353 if (last_section == NULL)
10354 {
10355 dwarf2out_maybe_output_loclist_view_pair (curr);
10356 dw2_asm_output_data (1, DW_LLE_start_length,
10357 "DW_LLE_start_length (%s)",
10358 list_head->ll_symbol);
10359 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10360 "Location list begin address (%s)",
10361 list_head->ll_symbol);
10362 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10363 "Location list length "
10364 "(%s)", list_head->ll_symbol);
10365 }
10366 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10367 DW_LLE_base_address. */
10368 else
10369 {
10370 dwarf2out_maybe_output_loclist_view_pair (curr);
10371 dw2_asm_output_data (1, DW_LLE_offset_pair,
10372 "DW_LLE_offset_pair (%s)",
10373 list_head->ll_symbol);
10374 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10375 "Location list begin address "
10376 "(%s)", list_head->ll_symbol);
10377 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10378 "Location list end address "
10379 "(%s)", list_head->ll_symbol);
10380 }
10381 }
10382 	  /* The assembler does not support the .uleb128 directive.  Emit
10383 DW_LLE_start_end with a pair of absolute addresses. */
10384 else
10385 {
10386 dwarf2out_maybe_output_loclist_view_pair (curr);
10387 dw2_asm_output_data (1, DW_LLE_start_end,
10388 "DW_LLE_start_end (%s)",
10389 list_head->ll_symbol);
10390 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10391 "Location list begin address (%s)",
10392 list_head->ll_symbol);
10393 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10394 "Location list end address (%s)",
10395 list_head->ll_symbol);
10396 }
10397 }
10398 else if (dwarf_split_debug_info)
10399 {
10400 	  /* For -gsplit-dwarf -gdwarf-{2,3,4} emit an index into .debug_addr
10401 	     and a 4-byte length.  */
10402 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10403 "Location list start/length entry (%s)",
10404 list_head->ll_symbol);
10405 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10406 "Location list range start index (%s)",
10407 curr->begin);
10408 /* The length field is 4 bytes. If we ever need to support
10409 an 8-byte length, we can add a new DW_LLE code or fall back
10410 to DW_LLE_GNU_start_end_entry. */
10411 dw2_asm_output_delta (4, curr->end, curr->begin,
10412 "Location list range length (%s)",
10413 list_head->ll_symbol);
10414 }
10415 else if (!have_multiple_function_sections)
10416 {
10417 /* Pair of relative addresses against start of text section. */
10418 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10419 "Location list begin address (%s)",
10420 list_head->ll_symbol);
10421 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10422 "Location list end address (%s)",
10423 list_head->ll_symbol);
10424 }
10425 else
10426 {
10427 /* Pair of absolute addresses. */
10428 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10429 "Location list begin address (%s)",
10430 list_head->ll_symbol);
10431 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10432 "Location list end address (%s)",
10433 list_head->ll_symbol);
10434 }
10435
10436 /* Output the block length for this list of location operations. */
10437 if (dwarf_version >= 5)
10438 dw2_asm_output_data_uleb128 (size, "Location expression size");
10439 else
10440 {
10441 gcc_assert (size <= 0xffff);
10442 dw2_asm_output_data (2, size, "Location expression size");
10443 }
10444
10445 output_loc_sequence (curr->expr, -1);
10446 }
10447
10448 /* And finally list termination. */
10449 if (dwarf_version >= 5)
10450 dw2_asm_output_data (1, DW_LLE_end_of_list,
10451 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10452 else if (dwarf_split_debug_info)
10453 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10454 "Location list terminator (%s)",
10455 list_head->ll_symbol);
10456 else
10457 {
10458 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10459 "Location list terminator begin (%s)",
10460 list_head->ll_symbol);
10461 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10462 "Location list terminator end (%s)",
10463 list_head->ll_symbol);
10464 }
10465
10466 gcc_assert (!list_head->vl_symbol
10467 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10468 }
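
/* To summarize the DWARF 5 entry kinds chosen above: DW_LLE_startx_length
   for -gsplit-dwarf, DW_LLE_offset_pair when all code is in a single text
   section, DW_LLE_base_address followed by DW_LLE_offset_pair entries when
   consecutive ranges share a section, DW_LLE_start_length for an isolated
   range, and DW_LLE_start_end when the assembler lacks .uleb128 support;
   every list ends with DW_LLE_end_of_list.  */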
10469
10470 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10471 section. Emit a relocated reference if val_entry is NULL, otherwise,
10472 emit an indirect reference. */
10473
10474 static void
10475 output_range_list_offset (dw_attr_node *a)
10476 {
10477 const char *name = dwarf_attr_name (a->dw_attr);
10478
10479 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10480 {
10481 if (dwarf_version >= 5)
10482 {
10483 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10484 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10485 debug_ranges_section, "%s", name);
10486 }
10487 else
10488 {
10489 char *p = strchr (ranges_section_label, '\0');
10490 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10491 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10492 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10493 debug_ranges_section, "%s", name);
10494 *p = '\0';
10495 }
10496 }
10497 else if (dwarf_version >= 5)
10498 {
10499 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10500 gcc_assert (rnglist_idx);
10501 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10502 }
10503 else
10504 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10505 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10506 "%s (offset from %s)", name, ranges_section_label);
10507 }
10508
10509 /* Output the offset into the debug_loc section. */
10510
10511 static void
10512 output_loc_list_offset (dw_attr_node *a)
10513 {
10514 char *sym = AT_loc_list (a)->ll_symbol;
10515
10516 gcc_assert (sym);
10517 if (!dwarf_split_debug_info)
10518 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10519 "%s", dwarf_attr_name (a->dw_attr));
10520 else if (dwarf_version >= 5)
10521 {
10522 gcc_assert (AT_loc_list (a)->num_assigned);
10523 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10524 dwarf_attr_name (a->dw_attr),
10525 sym);
10526 }
10527 else
10528 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10529 "%s", dwarf_attr_name (a->dw_attr));
10530 }
10531
10532 /* Output the offset of a view list into the debug_loc section.  */
10533
10534 static void
10535 output_view_list_offset (dw_attr_node *a)
10536 {
10537 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10538
10539 gcc_assert (sym);
10540 if (dwarf_split_debug_info)
10541 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10542 "%s", dwarf_attr_name (a->dw_attr));
10543 else
10544 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10545 "%s", dwarf_attr_name (a->dw_attr));
10546 }
10547
10548 /* Output an attribute's index or value appropriately. */
10549
10550 static void
10551 output_attr_index_or_value (dw_attr_node *a)
10552 {
10553 const char *name = dwarf_attr_name (a->dw_attr);
10554
10555 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10556 {
10557 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10558 return;
10559 }
10560 switch (AT_class (a))
10561 {
10562 case dw_val_class_addr:
10563 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10564 break;
10565 case dw_val_class_high_pc:
10566 case dw_val_class_lbl_id:
10567 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10568 break;
10569 default:
10570 gcc_unreachable ();
10571 }
10572 }
10573
10574 /* Output a type signature. */
10575
10576 static inline void
10577 output_signature (const char *sig, const char *name)
10578 {
10579 int i;
10580
10581 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10582 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10583 }
10584
10585 /* Output a discriminant value. */
10586
10587 static inline void
10588 output_discr_value (dw_discr_value *discr_value, const char *name)
10589 {
10590 if (discr_value->pos)
10591 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10592 else
10593 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10594 }
10595
10596 /* Output the DIE and its attributes. Called recursively to generate
10597 the definitions of each child DIE. */
10598
10599 static void
10600 output_die (dw_die_ref die)
10601 {
10602 dw_attr_node *a;
10603 dw_die_ref c;
10604 unsigned long size;
10605 unsigned ix;
10606
10607 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10608 (unsigned long)die->die_offset,
10609 dwarf_tag_name (die->die_tag));
10610
10611 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10612 {
10613 const char *name = dwarf_attr_name (a->dw_attr);
10614
10615 switch (AT_class (a))
10616 {
10617 case dw_val_class_addr:
10618 output_attr_index_or_value (a);
10619 break;
10620
10621 case dw_val_class_offset:
10622 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10623 "%s", name);
10624 break;
10625
10626 case dw_val_class_range_list:
10627 output_range_list_offset (a);
10628 break;
10629
10630 case dw_val_class_loc:
10631 size = size_of_locs (AT_loc (a));
10632
10633 /* Output the block length for this list of location operations. */
10634 if (dwarf_version >= 4)
10635 dw2_asm_output_data_uleb128 (size, "%s", name);
10636 else
10637 dw2_asm_output_data (constant_size (size), size, "%s", name);
10638
10639 output_loc_sequence (AT_loc (a), -1);
10640 break;
10641
10642 case dw_val_class_const:
10643 	  /* ??? It would be slightly more efficient to use a scheme like the
10644 	     one used for unsigned constants below, but gdb 4.x does not sign
10645 extend. Gdb 5.x does sign extend. */
10646 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10647 break;
10648
10649 case dw_val_class_unsigned_const:
10650 {
10651 int csize = constant_size (AT_unsigned (a));
10652 if (dwarf_version == 3
10653 && a->dw_attr == DW_AT_data_member_location
10654 && csize >= 4)
10655 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10656 else
10657 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10658 }
10659 break;
10660
10661 case dw_val_class_symview:
10662 {
10663 int vsize;
10664 if (symview_upper_bound <= 0xff)
10665 vsize = 1;
10666 else if (symview_upper_bound <= 0xffff)
10667 vsize = 2;
10668 else if (symview_upper_bound <= 0xffffffff)
10669 vsize = 4;
10670 else
10671 vsize = 8;
10672 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10673 "%s", name);
10674 }
10675 break;
10676
10677 case dw_val_class_const_implicit:
10678 if (flag_debug_asm)
10679 fprintf (asm_out_file, "\t\t\t%s %s ("
10680 HOST_WIDE_INT_PRINT_DEC ")\n",
10681 ASM_COMMENT_START, name, AT_int (a));
10682 break;
10683
10684 case dw_val_class_unsigned_const_implicit:
10685 if (flag_debug_asm)
10686 fprintf (asm_out_file, "\t\t\t%s %s ("
10687 HOST_WIDE_INT_PRINT_HEX ")\n",
10688 ASM_COMMENT_START, name, AT_unsigned (a));
10689 break;
10690
10691 case dw_val_class_const_double:
10692 {
10693 unsigned HOST_WIDE_INT first, second;
10694
10695 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10696 dw2_asm_output_data (1,
10697 HOST_BITS_PER_DOUBLE_INT
10698 / HOST_BITS_PER_CHAR,
10699 NULL);
10700
10701 if (WORDS_BIG_ENDIAN)
10702 {
10703 first = a->dw_attr_val.v.val_double.high;
10704 second = a->dw_attr_val.v.val_double.low;
10705 }
10706 else
10707 {
10708 first = a->dw_attr_val.v.val_double.low;
10709 second = a->dw_attr_val.v.val_double.high;
10710 }
10711
10712 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10713 first, "%s", name);
10714 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10715 second, NULL);
10716 }
10717 break;
10718
10719 case dw_val_class_wide_int:
10720 {
10721 int i;
10722 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10723 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10724 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10725 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10726 * l, NULL);
10727
10728 if (WORDS_BIG_ENDIAN)
10729 for (i = len - 1; i >= 0; --i)
10730 {
10731 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10732 "%s", name);
10733 name = "";
10734 }
10735 else
10736 for (i = 0; i < len; ++i)
10737 {
10738 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10739 "%s", name);
10740 name = "";
10741 }
10742 }
10743 break;
10744
10745 case dw_val_class_vec:
10746 {
10747 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10748 unsigned int len = a->dw_attr_val.v.val_vec.length;
10749 unsigned int i;
10750 unsigned char *p;
10751
10752 dw2_asm_output_data (constant_size (len * elt_size),
10753 len * elt_size, "%s", name);
10754 if (elt_size > sizeof (HOST_WIDE_INT))
10755 {
10756 elt_size /= 2;
10757 len *= 2;
10758 }
10759 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10760 i < len;
10761 i++, p += elt_size)
10762 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10763 "fp or vector constant word %u", i);
10764 break;
10765 }
10766
10767 case dw_val_class_flag:
10768 if (dwarf_version >= 4)
10769 {
10770 /* Currently all add_AT_flag calls pass in 1 as last argument,
10771 so DW_FORM_flag_present can be used. If that ever changes,
10772 we'll need to use DW_FORM_flag and have some optimization
10773 in build_abbrev_table that will change those to
10774 DW_FORM_flag_present if it is set to 1 in all DIEs using
10775 the same abbrev entry. */
10776 gcc_assert (AT_flag (a) == 1);
10777 if (flag_debug_asm)
10778 fprintf (asm_out_file, "\t\t\t%s %s\n",
10779 ASM_COMMENT_START, name);
10780 break;
10781 }
10782 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10783 break;
10784
10785 case dw_val_class_loc_list:
10786 output_loc_list_offset (a);
10787 break;
10788
10789 case dw_val_class_view_list:
10790 output_view_list_offset (a);
10791 break;
10792
10793 case dw_val_class_die_ref:
10794 if (AT_ref_external (a))
10795 {
10796 if (AT_ref (a)->comdat_type_p)
10797 {
10798 comdat_type_node *type_node
10799 = AT_ref (a)->die_id.die_type_node;
10800
10801 gcc_assert (type_node);
10802 output_signature (type_node->signature, name);
10803 }
10804 else
10805 {
10806 const char *sym = AT_ref (a)->die_id.die_symbol;
10807 int size;
10808
10809 gcc_assert (sym);
10810 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10811 length, whereas in DWARF3 it's always sized as an
10812 offset. */
10813 if (dwarf_version == 2)
10814 size = DWARF2_ADDR_SIZE;
10815 else
10816 size = DWARF_OFFSET_SIZE;
10817 	      /* ??? We cannot unconditionally output die_offset whenever it
10818 		 is non-zero - others might create references to those
10819 		 DIEs via symbols.
10820 		 And we do not clear its DIE offset after outputting it
10821 		 (the label refers to the actual DIE, not to the DWARF
10822 		 CU unit header, which is what the label would have to
10823 		 denote for label + offset to be the correct thing to do).
10824 		 ??? This is the reason for the with_offset flag.  */
10825 if (AT_ref (a)->with_offset)
10826 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10827 debug_info_section, "%s", name);
10828 else
10829 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10830 name);
10831 }
10832 }
10833 else
10834 {
10835 gcc_assert (AT_ref (a)->die_offset);
10836 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10837 "%s", name);
10838 }
10839 break;
10840
10841 case dw_val_class_fde_ref:
10842 {
10843 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10844
10845 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10846 a->dw_attr_val.v.val_fde_index * 2);
10847 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10848 "%s", name);
10849 }
10850 break;
10851
10852 case dw_val_class_vms_delta:
10853 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10854 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10855 AT_vms_delta2 (a), AT_vms_delta1 (a),
10856 "%s", name);
10857 #else
10858 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10859 AT_vms_delta2 (a), AT_vms_delta1 (a),
10860 "%s", name);
10861 #endif
10862 break;
10863
10864 case dw_val_class_lbl_id:
10865 output_attr_index_or_value (a);
10866 break;
10867
10868 case dw_val_class_lineptr:
10869 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10870 debug_line_section, "%s", name);
10871 break;
10872
10873 case dw_val_class_macptr:
10874 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10875 debug_macinfo_section, "%s", name);
10876 break;
10877
10878 case dw_val_class_loclistsptr:
10879 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10880 debug_loc_section, "%s", name);
10881 break;
10882
10883 case dw_val_class_str:
10884 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10885 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10886 a->dw_attr_val.v.val_str->label,
10887 debug_str_section,
10888 "%s: \"%s\"", name, AT_string (a));
10889 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10890 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10891 a->dw_attr_val.v.val_str->label,
10892 debug_line_str_section,
10893 "%s: \"%s\"", name, AT_string (a));
10894 else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (DW_FORM_strx))
10895 dw2_asm_output_data_uleb128 (AT_index (a),
10896 "%s: \"%s\"", name, AT_string (a));
10897 else
10898 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10899 break;
10900
10901 case dw_val_class_file:
10902 {
10903 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10904
10905 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10906 a->dw_attr_val.v.val_file->filename);
10907 break;
10908 }
10909
10910 case dw_val_class_file_implicit:
10911 if (flag_debug_asm)
10912 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10913 ASM_COMMENT_START, name,
10914 maybe_emit_file (a->dw_attr_val.v.val_file),
10915 a->dw_attr_val.v.val_file->filename);
10916 break;
10917
10918 case dw_val_class_data8:
10919 {
10920 int i;
10921
10922 for (i = 0; i < 8; i++)
10923 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10924 i == 0 ? "%s" : NULL, name);
10925 break;
10926 }
10927
10928 case dw_val_class_high_pc:
10929 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10930 get_AT_low_pc (die), "DW_AT_high_pc");
10931 break;
10932
10933 case dw_val_class_discr_value:
10934 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10935 break;
10936
10937 case dw_val_class_discr_list:
10938 {
10939 dw_discr_list_ref list = AT_discr_list (a);
10940 const int size = size_of_discr_list (list);
10941
10942 /* This is a block, so output its length first. */
10943 dw2_asm_output_data (constant_size (size), size,
10944 "%s: block size", name);
10945
10946 for (; list != NULL; list = list->dw_discr_next)
10947 {
10948 /* One byte for the discriminant value descriptor, and then as
10949 many LEB128 numbers as required. */
10950 if (list->dw_discr_range)
10951 dw2_asm_output_data (1, DW_DSC_range,
10952 "%s: DW_DSC_range", name);
10953 else
10954 dw2_asm_output_data (1, DW_DSC_label,
10955 "%s: DW_DSC_label", name);
10956
10957 output_discr_value (&list->dw_discr_lower_bound, name);
10958 if (list->dw_discr_range)
10959 output_discr_value (&list->dw_discr_upper_bound, name);
10960 }
10961 break;
10962 }
10963
10964 default:
10965 gcc_unreachable ();
10966 }
10967 }
10968
10969 FOR_EACH_CHILD (die, c, output_die (c));
10970
10971 /* Add null byte to terminate sibling list. */
10972 if (die->die_child != NULL)
10973 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10974 (unsigned long) die->die_offset);
10975 }
10976
10977 /* Output the dwarf version number. */
10978
10979 static void
10980 output_dwarf_version ()
10981 {
10982 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10983 views in loclist. That will change eventually. */
10984 if (dwarf_version == 6)
10985 {
10986 static bool once;
10987 if (!once)
10988 {
10989 warning (0, "%<-gdwarf-6%> is output as version 5 with "
10990 "incompatibilities");
10991 once = true;
10992 }
10993 dw2_asm_output_data (2, 5, "DWARF version number");
10994 }
10995 else
10996 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10997 }
10998
10999 /* Output the compilation unit header that appears at the beginning of
11000    the .debug_info section and precedes the DIE descriptions.  */
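/* For reference, the header emitted below is laid out as:
     DWARF 2-4:  unit_length, version, debug_abbrev_offset, address_size
     DWARF 5:    unit_length, version, unit_type, address_size,
		 debug_abbrev_offset
   Unit-type specific fields such as the DWO id or the type signature are
   emitted by the callers after this header.  */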
11001
11002 static void
11003 output_compilation_unit_header (enum dwarf_unit_type ut)
11004 {
11005 if (!XCOFF_DEBUGGING_INFO)
11006 {
11007 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11008 dw2_asm_output_data (4, 0xffffffff,
11009 "Initial length escape value indicating 64-bit DWARF extension");
11010 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11011 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
11012 "Length of Compilation Unit Info");
11013 }
11014
11015 output_dwarf_version ();
11016 if (dwarf_version >= 5)
11017 {
11018 const char *name;
11019 switch (ut)
11020 {
11021 case DW_UT_compile: name = "DW_UT_compile"; break;
11022 case DW_UT_type: name = "DW_UT_type"; break;
11023 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
11024 case DW_UT_split_type: name = "DW_UT_split_type"; break;
11025 default: gcc_unreachable ();
11026 }
11027 dw2_asm_output_data (1, ut, "%s", name);
11028 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11029 }
11030 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
11031 debug_abbrev_section,
11032 "Offset Into Abbrev. Section");
11033 if (dwarf_version < 5)
11034 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11035 }
11036
11037 /* Output the compilation unit DIE and its children. */
11038
11039 static void
11040 output_comp_unit (dw_die_ref die, int output_if_empty,
11041 const unsigned char *dwo_id)
11042 {
11043 const char *secname, *oldsym;
11044 char *tmp;
11045
11046   /* Unless we are outputting the main CU, we may throw away empty ones.  */
11047 if (!output_if_empty && die->die_child == NULL)
11048 return;
11049
11050 /* Even if there are no children of this DIE, we must output the information
11051 about the compilation unit. Otherwise, on an empty translation unit, we
11052 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
11053 will then complain when examining the file. First mark all the DIEs in
11054 this CU so we know which get local refs. */
11055 mark_dies (die);
11056
11057 external_ref_hash_type *extern_map = optimize_external_refs (die);
11058
11059   /* For now, optimize only the main CU; in order to optimize the rest
11060      we'd need to see all of them earlier.  Leave the rest for post-linking
11061      tools like DWZ.  */
11062 if (die == comp_unit_die ())
11063 abbrev_opt_start = vec_safe_length (abbrev_die_table);
11064
11065 build_abbrev_table (die, extern_map);
11066
11067 optimize_abbrev_table ();
11068
11069 delete extern_map;
11070
11071 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11072 next_die_offset = (dwo_id
11073 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11074 : DWARF_COMPILE_UNIT_HEADER_SIZE);
11075 calc_die_sizes (die);
11076
11077 oldsym = die->die_id.die_symbol;
11078 if (oldsym && die->comdat_type_p)
11079 {
11080 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11081
11082 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11083 secname = tmp;
11084 die->die_id.die_symbol = NULL;
11085 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11086 }
11087 else
11088 {
11089 switch_to_section (debug_info_section);
11090 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11091 info_section_emitted = true;
11092 }
11093
11094 /* For LTO cross unit DIE refs we want a symbol on the start of the
11095 debuginfo section, not on the CU DIE. */
11096 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11097 {
11098 /* ??? No way to get visibility assembled without a decl. */
11099 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11100 get_identifier (oldsym), char_type_node);
11101 TREE_PUBLIC (decl) = true;
11102 TREE_STATIC (decl) = true;
11103 DECL_ARTIFICIAL (decl) = true;
11104 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11105 DECL_VISIBILITY_SPECIFIED (decl) = true;
11106 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11107 #ifdef ASM_WEAKEN_LABEL
11108 /* We prefer a .weak because that handles duplicates from duplicate
11109 archive members in a graceful way. */
11110 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11111 #else
11112 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11113 #endif
11114 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11115 }
11116
11117 /* Output debugging information. */
11118 output_compilation_unit_header (dwo_id
11119 ? DW_UT_split_compile : DW_UT_compile);
11120 if (dwarf_version >= 5)
11121 {
11122 if (dwo_id != NULL)
11123 for (int i = 0; i < 8; i++)
11124 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11125 }
11126 output_die (die);
11127
11128 /* Leave the marks on the main CU, so we can check them in
11129 output_pubnames. */
11130 if (oldsym)
11131 {
11132 unmark_dies (die);
11133 die->die_id.die_symbol = oldsym;
11134 }
11135 }
11136
11137 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11138 and .debug_pubtypes. This is configured per-target, but can be
11139 overridden by the -gpubnames or -gno-pubnames options. */
11140
11141 static inline bool
11142 want_pubnames (void)
11143 {
11144 if (debug_info_level <= DINFO_LEVEL_TERSE
11145 /* Names and types go to the early debug part only. */
11146 || in_lto_p)
11147 return false;
11148 if (debug_generate_pub_sections != -1)
11149 return debug_generate_pub_sections;
11150 return targetm.want_debug_pub_sections;
11151 }
11152
11153 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11154
11155 static void
11156 add_AT_pubnames (dw_die_ref die)
11157 {
11158 if (want_pubnames ())
11159 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11160 }
11161
11162 /* Add a string attribute value to a skeleton DIE. */
11163
11164 static inline void
11165 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11166 const char *str)
11167 {
11168 dw_attr_node attr;
11169 struct indirect_string_node *node;
11170
11171 if (! skeleton_debug_str_hash)
11172 skeleton_debug_str_hash
11173 = hash_table<indirect_string_hasher>::create_ggc (10);
11174
11175 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11176 find_string_form (node);
11177 if (node->form == dwarf_FORM (DW_FORM_strx))
11178 node->form = DW_FORM_strp;
11179
11180 attr.dw_attr = attr_kind;
11181 attr.dw_attr_val.val_class = dw_val_class_str;
11182 attr.dw_attr_val.val_entry = NULL;
11183 attr.dw_attr_val.v.val_str = node;
11184 add_dwarf_attr (die, &attr);
11185 }
11186
11187 /* Helper function to generate top-level dies for skeleton debug_info and
11188 debug_types. */
11189
11190 static void
11191 add_top_level_skeleton_die_attrs (dw_die_ref die)
11192 {
11193 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11194 const char *comp_dir = comp_dir_string ();
11195
11196 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11197 if (comp_dir != NULL)
11198 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11199 add_AT_pubnames (die);
11200 if (addr_index_table != NULL && addr_index_table->size () > 0)
11201 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
11202 }
11203
11204 /* Output skeleton debug sections that point to the dwo file. */
11205
11206 static void
11207 output_skeleton_debug_sections (dw_die_ref comp_unit,
11208 const unsigned char *dwo_id)
11209 {
11210 /* These attributes will be found in the full debug_info section. */
11211 remove_AT (comp_unit, DW_AT_producer);
11212 remove_AT (comp_unit, DW_AT_language);
11213
11214 switch_to_section (debug_skeleton_info_section);
11215 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11216
11217   /* Produce the skeleton compilation-unit header.  This one differs enough
11218      from a normal CU header that it's better not to call
11219      output_compilation_unit_header.  */
11220 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11221 dw2_asm_output_data (4, 0xffffffff,
11222 "Initial length escape value indicating 64-bit "
11223 "DWARF extension");
11224
11225 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11226 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11227 - DWARF_INITIAL_LENGTH_SIZE
11228 + size_of_die (comp_unit),
11229 "Length of Compilation Unit Info");
11230 output_dwarf_version ();
11231 if (dwarf_version >= 5)
11232 {
11233 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11234 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11235 }
11236 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11237 debug_skeleton_abbrev_section,
11238 "Offset Into Abbrev. Section");
11239 if (dwarf_version < 5)
11240 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11241 else
11242 for (int i = 0; i < 8; i++)
11243 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11244
11245 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11246 output_die (comp_unit);
11247
11248 /* Build the skeleton debug_abbrev section. */
11249 switch_to_section (debug_skeleton_abbrev_section);
11250 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11251
11252 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11253
11254 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11255 }
11256
11257 /* Output a comdat type unit DIE and its children. */
11258
11259 static void
11260 output_comdat_type_unit (comdat_type_node *node,
11261 bool early_lto_debug ATTRIBUTE_UNUSED)
11262 {
11263 const char *secname;
11264 char *tmp;
11265 int i;
11266 #if defined (OBJECT_FORMAT_ELF)
11267 tree comdat_key;
11268 #endif
11269
11270 /* First mark all the DIEs in this CU so we know which get local refs. */
11271 mark_dies (node->root_die);
11272
11273 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11274
11275 build_abbrev_table (node->root_die, extern_map);
11276
11277 delete extern_map;
11278 extern_map = NULL;
11279
11280 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11281 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11282 calc_die_sizes (node->root_die);
11283
11284 #if defined (OBJECT_FORMAT_ELF)
11285 if (dwarf_version >= 5)
11286 {
11287 if (!dwarf_split_debug_info)
11288 secname = early_lto_debug ? DEBUG_LTO_INFO_SECTION : DEBUG_INFO_SECTION;
11289 else
11290 secname = (early_lto_debug
11291 ? DEBUG_LTO_DWO_INFO_SECTION : DEBUG_DWO_INFO_SECTION);
11292 }
11293 else if (!dwarf_split_debug_info)
11294 secname = early_lto_debug ? ".gnu.debuglto_.debug_types" : ".debug_types";
11295 else
11296 secname = (early_lto_debug
11297 ? ".gnu.debuglto_.debug_types.dwo" : ".debug_types.dwo");
11298
11299 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11300 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11301 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11302 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11303 comdat_key = get_identifier (tmp);
11304 targetm.asm_out.named_section (secname,
11305 SECTION_DEBUG | SECTION_LINKONCE,
11306 comdat_key);
11307 #else
11308 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11309 sprintf (tmp, (dwarf_version >= 5
11310 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11311 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11312 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11313 secname = tmp;
11314 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11315 #endif
11316
11317 /* Output debugging information. */
11318 output_compilation_unit_header (dwarf_split_debug_info
11319 ? DW_UT_split_type : DW_UT_type);
11320 output_signature (node->signature, "Type Signature");
11321 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11322 "Offset to Type DIE");
11323 output_die (node->root_die);
11324
11325 unmark_dies (node->root_die);
11326 }
11327
11328 /* Return the DWARF2/3 pubname associated with a decl. */
11329
11330 static const char *
11331 dwarf2_name (tree decl, int scope)
11332 {
11333 if (DECL_NAMELESS (decl))
11334 return NULL;
11335 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11336 }
11337
11338 /* Add a new entry to .debug_pubnames if appropriate. */
11339
11340 static void
11341 add_pubname_string (const char *str, dw_die_ref die)
11342 {
11343 pubname_entry e;
11344
11345 e.die = die;
11346 e.name = xstrdup (str);
11347 vec_safe_push (pubname_table, e);
11348 }
11349
11350 static void
11351 add_pubname (tree decl, dw_die_ref die)
11352 {
11353 if (!want_pubnames ())
11354 return;
11355
11356 /* Don't add items to the table when we expect that the consumer will have
11357 just read the enclosing die. For example, if the consumer is looking at a
11358 class_member, it will either be inside the class already, or will have just
11359 looked up the class to find the member. Either way, searching the class is
11360 faster than searching the index. */
11361 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11362 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11363 {
11364 const char *name = dwarf2_name (decl, 1);
11365
11366 if (name)
11367 add_pubname_string (name, die);
11368 }
11369 }
11370
11371 /* Add an enumerator to the pubnames section. */
11372
11373 static void
11374 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11375 {
11376 pubname_entry e;
11377
11378 gcc_assert (scope_name);
11379 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11380 e.die = die;
11381 vec_safe_push (pubname_table, e);
11382 }
11383
11384 /* Add a new entry to .debug_pubtypes if appropriate. */
11385
11386 static void
11387 add_pubtype (tree decl, dw_die_ref die)
11388 {
11389 pubname_entry e;
11390
11391 if (!want_pubnames ())
11392 return;
11393
11394 if ((TREE_PUBLIC (decl)
11395 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11396 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11397 {
11398 tree scope = NULL;
11399 const char *scope_name = "";
11400 const char *sep = is_cxx () ? "::" : ".";
11401 const char *name;
11402
11403 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11404 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11405 {
11406 scope_name = lang_hooks.dwarf_name (scope, 1);
11407 if (scope_name != NULL && scope_name[0] != '\0')
11408 scope_name = concat (scope_name, sep, NULL);
11409 else
11410 scope_name = "";
11411 }
11412
11413 if (TYPE_P (decl))
11414 name = type_tag (decl);
11415 else
11416 name = lang_hooks.dwarf_name (decl, 1);
11417
11418 /* If we don't have a name for the type, there's no point in adding
11419 it to the table. */
11420 if (name != NULL && name[0] != '\0')
11421 {
11422 e.die = die;
11423 e.name = concat (scope_name, name, NULL);
11424 vec_safe_push (pubtype_table, e);
11425 }
11426
11427 /* Although it might be more consistent to add the pubinfo for the
11428 enumerators as their dies are created, they should only be added if the
11429 enum type meets the criteria above. So rather than re-check the parent
11430 enum type whenever an enumerator die is created, just output them all
11431 here. This isn't protected by the name conditional because anonymous
11432 enums don't have names. */
11433 if (die->die_tag == DW_TAG_enumeration_type)
11434 {
11435 dw_die_ref c;
11436
11437 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11438 }
11439 }
11440 }
11441
11442 /* Output a single entry in the pubnames table. */
11443
11444 static void
11445 output_pubname (dw_offset die_offset, pubname_entry *entry)
11446 {
11447 dw_die_ref die = entry->die;
11448 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11449
11450 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11451
11452 if (debug_generate_pub_sections == 2)
11453 {
11454 /* This logic follows gdb's method for determining the value of the flag
11455 byte. */
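	      /* The symbol kind and the static/global bit are stored in the
		 bits above the CU index (see gdb/gdb-index.h), so shifting
		 right by GDB_INDEX_CU_BITSIZE below leaves just the one flag
		 byte that is emitted.  */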
11456 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11457 switch (die->die_tag)
11458 {
11459 case DW_TAG_typedef:
11460 case DW_TAG_base_type:
11461 case DW_TAG_subrange_type:
11462 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11463 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11464 break;
11465 case DW_TAG_enumerator:
11466 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11467 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11468 if (!is_cxx ())
11469 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11470 break;
11471 case DW_TAG_subprogram:
11472 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11473 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11474 if (!is_ada ())
11475 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11476 break;
11477 case DW_TAG_constant:
11478 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11479 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11480 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11481 break;
11482 case DW_TAG_variable:
11483 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11484 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11485 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11486 break;
11487 case DW_TAG_namespace:
11488 case DW_TAG_imported_declaration:
11489 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11490 break;
11491 case DW_TAG_class_type:
11492 case DW_TAG_interface_type:
11493 case DW_TAG_structure_type:
11494 case DW_TAG_union_type:
11495 case DW_TAG_enumeration_type:
11496 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11497 if (!is_cxx ())
11498 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11499 break;
11500 default:
11501 /* An unusual tag. Leave the flag-byte empty. */
11502 break;
11503 }
11504 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11505 "GDB-index flags");
11506 }
11507
11508 dw2_asm_output_nstring (entry->name, -1, "external name");
11509 }
11510
11511
11512 /* Output the public names table used to speed up access to externally
11513 visible names; or the public types table used to find type definitions. */
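/* The layout emitted below is the DWARF 2/3 pubnames/pubtypes one:
   unit_length, version (2), offset and length of the corresponding CU in
   .debug_info, then a series of (DIE offset, NUL-terminated name) pairs
   terminated by a DIE offset of 0; a GDB-index flag byte is added to each
   pair when debug_generate_pub_sections == 2 (see output_pubname).  */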
11514
11515 static void
11516 output_pubnames (vec<pubname_entry, va_gc> *names)
11517 {
11518 unsigned i;
11519 unsigned long pubnames_length = size_of_pubnames (names);
11520 pubname_entry *pub;
11521
11522 if (!XCOFF_DEBUGGING_INFO)
11523 {
11524 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11525 dw2_asm_output_data (4, 0xffffffff,
11526 "Initial length escape value indicating 64-bit DWARF extension");
11527 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11528 "Pub Info Length");
11529 }
11530
11531 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11532 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11533
11534 if (dwarf_split_debug_info)
11535 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11536 debug_skeleton_info_section,
11537 "Offset of Compilation Unit Info");
11538 else
11539 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11540 debug_info_section,
11541 "Offset of Compilation Unit Info");
11542 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11543 "Compilation Unit Length");
11544
11545 FOR_EACH_VEC_ELT (*names, i, pub)
11546 {
11547 if (include_pubname_in_output (names, pub))
11548 {
11549 dw_offset die_offset = pub->die->die_offset;
11550
11551 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11552 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11553 gcc_assert (pub->die->die_mark);
11554
11555 /* If we're putting types in their own .debug_types sections,
11556 the .debug_pubtypes table will still point to the compile
11557 unit (not the type unit), so we want to use the offset of
11558 the skeleton DIE (if there is one). */
11559 if (pub->die->comdat_type_p && names == pubtype_table)
11560 {
11561 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11562
11563 if (type_node != NULL)
11564 die_offset = (type_node->skeleton_die != NULL
11565 ? type_node->skeleton_die->die_offset
11566 : comp_unit_die ()->die_offset);
11567 }
11568
11569 output_pubname (die_offset, pub);
11570 }
11571 }
11572
11573 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11574 }
11575
11576 /* Output public names and types tables if necessary. */
11577
11578 static void
11579 output_pubtables (void)
11580 {
11581 if (!want_pubnames () || !info_section_emitted)
11582 return;
11583
11584 switch_to_section (debug_pubnames_section);
11585 output_pubnames (pubname_table);
11586 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11587 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11588 simply won't look for the section. */
11589 switch_to_section (debug_pubtypes_section);
11590 output_pubnames (pubtype_table);
11591 }
11592
11593
11594 /* Output the information that goes into the .debug_aranges table.
11595 Namely, define the beginning and ending address range of the
11596 text section generated for this compilation unit. */
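/* The layout emitted below is: unit_length, version (2), offset of the
   corresponding CU in .debug_info, address size, segment size, padding so
   that the address/length pairs start at a multiple of twice the address
   size, then an (address, length) pair for each text range in use,
   terminated by a (0, 0) pair.  */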
11597
11598 static void
11599 output_aranges (void)
11600 {
11601 unsigned i;
11602 unsigned long aranges_length = size_of_aranges ();
11603
11604 if (!XCOFF_DEBUGGING_INFO)
11605 {
11606 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11607 dw2_asm_output_data (4, 0xffffffff,
11608 "Initial length escape value indicating 64-bit DWARF extension");
11609 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11610 "Length of Address Ranges Info");
11611 }
11612
11613 /* Version number for aranges is still 2, even up to DWARF5. */
11614 dw2_asm_output_data (2, 2, "DWARF aranges version");
11615 if (dwarf_split_debug_info)
11616 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11617 debug_skeleton_info_section,
11618 "Offset of Compilation Unit Info");
11619 else
11620 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11621 debug_info_section,
11622 "Offset of Compilation Unit Info");
11623 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11624 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11625
11626 /* We need to align to twice the pointer size here. */
11627 if (DWARF_ARANGES_PAD_SIZE)
11628 {
11629 	      /* Pad using 2 byte words so that the padding is correct for any
11630 		 pointer size.  */
11631 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11632 2 * DWARF2_ADDR_SIZE);
11633 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11634 dw2_asm_output_data (2, 0, NULL);
11635 }
11636
11637   /* It is necessary not to output these entries if the sections were
11638      not used; in that case the length will be 0 and the address may
11639      end up as 0 if the section is discarded by ld --gc-sections,
11640      leaving an invalid (0, 0) entry that can be confused with the
11641      terminator.  */
11642 if (text_section_used)
11643 {
11644 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11645 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11646 text_section_label, "Length");
11647 }
11648 if (cold_text_section_used)
11649 {
11650 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11651 "Address");
11652 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11653 cold_text_section_label, "Length");
11654 }
11655
11656 if (have_multiple_function_sections)
11657 {
11658 unsigned fde_idx;
11659 dw_fde_ref fde;
11660
11661 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11662 {
11663 if (DECL_IGNORED_P (fde->decl))
11664 continue;
11665 if (!fde->in_std_section)
11666 {
11667 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11668 "Address");
11669 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11670 fde->dw_fde_begin, "Length");
11671 }
11672 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11673 {
11674 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11675 "Address");
11676 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11677 fde->dw_fde_second_begin, "Length");
11678 }
11679 }
11680 }
11681
11682 /* Output the terminator words. */
11683 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11684 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11685 }
11686
11687 /* Add a new entry to .debug_ranges. Return its index into
11688 ranges_table vector. */
11689
11690 static unsigned int
11691 add_ranges_num (int num, bool maybe_new_sec)
11692 {
11693 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11694 vec_safe_push (ranges_table, r);
11695 return vec_safe_length (ranges_table) - 1;
11696 }
11697
11698 /* Add a new entry to .debug_ranges corresponding to a block, or a
11699 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11700 this entry might be in a different section from previous range. */
11701
11702 static unsigned int
11703 add_ranges (const_tree block, bool maybe_new_sec)
11704 {
11705 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11706 }
11707
11708 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11709    chain, or a middle entry of a chain that will be directly referred to.  */
11710
11711 static void
11712 note_rnglist_head (unsigned int offset)
11713 {
11714 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11715 return;
11716 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11717 }
11718
11719 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11720 When using dwarf_split_debug_info, address attributes in dies destined
11721 for the final executable should be direct references--setting the
11722 parameter force_direct ensures this behavior. */
11723
11724 static void
11725 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11726 bool *added, bool force_direct)
11727 {
11728 unsigned int in_use = vec_safe_length (ranges_by_label);
11729 unsigned int offset;
11730 dw_ranges_by_label rbl = { begin, end };
11731 vec_safe_push (ranges_by_label, rbl);
11732 offset = add_ranges_num (-(int)in_use - 1, true);
11733 if (!*added)
11734 {
11735 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11736 *added = true;
11737 note_rnglist_head (offset);
11738 }
11739 }
11740
11741 /* Emit .debug_ranges section. */
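/* Each range list below is a series of (begin, end) address pairs, written
   either as deltas from text_section_label (single text section) or as
   absolute addresses (multiple function sections), and terminated by a
   (0, 0) pair.  */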
11742
11743 static void
11744 output_ranges (void)
11745 {
11746 unsigned i;
11747 static const char *const start_fmt = "Offset %#x";
11748 const char *fmt = start_fmt;
11749 dw_ranges *r;
11750
11751 switch_to_section (debug_ranges_section);
11752 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11753 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11754 {
11755 int block_num = r->num;
11756
11757 if (block_num > 0)
11758 {
11759 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11760 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11761
11762 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11763 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11764
11765 /* If all code is in the text section, then the compilation
11766 unit base address defaults to DW_AT_low_pc, which is the
11767 base of the text section. */
11768 if (!have_multiple_function_sections)
11769 {
11770 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11771 text_section_label,
11772 fmt, i * 2 * DWARF2_ADDR_SIZE);
11773 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11774 text_section_label, NULL);
11775 }
11776
11777 /* Otherwise, the compilation unit base address is zero,
11778 which allows us to use absolute addresses, and not worry
11779 about whether the target supports cross-section
11780 arithmetic. */
11781 else
11782 {
11783 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11784 fmt, i * 2 * DWARF2_ADDR_SIZE);
11785 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11786 }
11787
11788 fmt = NULL;
11789 }
11790
11791 /* Negative block_num stands for an index into ranges_by_label. */
11792 else if (block_num < 0)
11793 {
11794 int lab_idx = - block_num - 1;
11795
11796 if (!have_multiple_function_sections)
11797 {
11798 gcc_unreachable ();
11799 #if 0
11800 /* If we ever use add_ranges_by_labels () for a single
11801 function section, all we have to do is to take out
11802 the #if 0 above. */
11803 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11804 (*ranges_by_label)[lab_idx].begin,
11805 text_section_label,
11806 fmt, i * 2 * DWARF2_ADDR_SIZE);
11807 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11808 (*ranges_by_label)[lab_idx].end,
11809 text_section_label, NULL);
11810 #endif
11811 }
11812 else
11813 {
11814 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11815 (*ranges_by_label)[lab_idx].begin,
11816 fmt, i * 2 * DWARF2_ADDR_SIZE);
11817 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11818 (*ranges_by_label)[lab_idx].end,
11819 NULL);
11820 }
11821 }
11822 else
11823 {
11824 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11825 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11826 fmt = start_fmt;
11827 }
11828 }
11829 }
11830
11831 /* Non-zero if .debug_line_str should be used for .debug_line section
11832 strings or strings that are likely shareable with those. */
11833 #define DWARF5_USE_DEBUG_LINE_STR \
11834 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11835 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11836 /* FIXME: there is no .debug_line_str.dwo section, \
11837 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11838 && !dwarf_split_debug_info)
11839
11840 /* Assign .debug_rnglists indexes. */
11841
11842 static void
11843 index_rnglists (void)
11844 {
11845 unsigned i;
11846 dw_ranges *r;
11847
11848 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11849 if (r->label)
11850 r->idx = rnglist_idx++;
11851 }
11852
11853 /* Emit .debug_rnglists section. */
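/* The DWARF 5 entry kinds emitted below are DW_RLE_base_address,
   DW_RLE_offset_pair and DW_RLE_start_length when the assembler supports
   LEB128 deltas, DW_RLE_start_end otherwise, with DW_RLE_end_of_list
   terminating each list.  */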
11854
11855 static void
11856 output_rnglists (unsigned generation)
11857 {
11858 unsigned i;
11859 dw_ranges *r;
11860 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11861 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11862 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11863
11864 switch_to_section (debug_ranges_section);
11865 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11866 /* There are up to 4 unique ranges labels per generation.
11867 See also init_sections_and_labels. */
11868 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11869 2 + generation * 4);
11870 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11871 3 + generation * 4);
11872 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11873 dw2_asm_output_data (4, 0xffffffff,
11874 "Initial length escape value indicating "
11875 "64-bit DWARF extension");
11876 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11877 "Length of Range Lists");
11878 ASM_OUTPUT_LABEL (asm_out_file, l1);
11879 output_dwarf_version ();
11880 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11881 dw2_asm_output_data (1, 0, "Segment Size");
11882 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11883 about relocation sizes and primarily care about the size of .debug*
11884 sections in linked shared libraries and executables, then
11885 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11886 into it are usually larger than just DW_FORM_sec_offset offsets
11887 into the .debug_rnglists section. */
11888 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11889 "Offset Entry Count");
11890 if (dwarf_split_debug_info)
11891 {
11892 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11893 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11894 if (r->label)
11895 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11896 ranges_base_label, NULL);
11897 }
11898
11899 const char *lab = "";
11900 unsigned int len = vec_safe_length (ranges_table);
11901 const char *base = NULL;
11902 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11903 {
11904 int block_num = r->num;
11905
11906 if (r->label)
11907 {
11908 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11909 lab = r->label;
11910 }
11911 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11912 base = NULL;
11913 if (block_num > 0)
11914 {
11915 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11916 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11917
11918 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11919 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11920
11921 if (HAVE_AS_LEB128)
11922 {
11923 /* If all code is in the text section, then the compilation
11924 unit base address defaults to DW_AT_low_pc, which is the
11925 base of the text section. */
11926 if (!have_multiple_function_sections)
11927 {
11928 dw2_asm_output_data (1, DW_RLE_offset_pair,
11929 "DW_RLE_offset_pair (%s)", lab);
11930 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11931 "Range begin address (%s)", lab);
11932 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11933 "Range end address (%s)", lab);
11934 continue;
11935 }
11936 if (base == NULL)
11937 {
11938 dw_ranges *r2 = NULL;
11939 if (i < len - 1)
11940 r2 = &(*ranges_table)[i + 1];
11941 if (r2
11942 && r2->num != 0
11943 && r2->label == NULL
11944 && !r2->maybe_new_sec)
11945 {
11946 dw2_asm_output_data (1, DW_RLE_base_address,
11947 "DW_RLE_base_address (%s)", lab);
11948 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11949 "Base address (%s)", lab);
11950 strcpy (basebuf, blabel);
11951 base = basebuf;
11952 }
11953 }
11954 if (base)
11955 {
11956 dw2_asm_output_data (1, DW_RLE_offset_pair,
11957 "DW_RLE_offset_pair (%s)", lab);
11958 dw2_asm_output_delta_uleb128 (blabel, base,
11959 "Range begin address (%s)", lab);
11960 dw2_asm_output_delta_uleb128 (elabel, base,
11961 "Range end address (%s)", lab);
11962 continue;
11963 }
11964 dw2_asm_output_data (1, DW_RLE_start_length,
11965 "DW_RLE_start_length (%s)", lab);
11966 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11967 "Range begin address (%s)", lab);
11968 dw2_asm_output_delta_uleb128 (elabel, blabel,
11969 "Range length (%s)", lab);
11970 }
11971 else
11972 {
11973 dw2_asm_output_data (1, DW_RLE_start_end,
11974 "DW_RLE_start_end (%s)", lab);
11975 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11976 "Range begin address (%s)", lab);
11977 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11978 "Range end address (%s)", lab);
11979 }
11980 }
11981
11982 /* Negative block_num stands for an index into ranges_by_label. */
11983 else if (block_num < 0)
11984 {
11985 int lab_idx = - block_num - 1;
11986 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11987 const char *elabel = (*ranges_by_label)[lab_idx].end;
11988
11989 if (!have_multiple_function_sections)
11990 gcc_unreachable ();
11991 if (HAVE_AS_LEB128)
11992 {
11993 dw2_asm_output_data (1, DW_RLE_start_length,
11994 "DW_RLE_start_length (%s)", lab);
11995 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11996 "Range begin address (%s)", lab);
11997 dw2_asm_output_delta_uleb128 (elabel, blabel,
11998 "Range length (%s)", lab);
11999 }
12000 else
12001 {
12002 dw2_asm_output_data (1, DW_RLE_start_end,
12003 "DW_RLE_start_end (%s)", lab);
12004 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
12005 "Range begin address (%s)", lab);
12006 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
12007 "Range end address (%s)", lab);
12008 }
12009 }
12010 else
12011 dw2_asm_output_data (1, DW_RLE_end_of_list,
12012 "DW_RLE_end_of_list (%s)", lab);
12013 }
12014 ASM_OUTPUT_LABEL (asm_out_file, l2);
12015 }
12016
12017 /* Data structure containing information about input files. */
12018 struct file_info
12019 {
12020 const char *path; /* Complete file name. */
12021 const char *fname; /* File name part. */
12022 int length; /* Length of entire string. */
12023 struct dwarf_file_data * file_idx; /* Index in input file table. */
12024 int dir_idx; /* Index in directory table. */
12025 };
12026
12027 /* Data structure containing information about directories with source
12028 files. */
12029 struct dir_info
12030 {
12031 const char *path; /* Path including directory name. */
12032 int length; /* Path length. */
12033 int prefix; /* Index of directory entry which is a prefix. */
12034 int count; /* Number of files in this directory. */
12035 int dir_idx; /* Index of directory used as base. */
12036 };
12037
12038 /* Callback function for file_info comparison. We sort by looking at
12039 the directories in the path. */
12040
12041 static int
12042 file_info_cmp (const void *p1, const void *p2)
12043 {
12044 const struct file_info *const s1 = (const struct file_info *) p1;
12045 const struct file_info *const s2 = (const struct file_info *) p2;
12046 const unsigned char *cp1;
12047 const unsigned char *cp2;
12048
12049   /* Take care of file names without directories.  We need to return
12050      consistent values to qsort since some implementations will get confused
12051      if we return the same value when identical operands are passed in
12052      opposite orders.  So if neither has a directory, return 0, and otherwise
12053      return 1 or -1 depending on which one has the directory.  We want the one
12054      with the directory to sort after the one without, so all files without a
12055      directory are at the start (normally only the compilation unit file).  */
12056 if ((s1->path == s1->fname || s2->path == s2->fname))
12057 return (s2->path == s2->fname) - (s1->path == s1->fname);
12058
12059 cp1 = (const unsigned char *) s1->path;
12060 cp2 = (const unsigned char *) s2->path;
12061
12062 while (1)
12063 {
12064 ++cp1;
12065 ++cp2;
12066 	  /* Reached the end of one of the directory prefixes?  If so, handle
12067 	     it like above, but now we want longer prefixes before shorter ones.  */
12068 if ((cp1 == (const unsigned char *) s1->fname)
12069 || (cp2 == (const unsigned char *) s2->fname))
12070 return ((cp1 == (const unsigned char *) s1->fname)
12071 - (cp2 == (const unsigned char *) s2->fname));
12072
12073 /* Character of current path component the same? */
12074 else if (*cp1 != *cp2)
12075 return *cp1 - *cp2;
12076 }
12077 }
12078
12079 struct file_name_acquire_data
12080 {
12081 struct file_info *files;
12082 int used_files;
12083 int max_files;
12084 };
12085
12086 /* Traversal function for the hash table. */
12087
12088 int
12089 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12090 {
12091 struct dwarf_file_data *d = *slot;
12092 struct file_info *fi;
12093 const char *f;
12094
12095 gcc_assert (fnad->max_files >= d->emitted_number);
12096
12097 if (! d->emitted_number)
12098 return 1;
12099
12100 gcc_assert (fnad->max_files != fnad->used_files);
12101
12102 fi = fnad->files + fnad->used_files++;
12103
12104 f = remap_debug_filename (d->filename);
12105
12106 /* Skip all leading "./". */
12107 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12108 f += 2;
12109
12110 /* Create a new array entry. */
12111 fi->path = f;
12112 fi->length = strlen (f);
12113 fi->file_idx = d;
12114
12115 /* Search for the file name part. */
12116 f = strrchr (f, DIR_SEPARATOR);
12117 #if defined (DIR_SEPARATOR_2)
12118 {
12119 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12120
12121 if (g != NULL)
12122 {
12123 if (f == NULL || f < g)
12124 f = g;
12125 }
12126 }
12127 #endif
12128
12129 fi->fname = f == NULL ? fi->path : f + 1;
12130 return 1;
12131 }
12132
12133 /* Helper function for output_file_names.  Emit a FORM-encoded
12134    string STR, with assembly comment prefix ENTRY_KIND and
12135    index IDX.  */
12136
12137 static void
12138 output_line_string (enum dwarf_form form, const char *str,
12139 const char *entry_kind, unsigned int idx)
12140 {
12141 switch (form)
12142 {
12143 case DW_FORM_string:
12144 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12145 break;
12146 case DW_FORM_line_strp:
12147 if (!debug_line_str_hash)
12148 debug_line_str_hash
12149 = hash_table<indirect_string_hasher>::create_ggc (10);
12150
12151 struct indirect_string_node *node;
12152 node = find_AT_string_in_table (str, debug_line_str_hash);
12153 set_indirect_string (node);
12154 node->form = form;
12155 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12156 debug_line_str_section, "%s: %#x: \"%s\"",
12157 entry_kind, 0, node->str);
12158 break;
12159 default:
12160 gcc_unreachable ();
12161 }
12162 }
12163
12164 /* Output the directory table and the file name table. We try to minimize
12165 the total amount of memory needed. A heuristic is used to avoid large
12166 slowdowns with many input files. */
12167
12168 static void
12169 output_file_names (void)
12170 {
12171 struct file_name_acquire_data fnad;
12172 int numfiles;
12173 struct file_info *files;
12174 struct dir_info *dirs;
12175 int *saved;
12176 int *savehere;
12177 int *backmap;
12178 int ndirs;
12179 int idx_offset;
12180 int i;
12181
12182 if (!last_emitted_file)
12183 {
12184 if (dwarf_version >= 5)
12185 {
12186 dw2_asm_output_data (1, 0, "Directory entry format count");
12187 dw2_asm_output_data_uleb128 (0, "Directories count");
12188 dw2_asm_output_data (1, 0, "File name entry format count");
12189 dw2_asm_output_data_uleb128 (0, "File names count");
12190 }
12191 else
12192 {
12193 dw2_asm_output_data (1, 0, "End directory table");
12194 dw2_asm_output_data (1, 0, "End file name table");
12195 }
12196 return;
12197 }
12198
12199 numfiles = last_emitted_file->emitted_number;
12200
12201 /* Allocate the various arrays we need. */
12202 files = XALLOCAVEC (struct file_info, numfiles);
12203 dirs = XALLOCAVEC (struct dir_info, numfiles);
12204
12205 fnad.files = files;
12206 fnad.used_files = 0;
12207 fnad.max_files = numfiles;
12208 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12209 gcc_assert (fnad.used_files == fnad.max_files);
12210
12211 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12212
12213 /* Find all the different directories used. */
12214 dirs[0].path = files[0].path;
12215 dirs[0].length = files[0].fname - files[0].path;
12216 dirs[0].prefix = -1;
12217 dirs[0].count = 1;
12218 dirs[0].dir_idx = 0;
12219 files[0].dir_idx = 0;
12220 ndirs = 1;
12221
12222 for (i = 1; i < numfiles; i++)
12223 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12224 && memcmp (dirs[ndirs - 1].path, files[i].path,
12225 dirs[ndirs - 1].length) == 0)
12226 {
12227 /* Same directory as last entry. */
12228 files[i].dir_idx = ndirs - 1;
12229 ++dirs[ndirs - 1].count;
12230 }
12231 else
12232 {
12233 int j;
12234
12235 /* This is a new directory. */
12236 dirs[ndirs].path = files[i].path;
12237 dirs[ndirs].length = files[i].fname - files[i].path;
12238 dirs[ndirs].count = 1;
12239 dirs[ndirs].dir_idx = ndirs;
12240 files[i].dir_idx = ndirs;
12241
12242 /* Search for a prefix. */
12243 dirs[ndirs].prefix = -1;
12244 for (j = 0; j < ndirs; j++)
12245 if (dirs[j].length < dirs[ndirs].length
12246 && dirs[j].length > 1
12247 && (dirs[ndirs].prefix == -1
12248 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12249 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12250 dirs[ndirs].prefix = j;
12251
12252 ++ndirs;
12253 }
12254
12255   /* Now to the actual work.  We have to find a subset of the directories
12256      which allows expressing the file names using references to the directory
12257      table with the fewest characters.  We do not do an exhaustive search
12258      where we would have to check out every combination of every single
12259      possible prefix.  Instead we use a heuristic which provides nearly
12260      optimal results in most cases and is never far off.  */
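  /* For instance, if dirs[i] is, say, "/usr/" and it is a (transitive)
     prefix of dirs[j] "/usr/include/", every file in dirs[j] could be
     written relative to dirs[i], saving strlen ("/usr/") characters per
     file; savehere[] below records exactly this potential saving.  */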
12261 saved = XALLOCAVEC (int, ndirs);
12262 savehere = XALLOCAVEC (int, ndirs);
12263
12264 memset (saved, '\0', ndirs * sizeof (saved[0]));
12265 for (i = 0; i < ndirs; i++)
12266 {
12267 int j;
12268 int total;
12269
12270 /* We can always save some space for the current directory. But this
12271 does not mean it will be enough to justify adding the directory. */
12272 savehere[i] = dirs[i].length;
12273 total = (savehere[i] - saved[i]) * dirs[i].count;
12274
12275 for (j = i + 1; j < ndirs; j++)
12276 {
12277 savehere[j] = 0;
12278 if (saved[j] < dirs[i].length)
12279 {
12280 /* Determine whether the dirs[i] path is a prefix of the
12281 dirs[j] path. */
12282 int k;
12283
12284 k = dirs[j].prefix;
12285 while (k != -1 && k != (int) i)
12286 k = dirs[k].prefix;
12287
12288 if (k == (int) i)
12289 {
12290 /* Yes it is. We can possibly save some memory by
12291 writing the filenames in dirs[j] relative to
12292 dirs[i]. */
12293 savehere[j] = dirs[i].length;
12294 total += (savehere[j] - saved[j]) * dirs[j].count;
12295 }
12296 }
12297 }
12298
12299 /* Check whether we can save enough to justify adding the dirs[i]
12300 directory. */
12301 if (total > dirs[i].length + 1)
12302 {
12303 /* It's worthwhile adding. */
12304 for (j = i; j < ndirs; j++)
12305 if (savehere[j] > 0)
12306 {
12307 /* Remember how much we saved for this directory so far. */
12308 saved[j] = savehere[j];
12309
12310 /* Remember the prefix directory. */
12311 dirs[j].dir_idx = i;
12312 }
12313 }
12314 }
12315
12316 /* Emit the directory name table. */
12317 idx_offset = dirs[0].length > 0 ? 1 : 0;
12318 enum dwarf_form str_form = DW_FORM_string;
12319 enum dwarf_form idx_form = DW_FORM_udata;
12320 if (dwarf_version >= 5)
12321 {
12322 const char *comp_dir = comp_dir_string ();
12323 if (comp_dir == NULL)
12324 comp_dir = "";
12325 dw2_asm_output_data (1, 1, "Directory entry format count");
12326 if (DWARF5_USE_DEBUG_LINE_STR)
12327 str_form = DW_FORM_line_strp;
12328 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12329 dw2_asm_output_data_uleb128 (str_form, "%s",
12330 get_DW_FORM_name (str_form));
12331 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12332 if (str_form == DW_FORM_string)
12333 {
12334 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12335 for (i = 1 - idx_offset; i < ndirs; i++)
12336 dw2_asm_output_nstring (dirs[i].path,
12337 dirs[i].length
12338 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12339 "Directory Entry: %#x", i + idx_offset);
12340 }
12341 else
12342 {
12343 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12344 for (i = 1 - idx_offset; i < ndirs; i++)
12345 {
12346 const char *str
12347 = ggc_alloc_string (dirs[i].path,
12348 dirs[i].length
12349 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12350 output_line_string (str_form, str, "Directory Entry",
12351 (unsigned) i + idx_offset);
12352 }
12353 }
12354 }
12355 else
12356 {
12357 for (i = 1 - idx_offset; i < ndirs; i++)
12358 dw2_asm_output_nstring (dirs[i].path,
12359 dirs[i].length
12360 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12361 "Directory Entry: %#x", i + idx_offset);
12362
12363 dw2_asm_output_data (1, 0, "End directory table");
12364 }
12365
12366 /* We have to emit them in the order of emitted_number since that's
12367 used in the debug info generation. To do this efficiently we
12368 generate a back-mapping of the indices first. */
12369 backmap = XALLOCAVEC (int, numfiles);
12370 for (i = 0; i < numfiles; i++)
12371 backmap[files[i].file_idx->emitted_number - 1] = i;
12372
12373 if (dwarf_version >= 5)
12374 {
12375 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12376 if (filename0 == NULL)
12377 filename0 = "";
12378 	  /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12379 	     DW_FORM_data2.  Choose one based on the number of directories
12380 	     and how much space they would occupy in each encoding.
12381 	     If we have at most 256 directories, all indexes fit into
12382 	     a single byte, so DW_FORM_data1 is most compact (if there
12383 	     are at most 128 directories, DW_FORM_udata would be just as
12384 	     compact, but no shorter and slower to decode).  */
12385 if (ndirs + idx_offset <= 256)
12386 idx_form = DW_FORM_data1;
12387 	  /* If there are more than 65536 directories, we have to use
12388 	     DW_FORM_udata, as DW_FORM_data2 can't refer to them.
12389 	     Otherwise, compute how much space the indexes would occupy if
12390 	     they all used DW_FORM_udata - sum - compare that to the size of
12391 	     the DW_FORM_data2 encoding, and pick the more efficient one.  */
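	  /* E.g. a directory index below 128 takes one byte as uleb128 but
	     always two bytes as DW_FORM_data2, so DW_FORM_data2 only wins
	     when most files live in directories with index 128 or above.  */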
12392 else if (ndirs + idx_offset <= 65536)
12393 {
12394 unsigned HOST_WIDE_INT sum = 1;
12395 for (i = 0; i < numfiles; i++)
12396 {
12397 int file_idx = backmap[i];
12398 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12399 sum += size_of_uleb128 (dir_idx);
12400 }
12401 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12402 idx_form = DW_FORM_data2;
12403 }
12404 #ifdef VMS_DEBUGGING_INFO
12405 dw2_asm_output_data (1, 4, "File name entry format count");
12406 #else
12407 dw2_asm_output_data (1, 2, "File name entry format count");
12408 #endif
12409 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12410 dw2_asm_output_data_uleb128 (str_form, "%s",
12411 get_DW_FORM_name (str_form));
12412 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12413 "DW_LNCT_directory_index");
12414 dw2_asm_output_data_uleb128 (idx_form, "%s",
12415 get_DW_FORM_name (idx_form));
12416 #ifdef VMS_DEBUGGING_INFO
12417 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12418 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12419 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12420 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12421 #endif
12422 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12423
12424 output_line_string (str_form, filename0, "File Entry", 0);
12425
12426 /* Include directory index. */
12427 if (idx_form != DW_FORM_udata)
12428 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12429 0, NULL);
12430 else
12431 dw2_asm_output_data_uleb128 (0, NULL);
12432
12433 #ifdef VMS_DEBUGGING_INFO
12434 dw2_asm_output_data_uleb128 (0, NULL);
12435 dw2_asm_output_data_uleb128 (0, NULL);
12436 #endif
12437 }
12438
12439 /* Now write all the file names. */
12440 for (i = 0; i < numfiles; i++)
12441 {
12442 int file_idx = backmap[i];
12443 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12444
12445 #ifdef VMS_DEBUGGING_INFO
12446 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12447
12448 /* Setting these fields can lead to debugger miscomparisons,
12449 but VMS Debug requires them to be set correctly. */
12450
12451 int ver;
12452 long long cdt;
12453 long siz;
12454 int maxfilelen = (strlen (files[file_idx].path)
12455 + dirs[dir_idx].length
12456 + MAX_VMS_VERSION_LEN + 1);
12457 char *filebuf = XALLOCAVEC (char, maxfilelen);
12458
12459 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12460 snprintf (filebuf, maxfilelen, "%s;%d",
12461 files[file_idx].path + dirs[dir_idx].length, ver);
12462
12463 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12464
12465 /* Include directory index. */
12466 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12467 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12468 dir_idx + idx_offset, NULL);
12469 else
12470 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12471
12472 /* Modification time. */
12473 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12474 &cdt, 0, 0, 0) == 0)
12475 ? cdt : 0, NULL);
12476
12477 /* File length in bytes. */
12478 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12479 0, &siz, 0, 0) == 0)
12480 ? siz : 0, NULL);
12481 #else
12482 output_line_string (str_form,
12483 files[file_idx].path + dirs[dir_idx].length,
12484 "File Entry", (unsigned) i + 1);
12485
12486 /* Include directory index. */
12487 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12488 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12489 dir_idx + idx_offset, NULL);
12490 else
12491 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12492
12493 if (dwarf_version >= 5)
12494 continue;
12495
12496 /* Modification time. */
12497 dw2_asm_output_data_uleb128 (0, NULL);
12498
12499 /* File length in bytes. */
12500 dw2_asm_output_data_uleb128 (0, NULL);
12501 #endif /* VMS_DEBUGGING_INFO */
12502 }
12503
12504 if (dwarf_version < 5)
12505 dw2_asm_output_data (1, 0, "End file name table");
12506 }
12507
12508
12509 /* Output one line number table into the .debug_line section. */
12510
12511 static void
12512 output_one_line_info_table (dw_line_info_table *table)
12513 {
12514 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12515 unsigned int current_line = 1;
12516 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12517 dw_line_info_entry *ent, *prev_addr;
12518 size_t i;
12519 unsigned int view;
12520
12521 view = 0;
12522
12523 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12524 {
12525 switch (ent->opcode)
12526 {
12527 case LI_set_address:
12528 /* ??? Unfortunately, we have little choice here currently, and
12529 must always use the most general form. GCC does not know the
12530 address delta itself, so we can't use DW_LNS_advance_pc. Many
12531 ports do have length attributes which will give an upper bound
12532 on the address range. We could perhaps use length attributes
12533 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12534 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12535
12536 view = 0;
12537
12538 /* This can handle any delta. This takes
12539 4+DWARF2_ADDR_SIZE bytes. */
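/* The bytes emitted below are: the 0 opcode that introduces an extended
   opcode, a uleb128 length of 1 + DWARF2_ADDR_SIZE, the DW_LNE_set_address
   sub-opcode, and finally the DWARF2_ADDR_SIZE-byte address itself.  */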
12540 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12541 debug_variable_location_views
12542 ? ", reset view to 0" : "");
12543 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12544 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12545 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
12546
12547 prev_addr = ent;
12548 break;
12549
12550 case LI_adv_address:
12551 {
12552 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12553 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12554 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12555
12556 view++;
12557
12558 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12559 dw2_asm_output_delta (2, line_label, prev_label,
12560 "from %s to %s", prev_label, line_label);
12561
12562 prev_addr = ent;
12563 break;
12564 }
12565
12566 case LI_set_line:
12567 if (ent->val == current_line)
12568 {
12569 /* We still need to start a new row, so output a copy insn. */
12570 dw2_asm_output_data (1, DW_LNS_copy,
12571 "copy line %u", current_line);
12572 }
12573 else
12574 {
12575 int line_offset = ent->val - current_line;
12576 int line_delta = line_offset - DWARF_LINE_BASE;
12577
12578 current_line = ent->val;
12579 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12580 {
12581 /* This can handle deltas from -10 to 234, using the current
12582 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12583 This takes 1 byte. */
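/* For example, with DWARF_LINE_BASE == -10, advancing the source line
   by 1 gives line_delta == 11, so the single byte
   DWARF_LINE_OPCODE_BASE + 11 is emitted.  */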
12584 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12585 "line %u", current_line);
12586 }
12587 else
12588 {
12589 /* This can handle any delta. This takes at least 3 bytes,
12590 depending on the value being encoded. */
12591 dw2_asm_output_data (1, DW_LNS_advance_line,
12592 "advance to line %u", current_line);
12593 dw2_asm_output_data_sleb128 (line_offset, NULL);
12594 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12595 }
12596 }
12597 break;
12598
12599 case LI_set_file:
12600 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12601 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12602 break;
12603
12604 case LI_set_column:
12605 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12606 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12607 break;
12608
12609 case LI_negate_stmt:
12610 current_is_stmt = !current_is_stmt;
12611 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12612 "is_stmt %d", current_is_stmt);
12613 break;
12614
12615 case LI_set_prologue_end:
12616 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12617 "set prologue end");
12618 break;
12619
12620 case LI_set_epilogue_begin:
12621 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12622 "set epilogue begin");
12623 break;
12624
12625 case LI_set_discriminator:
12626 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12627 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12628 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12629 dw2_asm_output_data_uleb128 (ent->val, NULL);
12630 break;
12631 }
12632 }
12633
12634 /* Emit debug info for the address of the end of the table. */
12635 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12636 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12637 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12638 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12639
12640 dw2_asm_output_data (1, 0, "end sequence");
12641 dw2_asm_output_data_uleb128 (1, NULL);
12642 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12643 }
12644
12645 /* Output the source line number correspondence information. This
12646 information goes into the .debug_line section. */
12647
12648 static void
12649 output_line_info (bool prologue_only)
12650 {
12651 static unsigned int generation;
12652 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12653 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12654 bool saw_one = false;
12655 int opc;
12656
12657 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12658 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12659 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12660 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12661
12662 if (!XCOFF_DEBUGGING_INFO)
12663 {
12664 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12665 dw2_asm_output_data (4, 0xffffffff,
12666 "Initial length escape value indicating 64-bit DWARF extension");
12667 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12668 "Length of Source Line Info");
12669 }
12670
12671 ASM_OUTPUT_LABEL (asm_out_file, l1);
12672
12673 output_dwarf_version ();
12674 if (dwarf_version >= 5)
12675 {
12676 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12677 dw2_asm_output_data (1, 0, "Segment Size");
12678 }
12679 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12680 ASM_OUTPUT_LABEL (asm_out_file, p1);
12681
12682 /* Define the architecture-dependent minimum instruction length (in bytes).
12683 In this implementation of DWARF, this field is used for information
12684 purposes only. Since GCC generates assembly language, we have no
12685 a priori knowledge of how many instruction bytes are generated for each
12686 source line, and therefore can use only the DW_LNE_set_address and
12687 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12688 this as '1', which is "correct enough" for all architectures,
12689 and don't let the target override. */
12690 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12691
12692 if (dwarf_version >= 4)
12693 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12694 "Maximum Operations Per Instruction");
12695 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12696 "Default is_stmt_start flag");
12697 dw2_asm_output_data (1, DWARF_LINE_BASE,
12698 "Line Base Value (Special Opcodes)");
12699 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12700 "Line Range Value (Special Opcodes)");
12701 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12702 "Special Opcode Base");
12703
12704 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12705 {
12706 int n_op_args;
12707 switch (opc)
12708 {
12709 case DW_LNS_advance_pc:
12710 case DW_LNS_advance_line:
12711 case DW_LNS_set_file:
12712 case DW_LNS_set_column:
12713 case DW_LNS_fixed_advance_pc:
12714 case DW_LNS_set_isa:
12715 n_op_args = 1;
12716 break;
12717 default:
12718 n_op_args = 0;
12719 break;
12720 }
12721
12722 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12723 opc, n_op_args);
12724 }
12725
12726 /* Write out the information about the files we use. */
12727 output_file_names ();
12728 ASM_OUTPUT_LABEL (asm_out_file, p2);
12729 if (prologue_only)
12730 {
12731 /* Output the marker for the end of the line number info. */
12732 ASM_OUTPUT_LABEL (asm_out_file, l2);
12733 return;
12734 }
12735
12736 if (separate_line_info)
12737 {
12738 dw_line_info_table *table;
12739 size_t i;
12740
12741 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12742 if (table->in_use)
12743 {
12744 output_one_line_info_table (table);
12745 saw_one = true;
12746 }
12747 }
12748 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12749 {
12750 output_one_line_info_table (cold_text_section_line_info);
12751 saw_one = true;
12752 }
12753
12754 /* ??? Some Darwin linkers crash on a .debug_line section with no
12755 sequences. Further, merely a DW_LNE_end_sequence entry is not
12756 sufficient -- the address column must also be initialized.
12757 Make sure to output at least one set_address/end_sequence pair,
12758 choosing .text since that section is always present. */
12759 if (text_section_line_info->in_use || !saw_one)
12760 output_one_line_info_table (text_section_line_info);
12761
12762 /* Output the marker for the end of the line number info. */
12763 ASM_OUTPUT_LABEL (asm_out_file, l2);
12764 }
12765 \f
12766 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12767
12768 static inline bool
12769 need_endianity_attribute_p (bool reverse)
12770 {
12771 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12772 }
12773
12774 /* Given a pointer to a tree node for some base type, return a pointer to
12775 a DIE that describes the given type. REVERSE is true if the type is
12776 to be interpreted in the reverse storage order wrt the target order.
12777
12778 This routine must only be called for GCC type nodes that correspond to
12779 Dwarf base (fundamental) types. */
12780
12781 static dw_die_ref
12782 base_type_die (tree type, bool reverse)
12783 {
12784 dw_die_ref base_type_result;
12785 enum dwarf_type encoding;
12786 bool fpt_used = false;
12787 struct fixed_point_type_info fpt_info;
12788 tree type_bias = NULL_TREE;
12789
12790 /* If this is a subtype that should not be emitted as a subrange type,
12791 use the base type. See subrange_type_for_debug_p. */
12792 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12793 type = TREE_TYPE (type);
12794
12795 switch (TREE_CODE (type))
12796 {
12797 case INTEGER_TYPE:
12798 if ((dwarf_version >= 4 || !dwarf_strict)
12799 && TYPE_NAME (type)
12800 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12801 && DECL_IS_BUILTIN (TYPE_NAME (type))
12802 && DECL_NAME (TYPE_NAME (type)))
12803 {
12804 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12805 if (strcmp (name, "char16_t") == 0
12806 || strcmp (name, "char32_t") == 0)
12807 {
12808 encoding = DW_ATE_UTF;
12809 break;
12810 }
12811 }
12812 if ((dwarf_version >= 3 || !dwarf_strict)
12813 && lang_hooks.types.get_fixed_point_type_info)
12814 {
12815 memset (&fpt_info, 0, sizeof (fpt_info));
12816 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12817 {
12818 fpt_used = true;
12819 encoding = ((TYPE_UNSIGNED (type))
12820 ? DW_ATE_unsigned_fixed
12821 : DW_ATE_signed_fixed);
12822 break;
12823 }
12824 }
12825 if (TYPE_STRING_FLAG (type))
12826 {
12827 if (TYPE_UNSIGNED (type))
12828 encoding = DW_ATE_unsigned_char;
12829 else
12830 encoding = DW_ATE_signed_char;
12831 }
12832 else if (TYPE_UNSIGNED (type))
12833 encoding = DW_ATE_unsigned;
12834 else
12835 encoding = DW_ATE_signed;
12836
12837 if (!dwarf_strict
12838 && lang_hooks.types.get_type_bias)
12839 type_bias = lang_hooks.types.get_type_bias (type);
12840 break;
12841
12842 case REAL_TYPE:
12843 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12844 {
12845 if (dwarf_version >= 3 || !dwarf_strict)
12846 encoding = DW_ATE_decimal_float;
12847 else
12848 encoding = DW_ATE_lo_user;
12849 }
12850 else
12851 encoding = DW_ATE_float;
12852 break;
12853
12854 case FIXED_POINT_TYPE:
12855 if (!(dwarf_version >= 3 || !dwarf_strict))
12856 encoding = DW_ATE_lo_user;
12857 else if (TYPE_UNSIGNED (type))
12858 encoding = DW_ATE_unsigned_fixed;
12859 else
12860 encoding = DW_ATE_signed_fixed;
12861 break;
12862
12863 /* Dwarf2 doesn't know anything about complex ints, so use
12864 a user-defined type for them. */
12865 case COMPLEX_TYPE:
12866 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12867 encoding = DW_ATE_complex_float;
12868 else
12869 encoding = DW_ATE_lo_user;
12870 break;
12871
12872 case BOOLEAN_TYPE:
12873 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12874 encoding = DW_ATE_boolean;
12875 break;
12876
12877 default:
12878 /* No other TREE_CODEs are Dwarf fundamental types. */
12879 gcc_unreachable ();
12880 }
12881
12882 base_type_result = new_die_raw (DW_TAG_base_type);
12883
12884 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12885 int_size_in_bytes (type));
12886 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12887
12888 if (need_endianity_attribute_p (reverse))
12889 add_AT_unsigned (base_type_result, DW_AT_endianity,
12890 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12891
12892 add_alignment_attribute (base_type_result, type);
12893
12894 if (fpt_used)
12895 {
12896 switch (fpt_info.scale_factor_kind)
12897 {
12898 case fixed_point_scale_factor_binary:
12899 add_AT_int (base_type_result, DW_AT_binary_scale,
12900 fpt_info.scale_factor.binary);
12901 break;
12902
12903 case fixed_point_scale_factor_decimal:
12904 add_AT_int (base_type_result, DW_AT_decimal_scale,
12905 fpt_info.scale_factor.decimal);
12906 break;
12907
12908 case fixed_point_scale_factor_arbitrary:
12909 /* Arbitrary scale factors cannot be described in standard DWARF,
12910 yet. */
12911 if (!dwarf_strict)
12912 {
12913 /* Describe the scale factor as a rational constant. */
12914 const dw_die_ref scale_factor
12915 = new_die (DW_TAG_constant, comp_unit_die (), type);
12916
12917 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12918 fpt_info.scale_factor.arbitrary.numerator);
12919 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12920 fpt_info.scale_factor.arbitrary.denominator);
12921
12922 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12923 }
12924 break;
12925
12926 default:
12927 gcc_unreachable ();
12928 }
12929 }
12930
12931 if (type_bias)
12932 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12933 dw_scalar_form_constant
12934 | dw_scalar_form_exprloc
12935 | dw_scalar_form_reference,
12936 NULL);
12937
12938 return base_type_result;
12939 }
12940
12941 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12942 named 'auto' in its type: return true for it, false otherwise. */
12943
12944 static inline bool
12945 is_cxx_auto (tree type)
12946 {
12947 if (is_cxx ())
12948 {
12949 tree name = TYPE_IDENTIFIER (type);
12950 if (name == get_identifier ("auto")
12951 || name == get_identifier ("decltype(auto)"))
12952 return true;
12953 }
12954 return false;
12955 }
12956
12957 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12958 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12959
12960 static inline int
12961 is_base_type (tree type)
12962 {
12963 switch (TREE_CODE (type))
12964 {
12965 case INTEGER_TYPE:
12966 case REAL_TYPE:
12967 case FIXED_POINT_TYPE:
12968 case COMPLEX_TYPE:
12969 case BOOLEAN_TYPE:
12970 return 1;
12971
12972 case VOID_TYPE:
12973 case ARRAY_TYPE:
12974 case RECORD_TYPE:
12975 case UNION_TYPE:
12976 case QUAL_UNION_TYPE:
12977 case ENUMERAL_TYPE:
12978 case FUNCTION_TYPE:
12979 case METHOD_TYPE:
12980 case POINTER_TYPE:
12981 case REFERENCE_TYPE:
12982 case NULLPTR_TYPE:
12983 case OFFSET_TYPE:
12984 case LANG_TYPE:
12985 case VECTOR_TYPE:
12986 return 0;
12987
12988 default:
12989 if (is_cxx_auto (type))
12990 return 0;
12991 gcc_unreachable ();
12992 }
12993
12994 return 0;
12995 }
12996
12997 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12998 node, return the size in bits for the type if it is a constant, or else
12999 return the alignment for the type if the type's size is not constant, or
13000 else return BITS_PER_WORD if the type actually turns out to be an
13001 ERROR_MARK node. */
13002
13003 static inline unsigned HOST_WIDE_INT
13004 simple_type_size_in_bits (const_tree type)
13005 {
13006 if (TREE_CODE (type) == ERROR_MARK)
13007 return BITS_PER_WORD;
13008 else if (TYPE_SIZE (type) == NULL_TREE)
13009 return 0;
13010 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
13011 return tree_to_uhwi (TYPE_SIZE (type));
13012 else
13013 return TYPE_ALIGN (type);
13014 }
13015
13016 /* Similarly, but return an offset_int instead of UHWI. */
13017
13018 static inline offset_int
13019 offset_int_type_size_in_bits (const_tree type)
13020 {
13021 if (TREE_CODE (type) == ERROR_MARK)
13022 return BITS_PER_WORD;
13023 else if (TYPE_SIZE (type) == NULL_TREE)
13024 return 0;
13025 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
13026 return wi::to_offset (TYPE_SIZE (type));
13027 else
13028 return TYPE_ALIGN (type);
13029 }
13030
13031 /* Given a pointer to a tree node for a subrange type, return a pointer
13032 to a DIE that describes the given type. */
13033
13034 static dw_die_ref
13035 subrange_type_die (tree type, tree low, tree high, tree bias,
13036 dw_die_ref context_die)
13037 {
13038 dw_die_ref subrange_die;
13039 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
13040
13041 if (context_die == NULL)
13042 context_die = comp_unit_die ();
13043
13044 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
13045
13046 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
13047 {
13048 /* The size of the subrange type and its base type do not match,
13049 so we need to generate a size attribute for the subrange type. */
13050 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
13051 }
13052
13053 add_alignment_attribute (subrange_die, type);
13054
13055 if (low)
13056 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
13057 if (high)
13058 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
13059 if (bias && !dwarf_strict)
13060 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
13061 dw_scalar_form_constant
13062 | dw_scalar_form_exprloc
13063 | dw_scalar_form_reference,
13064 NULL);
13065
13066 return subrange_die;
13067 }
13068
13069 /* Returns the (const and/or volatile) cv_qualifiers associated with
13070 the decl node. This will normally be augmented with the
13071 cv_qualifiers of the underlying type in add_type_attribute. */
13072
13073 static int
13074 decl_quals (const_tree decl)
13075 {
13076 return ((TREE_READONLY (decl)
13077 /* The C++ front-end correctly marks reference-typed
13078 variables as readonly, but from a language (and debug
13079 info) standpoint they are not const-qualified. */
13080 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
13081 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
13082 | (TREE_THIS_VOLATILE (decl)
13083 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
13084 }
13085
13086 /* Determine the TYPE whose qualifiers match the largest strict subset
13087 of the given TYPE_QUALS, and return its qualifiers. Ignore all
13088 qualifiers outside QUAL_MASK. */
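/* For example, if TYPE_QUALS is const|volatile|restrict and the type has
   a const|volatile variant but no const|volatile|restrict one, that
   variant's qualifiers (rank 2) are returned and only restrict remains
   for the caller to wrap around it.  */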
13089
13090 static int
13091 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13092 {
13093 tree t;
13094 int best_rank = 0, best_qual = 0, max_rank;
13095
13096 type_quals &= qual_mask;
13097 max_rank = popcount_hwi (type_quals) - 1;
13098
13099 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13100 t = TYPE_NEXT_VARIANT (t))
13101 {
13102 int q = TYPE_QUALS (t) & qual_mask;
13103
13104 if ((q & type_quals) == q && q != type_quals
13105 && check_base_type (t, type))
13106 {
13107 int rank = popcount_hwi (q);
13108
13109 if (rank > best_rank)
13110 {
13111 best_rank = rank;
13112 best_qual = q;
13113 }
13114 }
13115 }
13116
13117 return best_qual;
13118 }
13119
13120 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13121 static const dwarf_qual_info_t dwarf_qual_info[] =
13122 {
13123 { TYPE_QUAL_CONST, DW_TAG_const_type },
13124 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13125 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13126 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13127 };
13128 static const unsigned int dwarf_qual_info_size
13129 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
13130
13131 /* If DIE is a qualified DIE of some base DIE with the same parent,
13132 return the base DIE, otherwise return NULL. Set MASK to the
13133 qualifiers added compared to the returned DIE. */
13134
13135 static dw_die_ref
13136 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13137 {
13138 unsigned int i;
13139 for (i = 0; i < dwarf_qual_info_size; i++)
13140 if (die->die_tag == dwarf_qual_info[i].t)
13141 break;
13142 if (i == dwarf_qual_info_size)
13143 return NULL;
13144 if (vec_safe_length (die->die_attr) != 1)
13145 return NULL;
13146 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13147 if (type == NULL || type->die_parent != die->die_parent)
13148 return NULL;
13149 *mask |= dwarf_qual_info[i].q;
13150 if (depth)
13151 {
13152 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13153 if (ret)
13154 return ret;
13155 }
13156 return type;
13157 }
13158
13159 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13160 entry that chains the modifiers specified by CV_QUALS in front of the
13161 given type. REVERSE is true if the type is to be interpreted in the
13162 reverse storage order wrt the target order. */
13163
13164 static dw_die_ref
13165 modified_type_die (tree type, int cv_quals, bool reverse,
13166 dw_die_ref context_die)
13167 {
13168 enum tree_code code = TREE_CODE (type);
13169 dw_die_ref mod_type_die;
13170 dw_die_ref sub_die = NULL;
13171 tree item_type = NULL;
13172 tree qualified_type;
13173 tree name, low, high;
13174 dw_die_ref mod_scope;
13175 /* Only these cv-qualifiers are currently handled. */
13176 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13177 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13178 ENCODE_QUAL_ADDR_SPACE(~0U));
13179 const bool reverse_base_type
13180 = need_endianity_attribute_p (reverse) && is_base_type (type);
13181
13182 if (code == ERROR_MARK)
13183 return NULL;
13184
13185 if (lang_hooks.types.get_debug_type)
13186 {
13187 tree debug_type = lang_hooks.types.get_debug_type (type);
13188
13189 if (debug_type != NULL_TREE && debug_type != type)
13190 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13191 }
13192
13193 cv_quals &= cv_qual_mask;
13194
13195 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13196 tag modifier (and not an attribute) that old consumers won't be
13197 able to handle. */
13198 if (dwarf_version < 3)
13199 cv_quals &= ~TYPE_QUAL_RESTRICT;
13200
13201 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
13202 if (dwarf_version < 5)
13203 cv_quals &= ~TYPE_QUAL_ATOMIC;
13204
13205 /* See if we already have the appropriately qualified variant of
13206 this type. */
13207 qualified_type = get_qualified_type (type, cv_quals);
13208
13209 if (qualified_type == sizetype)
13210 {
13211 /* Try not to expose the internal sizetype type's name. */
13212 if (TYPE_NAME (qualified_type)
13213 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13214 {
13215 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13216
13217 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13218 && (TYPE_PRECISION (t)
13219 == TYPE_PRECISION (qualified_type))
13220 && (TYPE_UNSIGNED (t)
13221 == TYPE_UNSIGNED (qualified_type)));
13222 qualified_type = t;
13223 }
13224 else if (qualified_type == sizetype
13225 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13226 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13227 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13228 qualified_type = size_type_node;
13229 if (type == sizetype)
13230 type = qualified_type;
13231 }
13232
13233 /* If we do, then we can just use its DIE, if it exists. */
13234 if (qualified_type)
13235 {
13236 mod_type_die = lookup_type_die (qualified_type);
13237
13238 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13239 dealt with specially: the DIE with the attribute, if it exists, is
13240 placed immediately after the regular DIE for the same base type. */
13241 if (mod_type_die
13242 && (!reverse_base_type
13243 || ((mod_type_die = mod_type_die->die_sib) != NULL
13244 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13245 return mod_type_die;
13246 }
13247
13248 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13249
13250 /* Handle C typedef types. */
13251 if (name
13252 && TREE_CODE (name) == TYPE_DECL
13253 && DECL_ORIGINAL_TYPE (name)
13254 && !DECL_ARTIFICIAL (name))
13255 {
13256 tree dtype = TREE_TYPE (name);
13257
13258 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13259 if (qualified_type == dtype && !reverse_base_type)
13260 {
13261 tree origin = decl_ultimate_origin (name);
13262
13263 /* Typedef variants that have an abstract origin don't get their own
13264 type DIE (see gen_typedef_die), so fall back on the ultimate
13265 abstract origin instead. */
13266 if (origin != NULL && origin != name)
13267 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13268 context_die);
13269
13270 /* For a named type, use the typedef. */
13271 gen_type_die (qualified_type, context_die);
13272 return lookup_type_die (qualified_type);
13273 }
13274 else
13275 {
13276 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13277 dquals &= cv_qual_mask;
13278 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13279 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13280 /* cv-unqualified version of named type. Just use
13281 the unnamed type to which it refers. */
13282 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13283 reverse, context_die);
13284 /* Else cv-qualified version of named type; fall through. */
13285 }
13286 }
13287
13288 mod_scope = scope_die_for (type, context_die);
13289
13290 if (cv_quals)
13291 {
13292 int sub_quals = 0, first_quals = 0;
13293 unsigned i;
13294 dw_die_ref first = NULL, last = NULL;
13295
13296 /* Determine a lesser qualified type that most closely matches
13297 this one. Then generate DW_TAG_* entries for the remaining
13298 qualifiers. */
13299 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13300 cv_qual_mask);
13301 if (sub_quals && use_debug_types)
13302 {
13303 bool needed = false;
13304 /* If emitting type units, make sure the order of qualifiers
13305 is canonical. Thus, start from unqualified type if
13306 an earlier qualifier is missing in sub_quals, but some later
13307 one is present there. */
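/* For example, if cv_quals is const|volatile but sub_quals is only
   volatile, the earlier const qualifier is missing from sub_quals while
   the later volatile is present, so sub_quals is reset to 0 and the
   const and volatile DIEs are then built on top of the unqualified type
   in the fixed dwarf_qual_info order.  */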
13308 for (i = 0; i < dwarf_qual_info_size; i++)
13309 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13310 needed = true;
13311 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13312 {
13313 sub_quals = 0;
13314 break;
13315 }
13316 }
13317 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13318 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13319 {
13320 /* As not all intermediate qualified DIEs have corresponding
13321 tree types, ensure that qualified DIEs in the same scope
13322 as their DW_AT_type are emitted after their DW_AT_type,
13323 only with other qualified DIEs for the same type possibly
13324 in between them. Determine the range of such qualified
13325 DIEs now (first being the base type, last being corresponding
13326 last qualified DIE for it). */
13327 unsigned int count = 0;
13328 first = qualified_die_p (mod_type_die, &first_quals,
13329 dwarf_qual_info_size);
13330 if (first == NULL)
13331 first = mod_type_die;
13332 gcc_assert ((first_quals & ~sub_quals) == 0);
13333 for (count = 0, last = first;
13334 count < (1U << dwarf_qual_info_size);
13335 count++, last = last->die_sib)
13336 {
13337 int quals = 0;
13338 if (last == mod_scope->die_child)
13339 break;
13340 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13341 != first)
13342 break;
13343 }
13344 }
13345
13346 for (i = 0; i < dwarf_qual_info_size; i++)
13347 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13348 {
13349 dw_die_ref d;
13350 if (first && first != last)
13351 {
13352 for (d = first->die_sib; ; d = d->die_sib)
13353 {
13354 int quals = 0;
13355 qualified_die_p (d, &quals, dwarf_qual_info_size);
13356 if (quals == (first_quals | dwarf_qual_info[i].q))
13357 break;
13358 if (d == last)
13359 {
13360 d = NULL;
13361 break;
13362 }
13363 }
13364 if (d)
13365 {
13366 mod_type_die = d;
13367 continue;
13368 }
13369 }
13370 if (first)
13371 {
13372 d = new_die_raw (dwarf_qual_info[i].t);
13373 add_child_die_after (mod_scope, d, last);
13374 last = d;
13375 }
13376 else
13377 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13378 if (mod_type_die)
13379 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13380 mod_type_die = d;
13381 first_quals |= dwarf_qual_info[i].q;
13382 }
13383 }
13384 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13385 {
13386 dwarf_tag tag = DW_TAG_pointer_type;
13387 if (code == REFERENCE_TYPE)
13388 {
13389 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13390 tag = DW_TAG_rvalue_reference_type;
13391 else
13392 tag = DW_TAG_reference_type;
13393 }
13394 mod_type_die = new_die (tag, mod_scope, type);
13395
13396 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13397 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13398 add_alignment_attribute (mod_type_die, type);
13399 item_type = TREE_TYPE (type);
13400
13401 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13402 if (!ADDR_SPACE_GENERIC_P (as))
13403 {
13404 int action = targetm.addr_space.debug (as);
13405 if (action >= 0)
13406 {
13407 /* Positive values indicate an address_class. */
13408 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13409 }
13410 else
13411 {
13412 /* Negative values indicate an (inverted) segment base reg. */
13413 dw_loc_descr_ref d
13414 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13415 add_AT_loc (mod_type_die, DW_AT_segment, d);
13416 }
13417 }
13418 }
13419 else if (code == INTEGER_TYPE
13420 && TREE_TYPE (type) != NULL_TREE
13421 && subrange_type_for_debug_p (type, &low, &high))
13422 {
13423 tree bias = NULL_TREE;
13424 if (lang_hooks.types.get_type_bias)
13425 bias = lang_hooks.types.get_type_bias (type);
13426 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13427 item_type = TREE_TYPE (type);
13428 }
13429 else if (is_base_type (type))
13430 {
13431 mod_type_die = base_type_die (type, reverse);
13432
13433 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13434 if (reverse_base_type)
13435 {
13436 dw_die_ref after_die
13437 = modified_type_die (type, cv_quals, false, context_die);
13438 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13439 }
13440 else
13441 add_child_die (comp_unit_die (), mod_type_die);
13442
13443 add_pubtype (type, mod_type_die);
13444 }
13445 else
13446 {
13447 gen_type_die (type, context_die);
13448
13449 /* We have to get the type_main_variant here (and pass that to the
13450 `lookup_type_die' routine) because the ..._TYPE node we have
13451 might simply be a *copy* of some original type node (where the
13452 copy was created to help us keep track of typedef names) and
13453 that copy might have a different TYPE_UID from the original
13454 ..._TYPE node. */
13455 if (TREE_CODE (type) == FUNCTION_TYPE
13456 || TREE_CODE (type) == METHOD_TYPE)
13457 {
13458 /* For function/method types, can't just use type_main_variant here,
13459 because that can have different ref-qualifiers for C++,
13460 but try to canonicalize. */
13461 tree main = TYPE_MAIN_VARIANT (type);
13462 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13463 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13464 && check_base_type (t, main)
13465 && check_lang_type (t, type))
13466 return lookup_type_die (t);
13467 return lookup_type_die (type);
13468 }
13469 else if (TREE_CODE (type) != VECTOR_TYPE
13470 && TREE_CODE (type) != ARRAY_TYPE)
13471 return lookup_type_die (type_main_variant (type));
13472 else
13473 /* Vectors have the debugging information in the type,
13474 not the main variant. */
13475 return lookup_type_die (type);
13476 }
13477
13478 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13479 don't output a DW_TAG_typedef, since there isn't one in the
13480 user's program; just attach a DW_AT_name to the type.
13481 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13482 if the base type already has the same name. */
13483 if (name
13484 && ((TREE_CODE (name) != TYPE_DECL
13485 && (qualified_type == TYPE_MAIN_VARIANT (type)
13486 || (cv_quals == TYPE_UNQUALIFIED)))
13487 || (TREE_CODE (name) == TYPE_DECL
13488 && TREE_TYPE (name) == qualified_type
13489 && DECL_NAME (name))))
13490 {
13491 if (TREE_CODE (name) == TYPE_DECL)
13492 /* Could just call add_name_and_src_coords_attributes here,
13493 but since this is a builtin type it doesn't have any
13494 useful source coordinates anyway. */
13495 name = DECL_NAME (name);
13496 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13497 }
13498 /* This probably indicates a bug. */
13499 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13500 {
13501 name = TYPE_IDENTIFIER (type);
13502 add_name_attribute (mod_type_die,
13503 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13504 }
13505
13506 if (qualified_type && !reverse_base_type)
13507 equate_type_number_to_die (qualified_type, mod_type_die);
13508
13509 if (item_type)
13510 /* We must do this after the equate_type_number_to_die call, in case
13511 this is a recursive type. This ensures that the modified_type_die
13512 recursion will terminate even if the type is recursive. Recursive
13513 types are possible in Ada. */
13514 sub_die = modified_type_die (item_type,
13515 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13516 reverse,
13517 context_die);
13518
13519 if (sub_die != NULL)
13520 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13521
13522 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13523 if (TYPE_ARTIFICIAL (type))
13524 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13525
13526 return mod_type_die;
13527 }
13528
13529 /* Generate DIEs for the generic parameters of T.
13530 T must be either a generic type or a generic function.
13531 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13532
13533 static void
13534 gen_generic_params_dies (tree t)
13535 {
13536 tree parms, args;
13537 int parms_num, i;
13538 dw_die_ref die = NULL;
13539 int non_default;
13540
13541 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13542 return;
13543
13544 if (TYPE_P (t))
13545 die = lookup_type_die (t);
13546 else if (DECL_P (t))
13547 die = lookup_decl_die (t);
13548
13549 gcc_assert (die);
13550
13551 parms = lang_hooks.get_innermost_generic_parms (t);
13552 if (!parms)
13553 /* T has no generic parameter. It means T is neither a generic type
13554 nor a generic function. End of story. */
13555 return;
13556
13557 parms_num = TREE_VEC_LENGTH (parms);
13558 args = lang_hooks.get_innermost_generic_args (t);
13559 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13560 non_default = int_cst_value (TREE_CHAIN (args));
13561 else
13562 non_default = TREE_VEC_LENGTH (args);
13563 for (i = 0; i < parms_num; i++)
13564 {
13565 tree parm, arg, arg_pack_elems;
13566 dw_die_ref parm_die;
13567
13568 parm = TREE_VEC_ELT (parms, i);
13569 arg = TREE_VEC_ELT (args, i);
13570 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13571 gcc_assert (parm && TREE_VALUE (parm) && arg);
13572
13573 if (parm && TREE_VALUE (parm) && arg)
13574 {
13575 /* If PARM represents a template parameter pack,
13576 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13577 by DW_TAG_template_*_parameter DIEs for the argument
13578 pack elements of ARG. Note that ARG would then be
13579 an argument pack. */
13580 if (arg_pack_elems)
13581 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13582 arg_pack_elems,
13583 die);
13584 else
13585 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13586 true /* emit name */, die);
13587 if (i >= non_default)
13588 add_AT_flag (parm_die, DW_AT_default_value, 1);
13589 }
13590 }
13591 }
13592
13593 /* Create and return a DIE for PARM which should be
13594 the representation of a generic type parameter.
13595 For instance, in the C++ front end, PARM would be a template parameter.
13596 ARG is the argument to PARM.
13597 EMIT_NAME_P, if true, means the DIE will have a DW_AT_name attribute set
13598 to the name of PARM.
13599 PARENT_DIE is the parent DIE to which the newly created DIE should be
13600 added as a child node. */
13601
13602 static dw_die_ref
13603 generic_parameter_die (tree parm, tree arg,
13604 bool emit_name_p,
13605 dw_die_ref parent_die)
13606 {
13607 dw_die_ref tmpl_die = NULL;
13608 const char *name = NULL;
13609
13610 /* C++2a accepts class literals as template parameters, and var
13611 decls with initializers represent them. The VAR_DECLs would be
13612 rejected, but we can take the DECL_INITIAL constructor and
13613 attempt to expand it. */
13614 if (arg && VAR_P (arg))
13615 arg = DECL_INITIAL (arg);
13616
13617 if (!parm || !DECL_NAME (parm) || !arg)
13618 return NULL;
13619
13620 /* We support non-type generic parameters and arguments,
13621 type generic parameters and arguments, as well as
13622 generic generic parameters (a.k.a. template template parameters in C++)
13623 and arguments. */
13624 if (TREE_CODE (parm) == PARM_DECL)
13625 /* PARM is a non-type generic parameter. */
13626 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13627 else if (TREE_CODE (parm) == TYPE_DECL)
13628 /* PARM is a type generic parameter. */
13629 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13630 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13631 /* PARM is a generic generic parameter.
13632 Its DIE is a GNU extension. It shall have a
13633 DW_AT_name attribute to represent the name of the template template
13634 parameter, and a DW_AT_GNU_template_name attribute to represent the
13635 name of the template template argument. */
13636 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13637 parent_die, parm);
13638 else
13639 gcc_unreachable ();
13640
13641 if (tmpl_die)
13642 {
13643 tree tmpl_type;
13644
13645 /* If PARM is a generic parameter pack, it means we are
13646 emitting debug info for a template argument pack element.
13647 In other words, ARG is a template argument pack element.
13648 In that case, we don't emit any DW_AT_name attribute for
13649 the DIE. */
13650 if (emit_name_p)
13651 {
13652 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13653 gcc_assert (name);
13654 add_AT_string (tmpl_die, DW_AT_name, name);
13655 }
13656
13657 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13658 {
13659 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13660 TMPL_DIE should have a child DW_AT_type attribute that is set
13661 to the type of the argument to PARM, which is ARG.
13662 If PARM is a type generic parameter, TMPL_DIE should have a
13663 child DW_AT_type that is set to ARG. */
13664 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13665 add_type_attribute (tmpl_die, tmpl_type,
13666 (TREE_THIS_VOLATILE (tmpl_type)
13667 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13668 false, parent_die);
13669 }
13670 else
13671 {
13672 /* So TMPL_DIE is a DIE representing a generic generic template
13673 parameter, a.k.a. a template template parameter in C++,
13674 and ARG is a template. */
13675
13676 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13677 to the name of the argument. */
13678 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13679 if (name)
13680 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13681 }
13682
13683 if (TREE_CODE (parm) == PARM_DECL)
13684 /* So PARM is a non-type generic parameter.
13685 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13686 attribute of TMPL_DIE whose value represents the value
13687 of ARG.
13688 We must be careful here:
13689 the value of ARG might reference some function decls.
13690 We might currently be emitting debug info for a generic
13691 type, and since types are emitted before function decls, we
13692 don't know whether the function decls referenced by ARG will
13693 actually be emitted after cgraph computations.
13694 So we must defer the generation of the DW_AT_const_value
13695 until after cgraph is ready. */
13696 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13697 }
13698
13699 return tmpl_die;
13700 }
13701
13702 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13703 PARM_PACK, which must be a template parameter pack. The returned DIE
13704 will be a child DIE of PARENT_DIE. */
13705
13706 static dw_die_ref
13707 template_parameter_pack_die (tree parm_pack,
13708 tree parm_pack_args,
13709 dw_die_ref parent_die)
13710 {
13711 dw_die_ref die;
13712 int j;
13713
13714 gcc_assert (parent_die && parm_pack);
13715
13716 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13717 add_name_and_src_coords_attributes (die, parm_pack);
13718 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13719 generic_parameter_die (parm_pack,
13720 TREE_VEC_ELT (parm_pack_args, j),
13721 false /* Don't emit DW_AT_name */,
13722 die);
13723 return die;
13724 }
13725
13726 /* Return the DBX register number described by a given RTL node. */
13727
13728 static unsigned int
13729 dbx_reg_number (const_rtx rtl)
13730 {
13731 unsigned regno = REGNO (rtl);
13732
13733 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13734
13735 #ifdef LEAF_REG_REMAP
13736 if (crtl->uses_only_leaf_regs)
13737 {
13738 int leaf_reg = LEAF_REG_REMAP (regno);
13739 if (leaf_reg != -1)
13740 regno = (unsigned) leaf_reg;
13741 }
13742 #endif
13743
13744 regno = DBX_REGISTER_NUMBER (regno);
13745 gcc_assert (regno != INVALID_REGNUM);
13746 return regno;
13747 }
13748
13749 /* Optionally add a DW_OP_piece term to a location description expression.
13750 DW_OP_piece is only added if the location description expression does
13751 not already end with DW_OP_piece. */
13752
13753 static void
13754 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13755 {
13756 dw_loc_descr_ref loc;
13757
13758 if (*list_head != NULL)
13759 {
13760 /* Find the end of the chain. */
13761 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13762 ;
13763
13764 if (loc->dw_loc_opc != DW_OP_piece)
13765 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13766 }
13767 }
13768
13769 /* Return a location descriptor that designates a machine register or
13770 zero if there is none. */
13771
13772 static dw_loc_descr_ref
13773 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13774 {
13775 rtx regs;
13776
13777 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13778 return 0;
13779
13780 /* We only use "frame base" when we're sure we're talking about the
13781 post-prologue local stack frame. We do this by *not* running
13782 register elimination until this point, and recognizing the special
13783 argument pointer and soft frame pointer rtx's.
13784 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13785 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13786 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13787 {
13788 dw_loc_descr_ref result = NULL;
13789
13790 if (dwarf_version >= 4 || !dwarf_strict)
13791 {
13792 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13793 initialized);
13794 if (result)
13795 add_loc_descr (&result,
13796 new_loc_descr (DW_OP_stack_value, 0, 0));
13797 }
13798 return result;
13799 }
13800
13801 regs = targetm.dwarf_register_span (rtl);
13802
13803 if (REG_NREGS (rtl) > 1 || regs)
13804 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13805 else
13806 {
13807 unsigned int dbx_regnum = dbx_reg_number (rtl);
13808 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13809 return 0;
13810 return one_reg_loc_descriptor (dbx_regnum, initialized);
13811 }
13812 }
13813
13814 /* Return a location descriptor that designates a machine register for
13815 a given hard register number. */
13816
13817 static dw_loc_descr_ref
13818 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13819 {
13820 dw_loc_descr_ref reg_loc_descr;
13821
13822 if (regno <= 31)
13823 reg_loc_descr
13824 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13825 else
13826 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13827
13828 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13829 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13830
13831 return reg_loc_descr;
13832 }
13833
13834 /* Given an RTL of a register, return a location descriptor that
13835 designates a value that spans more than one register. */
13836
13837 static dw_loc_descr_ref
13838 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13839 enum var_init_status initialized)
13840 {
13841 int size, i;
13842 dw_loc_descr_ref loc_result = NULL;
13843
13844 /* Simple, contiguous registers. */
13845 if (regs == NULL_RTX)
13846 {
13847 unsigned reg = REGNO (rtl);
13848 int nregs;
13849
13850 #ifdef LEAF_REG_REMAP
13851 if (crtl->uses_only_leaf_regs)
13852 {
13853 int leaf_reg = LEAF_REG_REMAP (reg);
13854 if (leaf_reg != -1)
13855 reg = (unsigned) leaf_reg;
13856 }
13857 #endif
13858
13859 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13860 nregs = REG_NREGS (rtl);
13861
13862 /* At present we only track constant-sized pieces. */
13863 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13864 return NULL;
13865 size /= nregs;
13866
13867 loc_result = NULL;
13868 while (nregs--)
13869 {
13870 dw_loc_descr_ref t;
13871
13872 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13873 VAR_INIT_STATUS_INITIALIZED);
13874 add_loc_descr (&loc_result, t);
13875 add_loc_descr_op_piece (&loc_result, size);
13876 ++reg;
13877 }
13878 return loc_result;
13879 }
13880
13881 /* Now onto stupid register sets in non-contiguous locations. */
13882
13883 gcc_assert (GET_CODE (regs) == PARALLEL);
13884
13885 /* At present we only track constant-sized pieces. */
13886 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13887 return NULL;
13888 loc_result = NULL;
13889
13890 for (i = 0; i < XVECLEN (regs, 0); ++i)
13891 {
13892 dw_loc_descr_ref t;
13893
13894 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13895 VAR_INIT_STATUS_INITIALIZED);
13896 add_loc_descr (&loc_result, t);
13897 add_loc_descr_op_piece (&loc_result, size);
13898 }
13899
13900 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13901 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13902 return loc_result;
13903 }
13904
13905 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13906
13907 /* Return a location descriptor that designates a constant i,
13908 as a compound operation from constant (i >> shift), constant shift
13909 and DW_OP_shl. */
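/* For example, int_shift_loc_descriptor (0x12000000, 24) yields
   DW_OP_lit18 DW_OP_lit24 DW_OP_shl, 3 bytes instead of the 5 bytes
   a DW_OP_const4u would take.  */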
13910
13911 static dw_loc_descr_ref
13912 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13913 {
13914 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13915 add_loc_descr (&ret, int_loc_descriptor (shift));
13916 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13917 return ret;
13918 }
13919
13920 /* Return a location descriptor that designates constant POLY_I. */
13921
13922 static dw_loc_descr_ref
13923 int_loc_descriptor (poly_int64 poly_i)
13924 {
13925 enum dwarf_location_atom op;
13926
13927 HOST_WIDE_INT i;
13928 if (!poly_i.is_constant (&i))
13929 {
13930 /* Create location descriptions for the non-constant part and
13931 add any constant offset at the end. */
13932 dw_loc_descr_ref ret = NULL;
13933 HOST_WIDE_INT constant = poly_i.coeffs[0];
13934 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13935 {
13936 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13937 if (coeff != 0)
13938 {
13939 dw_loc_descr_ref start = ret;
13940 unsigned int factor;
13941 int bias;
13942 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13943 (j, &factor, &bias);
13944
13945 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13946 add COEFF * (REGNO / FACTOR) now and subtract
13947 COEFF * BIAS from the final constant part. */
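/* For instance, on a hypothetical target where factor == 2 and bias == 0,
   a coefficient of 8 pushes the indeterminate register's value (via
   new_reg_loc_descr) followed by DW_OP_lit4 DW_OP_mul, since 8 / 2 == 4,
   whereas a coefficient of 3 is not divisible by the factor and instead
   emits DW_OP_lit1 DW_OP_shr DW_OP_lit3 DW_OP_mul after the register.  */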
13948 constant -= coeff * bias;
13949 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13950 if (coeff % factor == 0)
13951 coeff /= factor;
13952 else
13953 {
13954 int amount = exact_log2 (factor);
13955 gcc_assert (amount >= 0);
13956 add_loc_descr (&ret, int_loc_descriptor (amount));
13957 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13958 }
13959 if (coeff != 1)
13960 {
13961 add_loc_descr (&ret, int_loc_descriptor (coeff));
13962 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13963 }
13964 if (start)
13965 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13966 }
13967 }
13968 loc_descr_plus_const (&ret, constant);
13969 return ret;
13970 }
13971
13972 /* Pick the smallest representation of a constant, rather than just
13973 defaulting to the LEB encoding. */
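/* For example, 5 is emitted as the single byte DW_OP_lit5, 300 as
   DW_OP_const2u (3 bytes), and only values that fit none of the cases
   below fall back to the uleb128-encoded DW_OP_constu.  */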
13974 if (i >= 0)
13975 {
13976 int clz = clz_hwi (i);
13977 int ctz = ctz_hwi (i);
13978 if (i <= 31)
13979 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13980 else if (i <= 0xff)
13981 op = DW_OP_const1u;
13982 else if (i <= 0xffff)
13983 op = DW_OP_const2u;
13984 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13985 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13986 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13987 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13988 while DW_OP_const4u is 5 bytes. */
13989 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13990 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13991 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13992 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13993 while DW_OP_const4u is 5 bytes. */
13994 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13995
13996 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13997 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13998 <= 4)
13999 {
14000 /* As i >= 2**31, the double cast above will yield a negative number.
14001 Since wrapping is defined in DWARF expressions we can output big
14002 positive integers as small negative ones, regardless of the size
14003 of host wide ints.
14004
14005 Here, since the evaluator will handle 32-bit values and since i >=
14006 2**31, we know it's going to be interpreted as a negative literal:
14007 store it this way if we can do better than 5 bytes this way. */
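/* E.g. on a 32-bit target, i == 0xffffff00 is re-encoded as the signed
   value -256, which fits DW_OP_const2s in 3 bytes instead of the 5 bytes
   DW_OP_const4u would need.  */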
14008 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14009 }
14010 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14011 op = DW_OP_const4u;
14012
14013 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
14014 least 6 bytes: see if we can do better before falling back to it. */
14015 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14016 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14017 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
14018 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
14019 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14020 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
14021 >= HOST_BITS_PER_WIDE_INT)
14022 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
14023 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
14024 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
14025 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14026 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14027 && size_of_uleb128 (i) > 6)
14028 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
14029 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
14030 else
14031 op = DW_OP_constu;
14032 }
14033 else
14034 {
14035 if (i >= -0x80)
14036 op = DW_OP_const1s;
14037 else if (i >= -0x8000)
14038 op = DW_OP_const2s;
14039 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14040 {
14041 if (size_of_int_loc_descriptor (i) < 5)
14042 {
14043 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14044 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14045 return ret;
14046 }
14047 op = DW_OP_const4s;
14048 }
14049 else
14050 {
14051 if (size_of_int_loc_descriptor (i)
14052 < (unsigned long) 1 + size_of_sleb128 (i))
14053 {
14054 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14055 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14056 return ret;
14057 }
14058 op = DW_OP_consts;
14059 }
14060 }
14061
14062 return new_loc_descr (op, i, 0);
14063 }
14064
14065 /* Likewise, for unsigned constants. */
14066
14067 static dw_loc_descr_ref
14068 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
14069 {
14070 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
14071 const unsigned HOST_WIDE_INT max_uint
14072 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
14073
14074 /* If possible, use the clever signed constants handling. */
14075 if (i <= max_int)
14076 return int_loc_descriptor ((HOST_WIDE_INT) i);
14077
14078 /* Here, we are left with positive numbers that cannot be represented as
14079 HOST_WIDE_INT, i.e.:
14080 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
14081
14082 Using a DW_OP_const4u/DW_OP_const8u operation to encode them consumes
14083 a lot of bytes, whereas it may be better to output a negative integer:
14084 thanks to integer wrapping, we know that:
14085 x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
14086 = x - 2 * (max (HOST_WIDE_INT) + 1)
14087 So numbers close to max (unsigned HOST_WIDE_INT) could be represented as
14088 small negative integers. Let's try that in cases where it will clearly
14089 improve the encoding: there is no gain turning DW_OP_const4u into
14090 DW_OP_const4s. */
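/* E.g. with DWARF2_ADDR_SIZE == 8 and a 64-bit HOST_WIDE_INT,
   i == 0xffffffffffffff00 yields second_shift == -256 below and is
   emitted as DW_OP_const2s (3 bytes) rather than DW_OP_const8u (9 bytes).  */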
14091 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
14092 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
14093 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
14094 {
14095 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14096
14097 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14098 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14099 const HOST_WIDE_INT second_shift
14100 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14101
14102 /* So we finally have:
14103 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14104 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14105 return int_loc_descriptor (second_shift);
14106 }
14107
14108 /* Last chance: fallback to a simple constant operation. */
14109 return new_loc_descr
14110 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14111 ? DW_OP_const4u
14112 : DW_OP_const8u,
14113 i, 0);
14114 }
14115
14116 /* Generate and return a location description that computes the unsigned
14117 comparison of the two stack top entries (a OP b where b is the top-most
14118 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14119 LE_EXPR, GT_EXPR or GE_EXPR. */
14120
14121 static dw_loc_descr_ref
14122 uint_comparison_loc_list (enum tree_code kind)
14123 {
14124 enum dwarf_location_atom op, flip_op;
14125 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14126
14127 switch (kind)
14128 {
14129 case LT_EXPR:
14130 op = DW_OP_lt;
14131 break;
14132 case LE_EXPR:
14133 op = DW_OP_le;
14134 break;
14135 case GT_EXPR:
14136 op = DW_OP_gt;
14137 break;
14138 case GE_EXPR:
14139 op = DW_OP_ge;
14140 break;
14141 default:
14142 gcc_unreachable ();
14143 }
14144
14145 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14146 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14147
14148   /* Up to DWARF v4, operations all work on signed integers.  It is nevertheless
14149      possible to perform unsigned comparisons: we just have to distinguish
14150      two cases:
14151
14152 1. when a and b have the same sign (as signed integers); then we should
14153 return: a OP(signed) b;
14154
14155 2. when a is a negative signed integer while b is a positive one, then a
14156 is a greater unsigned integer than b; likewise when a and b's roles
14157 are flipped.
14158
14159 So first, compare the sign of the two operands. */
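  /* The expression built below therefore has the overall shape:
         DW_OP_over DW_OP_over DW_OP_xor DW_OP_lit0 DW_OP_lt DW_OP_bra <L1>
         <op> DW_OP_skip <L2>
     L1: <flipped op>
     L2: DW_OP_nop  */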
14160 ret = new_loc_descr (DW_OP_over, 0, 0);
14161 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14162 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14163 /* If they have different signs (i.e. they have different sign bits), then
14164      the stack top value now has the sign bit set and thus it's smaller than
14165 zero. */
14166 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14167 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14168 add_loc_descr (&ret, bra_node);
14169
14170 /* We are in case 1. At this point, we know both operands have the same
14171      sign, so it's safe to use the built-in signed comparison. */
14172 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14173 add_loc_descr (&ret, jmp_node);
14174
14175 /* We are in case 2. Here, we know both operands do not have the same sign,
14176 so we have to flip the signed comparison. */
14177 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14178 tmp = new_loc_descr (flip_op, 0, 0);
14179 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14180 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14181 add_loc_descr (&ret, tmp);
14182
14183 /* This dummy operation is necessary to make the two branches join. */
14184 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14185 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14186 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14187 add_loc_descr (&ret, tmp);
14188
14189 return ret;
14190 }
14191
14192 /* Likewise, but takes the location description lists (might be destructive on
14193 them). Return NULL if either is NULL or if concatenation fails. */
14194
14195 static dw_loc_list_ref
14196 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14197 enum tree_code kind)
14198 {
14199 if (left == NULL || right == NULL)
14200 return NULL;
14201
14202 add_loc_list (&left, right);
14203 if (left == NULL)
14204 return NULL;
14205
14206 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14207 return left;
14208 }
14209
14210 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14211 without actually allocating it. */
14212
14213 static unsigned long
14214 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14215 {
14216 return size_of_int_loc_descriptor (i >> shift)
14217 + size_of_int_loc_descriptor (shift)
14218 + 1;
14219 }
14220
14221 /* Return size_of_locs (int_loc_descriptor (i)) without
14222 actually allocating it. */
14223
14224 static unsigned long
14225 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14226 {
14227 unsigned long s;
14228
14229 if (i >= 0)
14230 {
14231 int clz, ctz;
14232 if (i <= 31)
14233 return 1;
14234 else if (i <= 0xff)
14235 return 2;
14236 else if (i <= 0xffff)
14237 return 3;
14238 clz = clz_hwi (i);
14239 ctz = ctz_hwi (i);
14240 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14241 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14242 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14243 - clz - 5);
14244 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14245 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14246 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14247 - clz - 8);
14248 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14249 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14250 <= 4)
14251 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14252 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14253 return 5;
14254 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14255 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14256 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14257 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14258 - clz - 8);
14259 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14260 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14261 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14262 - clz - 16);
14263 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14264 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14265 && s > 6)
14266 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14267 - clz - 32);
14268 else
14269 return 1 + s;
14270 }
14271 else
14272 {
14273 if (i >= -0x80)
14274 return 2;
14275 else if (i >= -0x8000)
14276 return 3;
14277 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14278 {
14279 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14280 {
14281 s = size_of_int_loc_descriptor (-i) + 1;
14282 if (s < 5)
14283 return s;
14284 }
14285 return 5;
14286 }
14287 else
14288 {
14289 unsigned long r = 1 + size_of_sleb128 (i);
14290 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14291 {
14292 s = size_of_int_loc_descriptor (-i) + 1;
14293 if (s < r)
14294 return s;
14295 }
14296 return r;
14297 }
14298 }
14299 }
14300
14301 /* Return a location description representing the "address" of an integer
14302    value.  This can appear only as a top-level expression. */
14303
14304 static dw_loc_descr_ref
14305 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14306 {
14307 int litsize;
14308 dw_loc_descr_ref loc_result = NULL;
14309
14310 if (!(dwarf_version >= 4 || !dwarf_strict))
14311 return NULL;
14312
14313 litsize = size_of_int_loc_descriptor (i);
14314 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14315 is more compact. For DW_OP_stack_value we need:
14316 litsize + 1 (DW_OP_stack_value)
14317 and for DW_OP_implicit_value:
14318 1 (DW_OP_implicit_value) + 1 (length) + size. */
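  /* For example, with size == 4 and i == 5 the literal costs litsize == 1
     (DW_OP_lit5), so the DW_OP_stack_value form takes 2 bytes against
     1 + 1 + 4 == 6 bytes for DW_OP_implicit_value, assuming
     DWARF2_ADDR_SIZE >= 4.  */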
14319 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14320 {
14321 loc_result = int_loc_descriptor (i);
14322 add_loc_descr (&loc_result,
14323 new_loc_descr (DW_OP_stack_value, 0, 0));
14324 return loc_result;
14325 }
14326
14327 loc_result = new_loc_descr (DW_OP_implicit_value,
14328 size, 0);
14329 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14330 loc_result->dw_loc_oprnd2.v.val_int = i;
14331 return loc_result;
14332 }
14333
14334 /* Return a location descriptor that designates a base+offset location. */
14335
14336 static dw_loc_descr_ref
14337 based_loc_descr (rtx reg, poly_int64 offset,
14338 enum var_init_status initialized)
14339 {
14340 unsigned int regno;
14341 dw_loc_descr_ref result;
14342 dw_fde_ref fde = cfun->fde;
14343
14344 /* We only use "frame base" when we're sure we're talking about the
14345 post-prologue local stack frame. We do this by *not* running
14346 register elimination until this point, and recognizing the special
14347 argument pointer and soft frame pointer rtx's. */
14348 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14349 {
14350 rtx elim = (ira_use_lra_p
14351 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14352 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14353
14354 if (elim != reg)
14355 {
14356 /* Allow hard frame pointer here even if frame pointer
14357 isn't used since hard frame pointer is encoded with
14358 DW_OP_fbreg which uses the DW_AT_frame_base attribute,
14359 not hard frame pointer directly. */
14360 elim = strip_offset_and_add (elim, &offset);
14361 gcc_assert (elim == hard_frame_pointer_rtx
14362 || elim == stack_pointer_rtx);
14363
14364 /* If drap register is used to align stack, use frame
14365 pointer + offset to access stack variables. If stack
14366 is aligned without drap, use stack pointer + offset to
14367 access stack variables. */
14368 if (crtl->stack_realign_tried
14369 && reg == frame_pointer_rtx)
14370 {
14371 int base_reg
14372 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14373 ? HARD_FRAME_POINTER_REGNUM
14374 : REGNO (elim));
14375 return new_reg_loc_descr (base_reg, offset);
14376 }
14377
14378 gcc_assert (frame_pointer_fb_offset_valid);
14379 offset += frame_pointer_fb_offset;
14380 HOST_WIDE_INT const_offset;
14381 if (offset.is_constant (&const_offset))
14382 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14383 else
14384 {
14385 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14386 loc_descr_plus_const (&ret, offset);
14387 return ret;
14388 }
14389 }
14390 }
14391
14392 regno = REGNO (reg);
14393 #ifdef LEAF_REG_REMAP
14394 if (crtl->uses_only_leaf_regs)
14395 {
14396 int leaf_reg = LEAF_REG_REMAP (regno);
14397 if (leaf_reg != -1)
14398 regno = (unsigned) leaf_reg;
14399 }
14400 #endif
14401 regno = DWARF_FRAME_REGNUM (regno);
14402
14403 HOST_WIDE_INT const_offset;
14404 if (!optimize && fde
14405 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14406 && offset.is_constant (&const_offset))
14407 {
14408       /* Use cfa+offset to represent the location of arguments passed
14409          on the stack when drap is used to align the stack.
14410          Only do this when not optimizing; for optimized code var-tracking
14411          is supposed to track where the arguments live, and the register
14412          used as vdrap or drap in some spot might be used for something
14413          else in other parts of the routine. */
14414 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14415 }
14416
14417 result = new_reg_loc_descr (regno, offset);
14418
14419 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14420 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14421
14422 return result;
14423 }
14424
14425 /* Return true if this RTL expression describes a base+offset calculation. */
14426
14427 static inline int
14428 is_based_loc (const_rtx rtl)
14429 {
14430 return (GET_CODE (rtl) == PLUS
14431 && ((REG_P (XEXP (rtl, 0))
14432 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14433 && CONST_INT_P (XEXP (rtl, 1)))));
14434 }
14435
14436 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14437 failed. */
14438
14439 static dw_loc_descr_ref
14440 tls_mem_loc_descriptor (rtx mem)
14441 {
14442 tree base;
14443 dw_loc_descr_ref loc_result;
14444
14445 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14446 return NULL;
14447
14448 base = get_base_address (MEM_EXPR (mem));
14449 if (base == NULL
14450 || !VAR_P (base)
14451 || !DECL_THREAD_LOCAL_P (base))
14452 return NULL;
14453
14454 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14455 if (loc_result == NULL)
14456 return NULL;
14457
14458 if (maybe_ne (MEM_OFFSET (mem), 0))
14459 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14460
14461 return loc_result;
14462 }
14463
14464 /* Output debug info about the reason why we failed to expand an expression
14465    as a DWARF expression. */
14466
14467 static void
14468 expansion_failed (tree expr, rtx rtl, char const *reason)
14469 {
14470 if (dump_file && (dump_flags & TDF_DETAILS))
14471 {
14472 fprintf (dump_file, "Failed to expand as dwarf: ");
14473 if (expr)
14474 print_generic_expr (dump_file, expr, dump_flags);
14475 if (rtl)
14476 {
14477 fprintf (dump_file, "\n");
14478 print_rtl (dump_file, rtl);
14479 }
14480 fprintf (dump_file, "\nReason: %s\n", reason);
14481 }
14482 }
14483
14484 /* Helper function for const_ok_for_output. */
14485
14486 static bool
14487 const_ok_for_output_1 (rtx rtl)
14488 {
14489 if (targetm.const_not_ok_for_debug_p (rtl))
14490 {
14491 if (GET_CODE (rtl) != UNSPEC)
14492 {
14493 expansion_failed (NULL_TREE, rtl,
14494 "Expression rejected for debug by the backend.\n");
14495 return false;
14496 }
14497
14498 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14499 the target hook doesn't explicitly allow it in debug info, assume
14500 we can't express it in the debug info. */
14501 /* Don't complain about TLS UNSPECs, those are just too hard to
14502 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14503 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14504 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14505 if (flag_checking
14506 && (XVECLEN (rtl, 0) == 0
14507 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14508 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14509 inform (current_function_decl
14510 ? DECL_SOURCE_LOCATION (current_function_decl)
14511 : UNKNOWN_LOCATION,
14512 #if NUM_UNSPEC_VALUES > 0
14513 "non-delegitimized UNSPEC %s (%d) found in variable location",
14514 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14515 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14516 #else
14517 "non-delegitimized UNSPEC %d found in variable location",
14518 #endif
14519 XINT (rtl, 1));
14520 expansion_failed (NULL_TREE, rtl,
14521 "UNSPEC hasn't been delegitimized.\n");
14522 return false;
14523 }
14524
14525 if (CONST_POLY_INT_P (rtl))
14526 return false;
14527
14528 /* FIXME: Refer to PR60655. It is possible for simplification
14529 of rtl expressions in var tracking to produce such expressions.
14530 We should really identify / validate expressions
14531 enclosed in CONST that can be handled by assemblers on various
14532 targets and only handle legitimate cases here. */
14533 switch (GET_CODE (rtl))
14534 {
14535 case SYMBOL_REF:
14536 break;
14537 case NOT:
14538 case NEG:
14539 return false;
14540 case PLUS:
14541 {
14542 /* Make sure SYMBOL_REFs/UNSPECs are at most in one of the
14543 operands. */
14544 subrtx_var_iterator::array_type array;
14545 bool first = false;
14546 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14547 if (SYMBOL_REF_P (*iter)
14548 || LABEL_P (*iter)
14549 || GET_CODE (*iter) == UNSPEC)
14550 {
14551 first = true;
14552 break;
14553 }
14554 if (!first)
14555 return true;
14556 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
14557 if (SYMBOL_REF_P (*iter)
14558 || LABEL_P (*iter)
14559 || GET_CODE (*iter) == UNSPEC)
14560 return false;
14561 return true;
14562 }
14563 case MINUS:
14564 {
14565 /* Disallow negation of SYMBOL_REFs or UNSPECs when they
14566 appear in the second operand of MINUS. */
14567 subrtx_var_iterator::array_type array;
14568 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
14569 if (SYMBOL_REF_P (*iter)
14570 || LABEL_P (*iter)
14571 || GET_CODE (*iter) == UNSPEC)
14572 return false;
14573 return true;
14574 }
14575 default:
14576 return true;
14577 }
14578
14579 if (CONSTANT_POOL_ADDRESS_P (rtl))
14580 {
14581 bool marked;
14582 get_pool_constant_mark (rtl, &marked);
14583 /* If all references to this pool constant were optimized away,
14584 it was not output and thus we can't represent it. */
14585 if (!marked)
14586 {
14587 expansion_failed (NULL_TREE, rtl,
14588 "Constant was removed from constant pool.\n");
14589 return false;
14590 }
14591 }
14592
14593 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14594 return false;
14595
14596   /* Avoid references to external symbols in debug info; on several targets
14597      the linker might even refuse to link when linking a shared library,
14598      and in many other cases the relocations for .debug_info/.debug_loc are
14599      dropped, so the address becomes zero anyway.  Hidden symbols, guaranteed
14600      to be defined within the same shared library or executable, are fine. */
14601 if (SYMBOL_REF_EXTERNAL_P (rtl))
14602 {
14603 tree decl = SYMBOL_REF_DECL (rtl);
14604
14605 if (decl == NULL || !targetm.binds_local_p (decl))
14606 {
14607 expansion_failed (NULL_TREE, rtl,
14608 "Symbol not defined in current TU.\n");
14609 return false;
14610 }
14611 }
14612
14613 return true;
14614 }
14615
14616 /* Return true if constant RTL can be emitted in DW_OP_addr or
14617 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14618 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14619
14620 static bool
14621 const_ok_for_output (rtx rtl)
14622 {
14623 if (GET_CODE (rtl) == SYMBOL_REF)
14624 return const_ok_for_output_1 (rtl);
14625
14626 if (GET_CODE (rtl) == CONST)
14627 {
14628 subrtx_var_iterator::array_type array;
14629 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14630 if (!const_ok_for_output_1 (*iter))
14631 return false;
14632 return true;
14633 }
14634
14635 return true;
14636 }
14637
14638 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14639 if possible, NULL otherwise. */
14640
14641 static dw_die_ref
14642 base_type_for_mode (machine_mode mode, bool unsignedp)
14643 {
14644 dw_die_ref type_die;
14645 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14646
14647 if (type == NULL)
14648 return NULL;
14649 switch (TREE_CODE (type))
14650 {
14651 case INTEGER_TYPE:
14652 case REAL_TYPE:
14653 break;
14654 default:
14655 return NULL;
14656 }
14657 type_die = lookup_type_die (type);
14658 if (!type_die)
14659 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14660 comp_unit_die ());
14661 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14662 return NULL;
14663 return type_die;
14664 }
14665
14666 /* For the descriptor OP, assumed to be in unsigned MODE, convert it to an
14667    unsigned type matching MODE, or, if MODE is narrower than or as wide as
14668    DWARF2_ADDR_SIZE, to an untyped value.  Return NULL if the conversion is
14669    not possible. */
14670
14671 static dw_loc_descr_ref
14672 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14673 {
14674 machine_mode outer_mode = mode;
14675 dw_die_ref type_die;
14676 dw_loc_descr_ref cvt;
14677
14678 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14679 {
14680 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14681 return op;
14682 }
14683 type_die = base_type_for_mode (outer_mode, 1);
14684 if (type_die == NULL)
14685 return NULL;
14686 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14687 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14688 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14689 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14690 add_loc_descr (&op, cvt);
14691 return op;
14692 }
14693
14694 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14695
14696 static dw_loc_descr_ref
14697 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14698 dw_loc_descr_ref op1)
14699 {
14700 dw_loc_descr_ref ret = op0;
14701 add_loc_descr (&ret, op1);
14702 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
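  /* DWARF comparison operators push 1 for true and 0 for false; when the
     target's STORE_FLAG_VALUE is not 1 (some targets use -1), multiply the
     result by it so the value matches what the RTL comparison would yield.  */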
14703 if (STORE_FLAG_VALUE != 1)
14704 {
14705 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14706 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14707 }
14708 return ret;
14709 }
14710
14711 /* Subroutine of scompare_loc_descriptor for the case in which we're
14712 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14713 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14714
14715 static dw_loc_descr_ref
14716 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14717 scalar_int_mode op_mode,
14718 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14719 {
14720 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14721 dw_loc_descr_ref cvt;
14722
14723 if (type_die == NULL)
14724 return NULL;
14725 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14726 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14727 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14728 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14729 add_loc_descr (&op0, cvt);
14730 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14731 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14732 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14733 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14734 add_loc_descr (&op1, cvt);
14735 return compare_loc_descriptor (op, op0, op1);
14736 }
14737
14738 /* Subroutine of scompare_loc_descriptor for the case in which we're
14739 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14740 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14741
14742 static dw_loc_descr_ref
14743 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14744 scalar_int_mode op_mode,
14745 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14746 {
14747 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14748 /* For eq/ne, if the operands are known to be zero-extended,
14749 there is no need to do the fancy shifting up. */
14750 if (op == DW_OP_eq || op == DW_OP_ne)
14751 {
14752 dw_loc_descr_ref last0, last1;
14753 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14754 ;
14755 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14756 ;
14757 /* deref_size zero extends, and for constants we can check
14758 whether they are zero extended or not. */
14759 if (((last0->dw_loc_opc == DW_OP_deref_size
14760 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14761 || (CONST_INT_P (XEXP (rtl, 0))
14762 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14763 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14764 && ((last1->dw_loc_opc == DW_OP_deref_size
14765 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14766 || (CONST_INT_P (XEXP (rtl, 1))
14767 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14768 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14769 return compare_loc_descriptor (op, op0, op1);
14770
14771 /* EQ/NE comparison against constant in narrower type than
14772 DWARF2_ADDR_SIZE can be performed either as
14773 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14774 DW_OP_{eq,ne}
14775 or
14776 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14777 DW_OP_{eq,ne}. Pick whatever is shorter. */
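      /* For instance, comparing a QImode value against 0x50 on a target with
         DWARF2_ADDR_SIZE == 8: the mask form
             DW_OP_const1u 0xff DW_OP_and ... DW_OP_const1u 0x50 DW_OP_eq
         is shorter than shifting both operands left by 56 bits, so the size
         comparison below picks the mask form.  */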
14778 if (CONST_INT_P (XEXP (rtl, 1))
14779 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14780 && (size_of_int_loc_descriptor (shift) + 1
14781 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14782 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14783 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14784 & GET_MODE_MASK (op_mode))))
14785 {
14786 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14787 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14788 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14789 & GET_MODE_MASK (op_mode));
14790 return compare_loc_descriptor (op, op0, op1);
14791 }
14792 }
14793 add_loc_descr (&op0, int_loc_descriptor (shift));
14794 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14795 if (CONST_INT_P (XEXP (rtl, 1)))
14796 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14797 else
14798 {
14799 add_loc_descr (&op1, int_loc_descriptor (shift));
14800 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14801 }
14802 return compare_loc_descriptor (op, op0, op1);
14803 }
14804
14805 /* Return location descriptor for signed comparison OP RTL. */
14806
14807 static dw_loc_descr_ref
14808 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14809 machine_mode mem_mode)
14810 {
14811 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14812 dw_loc_descr_ref op0, op1;
14813
14814 if (op_mode == VOIDmode)
14815 op_mode = GET_MODE (XEXP (rtl, 1));
14816 if (op_mode == VOIDmode)
14817 return NULL;
14818
14819 scalar_int_mode int_op_mode;
14820 if (dwarf_strict
14821 && dwarf_version < 5
14822 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14823 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14824 return NULL;
14825
14826 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14827 VAR_INIT_STATUS_INITIALIZED);
14828 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14829 VAR_INIT_STATUS_INITIALIZED);
14830
14831 if (op0 == NULL || op1 == NULL)
14832 return NULL;
14833
14834 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14835 {
14836 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14837 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14838
14839 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14840 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14841 }
14842 return compare_loc_descriptor (op, op0, op1);
14843 }
14844
14845 /* Return location descriptor for unsigned comparison OP RTL. */
14846
14847 static dw_loc_descr_ref
14848 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14849 machine_mode mem_mode)
14850 {
14851 dw_loc_descr_ref op0, op1;
14852
14853 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14854 if (test_op_mode == VOIDmode)
14855 test_op_mode = GET_MODE (XEXP (rtl, 1));
14856
14857 scalar_int_mode op_mode;
14858 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14859 return NULL;
14860
14861 if (dwarf_strict
14862 && dwarf_version < 5
14863 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14864 return NULL;
14865
14866 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14867 VAR_INIT_STATUS_INITIALIZED);
14868 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14869 VAR_INIT_STATUS_INITIALIZED);
14870
14871 if (op0 == NULL || op1 == NULL)
14872 return NULL;
14873
14874 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14875 {
14876 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14877 dw_loc_descr_ref last0, last1;
14878 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14879 ;
14880 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14881 ;
14882 if (CONST_INT_P (XEXP (rtl, 0)))
14883 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14884 /* deref_size zero extends, so no need to mask it again. */
14885 else if (last0->dw_loc_opc != DW_OP_deref_size
14886 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14887 {
14888 add_loc_descr (&op0, int_loc_descriptor (mask));
14889 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14890 }
14891 if (CONST_INT_P (XEXP (rtl, 1)))
14892 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14893 /* deref_size zero extends, so no need to mask it again. */
14894 else if (last1->dw_loc_opc != DW_OP_deref_size
14895 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14896 {
14897 add_loc_descr (&op1, int_loc_descriptor (mask));
14898 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14899 }
14900 }
14901 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14902 {
14903 HOST_WIDE_INT bias = 1;
14904 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
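      /* Adding the 2^(N-1) bias, with arithmetic wrapping at the N-bit stack
         width, maps the unsigned ordering onto the signed one.  Illustration
         with N == 8: 0xf0 <u 0x10 is false; after biasing, the signed compare
         is 0x70 < -0x70, which is false as well.  */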
14905 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14906 if (CONST_INT_P (XEXP (rtl, 1)))
14907 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14908 + INTVAL (XEXP (rtl, 1)));
14909 else
14910 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14911 bias, 0));
14912 }
14913 return compare_loc_descriptor (op, op0, op1);
14914 }
14915
14916 /* Return location descriptor for {U,S}{MIN,MAX}. */
14917
14918 static dw_loc_descr_ref
14919 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14920 machine_mode mem_mode)
14921 {
14922 enum dwarf_location_atom op;
14923 dw_loc_descr_ref op0, op1, ret;
14924 dw_loc_descr_ref bra_node, drop_node;
14925
14926 scalar_int_mode int_mode;
14927 if (dwarf_strict
14928 && dwarf_version < 5
14929 && (!is_a <scalar_int_mode> (mode, &int_mode)
14930 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14931 return NULL;
14932
14933 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14934 VAR_INIT_STATUS_INITIALIZED);
14935 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14936 VAR_INIT_STATUS_INITIALIZED);
14937
14938 if (op0 == NULL || op1 == NULL)
14939 return NULL;
14940
14941 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14942 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14943 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14944 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14945 {
14946 /* Checked by the caller. */
14947 int_mode = as_a <scalar_int_mode> (mode);
14948 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14949 {
14950 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14951 add_loc_descr (&op0, int_loc_descriptor (mask));
14952 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14953 add_loc_descr (&op1, int_loc_descriptor (mask));
14954 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14955 }
14956 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14957 {
14958 HOST_WIDE_INT bias = 1;
14959 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14960 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14961 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14962 }
14963 }
14964 else if (is_a <scalar_int_mode> (mode, &int_mode)
14965 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14966 {
14967 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14968 add_loc_descr (&op0, int_loc_descriptor (shift));
14969 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14970 add_loc_descr (&op1, int_loc_descriptor (shift));
14971 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14972 }
14973 else if (is_a <scalar_int_mode> (mode, &int_mode)
14974 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14975 {
14976 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14977 dw_loc_descr_ref cvt;
14978 if (type_die == NULL)
14979 return NULL;
14980 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14981 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14982 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14983 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14984 add_loc_descr (&op0, cvt);
14985 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14986 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14987 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14988 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14989 add_loc_descr (&op1, cvt);
14990 }
14991
14992 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14993 op = DW_OP_lt;
14994 else
14995 op = DW_OP_gt;
14996 ret = op0;
14997 add_loc_descr (&ret, op1);
14998 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14999 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
15000 add_loc_descr (&ret, bra_node);
15001 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15002 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
15003 add_loc_descr (&ret, drop_node);
15004 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
15005 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
15006 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
15007 && is_a <scalar_int_mode> (mode, &int_mode)
15008 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15009 ret = convert_descriptor_to_mode (int_mode, ret);
15010 return ret;
15011 }
15012
15013 /* Helper function for mem_loc_descriptor.  Perform the binary operation OP
15014    after converting both arguments to TYPE_DIE, then convert the result back
15015    to unsigned MODE (untyped if MODE fits in DWARF2_ADDR_SIZE). */
15016
15017 static dw_loc_descr_ref
15018 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
15019 scalar_int_mode mode, machine_mode mem_mode)
15020 {
15021 dw_loc_descr_ref cvt, op0, op1;
15022
15023 if (type_die == NULL)
15024 return NULL;
15025 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15026 VAR_INIT_STATUS_INITIALIZED);
15027 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15028 VAR_INIT_STATUS_INITIALIZED);
15029 if (op0 == NULL || op1 == NULL)
15030 return NULL;
15031 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15032 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15033 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15034 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15035 add_loc_descr (&op0, cvt);
15036 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15037 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15038 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15039 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15040 add_loc_descr (&op1, cvt);
15041 add_loc_descr (&op0, op1);
15042 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
15043 return convert_descriptor_to_mode (mode, op0);
15044 }
15045
15046 /* CLZ (where constV is the value computed by CLZ_DEFINED_VALUE_AT_ZERO,
15047    const0 is DW_OP_lit0 or the corresponding typed constant,
15048    const1 is DW_OP_lit1 or the corresponding typed constant,
15049    and constMSB is the constant with just the MSB bit set
15050 for the mode):
15051 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
15052 L1: const0 DW_OP_swap
15053 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
15054 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15055 L3: DW_OP_drop
15056 L4: DW_OP_nop
15057
15058 CTZ is similar:
15059 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
15060 L1: const0 DW_OP_swap
15061 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15062 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15063 L3: DW_OP_drop
15064 L4: DW_OP_nop
15065
15066 FFS is similar:
15067 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
15068 L1: const1 DW_OP_swap
15069 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15070 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15071 L3: DW_OP_drop
15072 L4: DW_OP_nop */
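/* A worked CLZ example: for a 32-bit operand 0x00100000 the L2 loop shifts
   the value left and bumps the counter eleven times before the constMSB test
   succeeds, leaving 11 (the number of leading zeros) on the stack.  */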
15073
15074 static dw_loc_descr_ref
15075 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
15076 machine_mode mem_mode)
15077 {
15078 dw_loc_descr_ref op0, ret, tmp;
15079 HOST_WIDE_INT valv;
15080 dw_loc_descr_ref l1jump, l1label;
15081 dw_loc_descr_ref l2jump, l2label;
15082 dw_loc_descr_ref l3jump, l3label;
15083 dw_loc_descr_ref l4jump, l4label;
15084 rtx msb;
15085
15086 if (GET_MODE (XEXP (rtl, 0)) != mode)
15087 return NULL;
15088
15089 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15090 VAR_INIT_STATUS_INITIALIZED);
15091 if (op0 == NULL)
15092 return NULL;
15093 ret = op0;
15094 if (GET_CODE (rtl) == CLZ)
15095 {
15096 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15097 valv = GET_MODE_BITSIZE (mode);
15098 }
15099 else if (GET_CODE (rtl) == FFS)
15100 valv = 0;
15101 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15102 valv = GET_MODE_BITSIZE (mode);
15103 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15104 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
15105 add_loc_descr (&ret, l1jump);
15106 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15107 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
15108 VAR_INIT_STATUS_INITIALIZED);
15109 if (tmp == NULL)
15110 return NULL;
15111 add_loc_descr (&ret, tmp);
15112 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
15113 add_loc_descr (&ret, l4jump);
15114 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
15115 ? const1_rtx : const0_rtx,
15116 mode, mem_mode,
15117 VAR_INIT_STATUS_INITIALIZED);
15118 if (l1label == NULL)
15119 return NULL;
15120 add_loc_descr (&ret, l1label);
15121 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15122 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15123 add_loc_descr (&ret, l2label);
15124 if (GET_CODE (rtl) != CLZ)
15125 msb = const1_rtx;
15126 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15127 msb = GEN_INT (HOST_WIDE_INT_1U
15128 << (GET_MODE_BITSIZE (mode) - 1));
15129 else
15130 msb = immed_wide_int_const
15131 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15132 GET_MODE_PRECISION (mode)), mode);
15133 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15134 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15135 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15136 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15137 else
15138 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15139 VAR_INIT_STATUS_INITIALIZED);
15140 if (tmp == NULL)
15141 return NULL;
15142 add_loc_descr (&ret, tmp);
15143 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15144 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15145 add_loc_descr (&ret, l3jump);
15146 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15147 VAR_INIT_STATUS_INITIALIZED);
15148 if (tmp == NULL)
15149 return NULL;
15150 add_loc_descr (&ret, tmp);
15151 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15152 ? DW_OP_shl : DW_OP_shr, 0, 0));
15153 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15154 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15155 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15156 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15157 add_loc_descr (&ret, l2jump);
15158 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15159 add_loc_descr (&ret, l3label);
15160 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15161 add_loc_descr (&ret, l4label);
15162 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15163 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15164 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15165 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15166 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15167 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15168 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15169 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15170 return ret;
15171 }
15172
15173 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15174 const1 is DW_OP_lit1 or corresponding typed constant):
15175 const0 DW_OP_swap
15176 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15177 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15178 L2: DW_OP_drop
15179
15180 PARITY is similar:
15181 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15182 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15183 L2: DW_OP_drop */
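/* Each trip through the L1 loop peels off the low bit of the value
   (const1 DW_OP_and), folds it into the accumulator with DW_OP_plus
   (POPCOUNT) or DW_OP_xor (PARITY), and shifts the value right by one.
   For example, an operand of 0b1011 should yield 3 for POPCOUNT and
   1 for PARITY.  */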
15184
15185 static dw_loc_descr_ref
15186 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15187 machine_mode mem_mode)
15188 {
15189 dw_loc_descr_ref op0, ret, tmp;
15190 dw_loc_descr_ref l1jump, l1label;
15191 dw_loc_descr_ref l2jump, l2label;
15192
15193 if (GET_MODE (XEXP (rtl, 0)) != mode)
15194 return NULL;
15195
15196 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15197 VAR_INIT_STATUS_INITIALIZED);
15198 if (op0 == NULL)
15199 return NULL;
15200 ret = op0;
15201 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15202 VAR_INIT_STATUS_INITIALIZED);
15203 if (tmp == NULL)
15204 return NULL;
15205 add_loc_descr (&ret, tmp);
15206 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15207 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15208 add_loc_descr (&ret, l1label);
15209 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15210 add_loc_descr (&ret, l2jump);
15211 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15212 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15213 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15214 VAR_INIT_STATUS_INITIALIZED);
15215 if (tmp == NULL)
15216 return NULL;
15217 add_loc_descr (&ret, tmp);
15218 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15219 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15220 ? DW_OP_plus : DW_OP_xor, 0, 0));
15221 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15222 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15223 VAR_INIT_STATUS_INITIALIZED);
15224 add_loc_descr (&ret, tmp);
15225 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15226 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15227 add_loc_descr (&ret, l1jump);
15228 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15229 add_loc_descr (&ret, l2label);
15230 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15231 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15232 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15233 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15234 return ret;
15235 }
15236
15237 /* BSWAP (constS is initial shift count, either 56 or 24):
15238 constS const0
15239 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15240 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15241 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15242 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15243 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
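/* constS is GET_MODE_BITSIZE (mode) - 8, i.e. 56 for DImode and 24 for
   SImode; the loop extracts one byte per iteration and reassembles the
   value in the opposite byte order, so e.g. an SImode 0x12345678 should
   come out as 0x78563412.  */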
15244
15245 static dw_loc_descr_ref
15246 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15247 machine_mode mem_mode)
15248 {
15249 dw_loc_descr_ref op0, ret, tmp;
15250 dw_loc_descr_ref l1jump, l1label;
15251 dw_loc_descr_ref l2jump, l2label;
15252
15253 if (BITS_PER_UNIT != 8
15254 || (GET_MODE_BITSIZE (mode) != 32
15255 && GET_MODE_BITSIZE (mode) != 64))
15256 return NULL;
15257
15258 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15259 VAR_INIT_STATUS_INITIALIZED);
15260 if (op0 == NULL)
15261 return NULL;
15262
15263 ret = op0;
15264 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15265 mode, mem_mode,
15266 VAR_INIT_STATUS_INITIALIZED);
15267 if (tmp == NULL)
15268 return NULL;
15269 add_loc_descr (&ret, tmp);
15270 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15271 VAR_INIT_STATUS_INITIALIZED);
15272 if (tmp == NULL)
15273 return NULL;
15274 add_loc_descr (&ret, tmp);
15275 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15276 add_loc_descr (&ret, l1label);
15277 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15278 mode, mem_mode,
15279 VAR_INIT_STATUS_INITIALIZED);
15280 add_loc_descr (&ret, tmp);
15281 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15282 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15283 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15284 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15285 VAR_INIT_STATUS_INITIALIZED);
15286 if (tmp == NULL)
15287 return NULL;
15288 add_loc_descr (&ret, tmp);
15289 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15290 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15291 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15292 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15293 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15294 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15295 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15296 VAR_INIT_STATUS_INITIALIZED);
15297 add_loc_descr (&ret, tmp);
15298 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15299 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15300 add_loc_descr (&ret, l2jump);
15301 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15302 VAR_INIT_STATUS_INITIALIZED);
15303 add_loc_descr (&ret, tmp);
15304 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15305 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15306 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15307 add_loc_descr (&ret, l1jump);
15308 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15309 add_loc_descr (&ret, l2label);
15310 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15311 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15312 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15313 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15314 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15315 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15316 return ret;
15317 }
15318
15319 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15320 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15321 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15322 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15323
15324 ROTATERT is similar:
15325 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15326 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15327 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
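/* This is the usual identity ROTATE (x, n) == (x << n) | (x >> (BITSIZE - n))
   (and its mirror image for ROTATERT); the optional constMASK steps keep the
   intermediate values confined to the mode when it is narrower than the DWARF
   stack.  For example, rotating the SImode value 0x12345678 left by 8 should
   give 0x34567812.  */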
15328
15329 static dw_loc_descr_ref
15330 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15331 machine_mode mem_mode)
15332 {
15333 rtx rtlop1 = XEXP (rtl, 1);
15334 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15335 int i;
15336
15337 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15338 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15339 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15340 VAR_INIT_STATUS_INITIALIZED);
15341 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15342 VAR_INIT_STATUS_INITIALIZED);
15343 if (op0 == NULL || op1 == NULL)
15344 return NULL;
15345 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15346 for (i = 0; i < 2; i++)
15347 {
15348 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15349 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15350 mode, mem_mode,
15351 VAR_INIT_STATUS_INITIALIZED);
15352 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15353 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15354 ? DW_OP_const4u
15355 : HOST_BITS_PER_WIDE_INT == 64
15356 ? DW_OP_const8u : DW_OP_constu,
15357 GET_MODE_MASK (mode), 0);
15358 else
15359 mask[i] = NULL;
15360 if (mask[i] == NULL)
15361 return NULL;
15362 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15363 }
15364 ret = op0;
15365 add_loc_descr (&ret, op1);
15366 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15367 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15368 if (GET_CODE (rtl) == ROTATERT)
15369 {
15370 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15371 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15372 GET_MODE_BITSIZE (mode), 0));
15373 }
15374 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15375 if (mask[0] != NULL)
15376 add_loc_descr (&ret, mask[0]);
15377 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15378 if (mask[1] != NULL)
15379 {
15380 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15381 add_loc_descr (&ret, mask[1]);
15382 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15383 }
15384 if (GET_CODE (rtl) == ROTATE)
15385 {
15386 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15387 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15388 GET_MODE_BITSIZE (mode), 0));
15389 }
15390 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15391 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15392 return ret;
15393 }
15394
15395 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15396 for DEBUG_PARAMETER_REF RTL. */
15397
15398 static dw_loc_descr_ref
15399 parameter_ref_descriptor (rtx rtl)
15400 {
15401 dw_loc_descr_ref ret;
15402 dw_die_ref ref;
15403
15404 if (dwarf_strict)
15405 return NULL;
15406 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15407 /* With LTO during LTRANS we get the late DIE that refers to the early
15408 DIE, thus we add another indirection here. This seems to confuse
15409 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15410 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15411 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15412 if (ref)
15413 {
15414 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15415 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15416 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15417 }
15418 else
15419 {
15420 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15421 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15422 }
15423 return ret;
15424 }
15425
15426 /* The following routine converts the RTL for a variable or parameter
15427 (resident in memory) into an equivalent Dwarf representation of a
15428 mechanism for getting the address of that same variable onto the top of a
15429 hypothetical "address evaluation" stack.
15430
15431 When creating memory location descriptors, we are effectively transforming
15432 the RTL for a memory-resident object into its Dwarf postfix expression
15433 equivalent. This routine recursively descends an RTL tree, turning
15434 it into Dwarf postfix code as it goes.
15435
15436 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15437
15438 MEM_MODE is the mode of the memory reference, needed to handle some
15439 autoincrement addressing modes.
15440
15441 Return 0 if we can't represent the location. */
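   For example, a stack slot addressed as (plus (reg frame_pointer)
   (const_int -8)) is typically turned into a DW_OP_fbreg <offset> descriptor
   (via based_loc_descr above), while a pointer living in a hard register
   becomes a DW_OP_breg<n>-based descriptor.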
15442
15443 dw_loc_descr_ref
15444 mem_loc_descriptor (rtx rtl, machine_mode mode,
15445 machine_mode mem_mode,
15446 enum var_init_status initialized)
15447 {
15448 dw_loc_descr_ref mem_loc_result = NULL;
15449 enum dwarf_location_atom op;
15450 dw_loc_descr_ref op0, op1;
15451 rtx inner = NULL_RTX;
15452 poly_int64 offset;
15453
15454 if (mode == VOIDmode)
15455 mode = GET_MODE (rtl);
15456
15457 /* Note that for a dynamically sized array, the location we will generate a
15458 description of here will be the lowest numbered location which is
15459 actually within the array. That's *not* necessarily the same as the
15460 zeroth element of the array. */
15461
15462 rtl = targetm.delegitimize_address (rtl);
15463
15464 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15465 return NULL;
15466
15467 scalar_int_mode int_mode = BImode, inner_mode, op1_mode;
15468 switch (GET_CODE (rtl))
15469 {
15470 case POST_INC:
15471 case POST_DEC:
15472 case POST_MODIFY:
15473 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15474
15475 case SUBREG:
15476 /* The case of a subreg may arise when we have a local (register)
15477 variable or a formal (register) parameter which doesn't quite fill
15478 up an entire register. For now, just assume that it is
15479 legitimate to make the Dwarf info refer to the whole register which
15480 contains the given subreg. */
15481 if (!subreg_lowpart_p (rtl))
15482 break;
15483 inner = SUBREG_REG (rtl);
15484 /* FALLTHRU */
15485 case TRUNCATE:
15486 if (inner == NULL_RTX)
15487 inner = XEXP (rtl, 0);
15488 if (is_a <scalar_int_mode> (mode, &int_mode)
15489 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15490 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15491 #ifdef POINTERS_EXTEND_UNSIGNED
15492 || (int_mode == Pmode && mem_mode != VOIDmode)
15493 #endif
15494 )
15495 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15496 {
15497 mem_loc_result = mem_loc_descriptor (inner,
15498 inner_mode,
15499 mem_mode, initialized);
15500 break;
15501 }
15502 if (dwarf_strict && dwarf_version < 5)
15503 break;
15504 if (is_a <scalar_int_mode> (mode, &int_mode)
15505 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15506 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15507 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15508 {
15509 dw_die_ref type_die;
15510 dw_loc_descr_ref cvt;
15511
15512 mem_loc_result = mem_loc_descriptor (inner,
15513 GET_MODE (inner),
15514 mem_mode, initialized);
15515 if (mem_loc_result == NULL)
15516 break;
15517 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15518 if (type_die == NULL)
15519 {
15520 mem_loc_result = NULL;
15521 break;
15522 }
15523 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15524 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15525 else
15526 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15527 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15528 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15529 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15530 add_loc_descr (&mem_loc_result, cvt);
15531 if (is_a <scalar_int_mode> (mode, &int_mode)
15532 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15533 {
15534 /* Convert it to untyped afterwards. */
15535 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15536 add_loc_descr (&mem_loc_result, cvt);
15537 }
15538 }
15539 break;
15540
15541 case REG:
15542 if (!is_a <scalar_int_mode> (mode, &int_mode)
15543 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15544 && rtl != arg_pointer_rtx
15545 && rtl != frame_pointer_rtx
15546 #ifdef POINTERS_EXTEND_UNSIGNED
15547 && (int_mode != Pmode || mem_mode == VOIDmode)
15548 #endif
15549 ))
15550 {
15551 dw_die_ref type_die;
15552 unsigned int dbx_regnum;
15553
15554 if (dwarf_strict && dwarf_version < 5)
15555 break;
15556 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
15557 break;
15558 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15559 if (type_die == NULL)
15560 break;
15561
15562 dbx_regnum = dbx_reg_number (rtl);
15563 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15564 break;
15565 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15566 dbx_regnum, 0);
15567 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15568 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15569 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15570 break;
15571 }
15572 /* Whenever a register number forms a part of the description of the
15573 method for calculating the (dynamic) address of a memory resident
15574 object, DWARF rules require the register number be referred to as
15575 a "base register". This distinction is not based in any way upon
15576 what category of register the hardware believes the given register
15577 belongs to. This is strictly DWARF terminology we're dealing with
15578 here. Note that in cases where the location of a memory-resident
15579 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15580 OP_CONST (0)) the actual DWARF location descriptor that we generate
15581 may just be OP_BASEREG (basereg). This may look deceptively like
15582 the object in question was allocated to a register (rather than in
15583 memory) so DWARF consumers need to be aware of the subtle
15584 distinction between OP_REG and OP_BASEREG. */
15585 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15586 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15587 else if (stack_realign_drap
15588 && crtl->drap_reg
15589 && crtl->args.internal_arg_pointer == rtl
15590 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15591 {
15592 /* If RTL is internal_arg_pointer, which has been optimized
15593 out, use DRAP instead. */
15594 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15595 VAR_INIT_STATUS_INITIALIZED);
15596 }
15597 break;
15598
15599 case SIGN_EXTEND:
15600 case ZERO_EXTEND:
15601 if (!is_a <scalar_int_mode> (mode, &int_mode)
15602 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15603 break;
15604 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15605 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15606 if (op0 == 0)
15607 break;
15608 else if (GET_CODE (rtl) == ZERO_EXTEND
15609 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15610 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15611 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15612 to expand zero extend as two shifts instead of
15613 masking. */
15614 && GET_MODE_SIZE (inner_mode) <= 4)
15615 {
15616 mem_loc_result = op0;
15617 add_loc_descr (&mem_loc_result,
15618 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15619 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15620 }
15621 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15622 {
15623 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15624 shift *= BITS_PER_UNIT;
15625 if (GET_CODE (rtl) == SIGN_EXTEND)
15626 op = DW_OP_shra;
15627 else
15628 op = DW_OP_shr;
15629 mem_loc_result = op0;
15630 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15631 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15632 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15633 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15634 }
15635 else if (!dwarf_strict || dwarf_version >= 5)
15636 {
15637 dw_die_ref type_die1, type_die2;
15638 dw_loc_descr_ref cvt;
15639
15640 type_die1 = base_type_for_mode (inner_mode,
15641 GET_CODE (rtl) == ZERO_EXTEND);
15642 if (type_die1 == NULL)
15643 break;
15644 type_die2 = base_type_for_mode (int_mode, 1);
15645 if (type_die2 == NULL)
15646 break;
15647 mem_loc_result = op0;
15648 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15649 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15650 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15651 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15652 add_loc_descr (&mem_loc_result, cvt);
15653 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15654 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15655 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15656 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15657 add_loc_descr (&mem_loc_result, cvt);
15658 }
15659 break;
15660
15661 case MEM:
15662 {
15663 rtx new_rtl = avoid_constant_pool_reference (rtl);
15664 if (new_rtl != rtl)
15665 {
15666 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15667 initialized);
15668 if (mem_loc_result != NULL)
15669 return mem_loc_result;
15670 }
15671 }
15672 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15673 get_address_mode (rtl), mode,
15674 VAR_INIT_STATUS_INITIALIZED);
15675 if (mem_loc_result == NULL)
15676 mem_loc_result = tls_mem_loc_descriptor (rtl);
15677 if (mem_loc_result != NULL)
15678 {
15679 if (!is_a <scalar_int_mode> (mode, &int_mode)
15680 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15681 {
15682 dw_die_ref type_die;
15683 dw_loc_descr_ref deref;
15684 HOST_WIDE_INT size;
15685
15686 if (dwarf_strict && dwarf_version < 5)
15687 return NULL;
15688 if (!GET_MODE_SIZE (mode).is_constant (&size))
15689 return NULL;
15690 type_die
15691 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15692 if (type_die == NULL)
15693 return NULL;
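/* Values wider than a DWARF address (or with a non-integral mode)
   need a typed dereference: DW_OP_deref_type takes the byte size and
   a reference to the base type DIE.  */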
15694 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15695 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15696 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15697 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15698 add_loc_descr (&mem_loc_result, deref);
15699 }
15700 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15701 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15702 else
15703 add_loc_descr (&mem_loc_result,
15704 new_loc_descr (DW_OP_deref_size,
15705 GET_MODE_SIZE (int_mode), 0));
15706 }
15707 break;
15708
15709 case LO_SUM:
15710 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15711
15712 case LABEL_REF:
15713 /* Some ports can transform a symbol ref into a label ref, because
15714 the symbol ref is too far away and has to be dumped into a constant
15715 pool. */
15716 case CONST:
15717 case SYMBOL_REF:
15718 case UNSPEC:
15719 if (!is_a <scalar_int_mode> (mode, &int_mode)
15720 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15721 #ifdef POINTERS_EXTEND_UNSIGNED
15722 && (int_mode != Pmode || mem_mode == VOIDmode)
15723 #endif
15724 ))
15725 break;
15726
15727 if (GET_CODE (rtl) == UNSPEC)
15728 {
15729 /* If delegitimize_address couldn't do anything with the UNSPEC, we
15730 can't express it in the debug info. This can happen e.g. with some
15731 TLS UNSPECs. Allow UNSPECs formerly from CONST that the backend
15732 approves. */
15733 bool not_ok = false;
15734 subrtx_var_iterator::array_type array;
15735 FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL)
15736 if (*iter != rtl && !CONSTANT_P (*iter))
15737 {
15738 not_ok = true;
15739 break;
15740 }
15741
15742 if (not_ok)
15743 break;
15744
15745 FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL)
15746 if (!const_ok_for_output_1 (*iter))
15747 {
15748 not_ok = true;
15749 break;
15750 }
15751
15752 if (not_ok)
15753 break;
15754
15755 rtl = gen_rtx_CONST (GET_MODE (rtl), rtl);
15756 goto symref;
15757 }
15758
15759 if (GET_CODE (rtl) == SYMBOL_REF
15760 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15761 {
15762 dw_loc_descr_ref temp;
15763
15764 /* If this is not defined, we have no way to emit the data. */
15765 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15766 break;
15767
15768 temp = new_addr_loc_descr (rtl, dtprel_true);
15769
15770 /* We check for DWARF 5 here because gdb did not implement
15771 DW_OP_form_tls_address until after 7.12. */
15772 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15773 ? DW_OP_form_tls_address
15774 : DW_OP_GNU_push_tls_address),
15775 0, 0);
15776 add_loc_descr (&mem_loc_result, temp);
15777
15778 break;
15779 }
15780
15781 if (!const_ok_for_output (rtl))
15782 {
15783 if (GET_CODE (rtl) == CONST)
15784 switch (GET_CODE (XEXP (rtl, 0)))
15785 {
15786 case NOT:
15787 op = DW_OP_not;
15788 goto try_const_unop;
15789 case NEG:
15790 op = DW_OP_neg;
15791 goto try_const_unop;
15792 try_const_unop:
15793 rtx arg;
15794 arg = XEXP (XEXP (rtl, 0), 0);
15795 if (!CONSTANT_P (arg))
15796 arg = gen_rtx_CONST (int_mode, arg);
15797 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15798 initialized);
15799 if (op0)
15800 {
15801 mem_loc_result = op0;
15802 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15803 }
15804 break;
15805 default:
15806 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15807 mem_mode, initialized);
15808 break;
15809 }
15810 break;
15811 }
15812
15813 symref:
15814 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15815 vec_safe_push (used_rtx_array, rtl);
15816 break;
15817
15818 case CONCAT:
15819 case CONCATN:
15820 case VAR_LOCATION:
15821 case DEBUG_IMPLICIT_PTR:
15822 expansion_failed (NULL_TREE, rtl,
15823 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15824 return 0;
15825
15826 case ENTRY_VALUE:
15827 if (dwarf_strict && dwarf_version < 5)
15828 return NULL;
15829 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15830 {
15831 if (!is_a <scalar_int_mode> (mode, &int_mode)
15832 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15833 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15834 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15835 else
15836 {
15837 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15838 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15839 return NULL;
15840 op0 = one_reg_loc_descriptor (dbx_regnum,
15841 VAR_INIT_STATUS_INITIALIZED);
15842 }
15843 }
15844 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15845 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15846 {
15847 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15848 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15849 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15850 return NULL;
15851 }
15852 else
15853 gcc_unreachable ();
15854 if (op0 == NULL)
15855 return NULL;
15856 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15857 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15858 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15859 break;
15860
15861 case DEBUG_PARAMETER_REF:
15862 mem_loc_result = parameter_ref_descriptor (rtl);
15863 break;
15864
15865 case PRE_MODIFY:
15866 /* Extract the PLUS expression nested inside and fall into
15867 PLUS code below. */
15868 rtl = XEXP (rtl, 1);
15869 goto plus;
15870
15871 case PRE_INC:
15872 case PRE_DEC:
15873 /* Turn these into a PLUS expression and fall into the PLUS code
15874 below. */
15875 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15876 gen_int_mode (GET_CODE (rtl) == PRE_INC
15877 ? GET_MODE_UNIT_SIZE (mem_mode)
15878 : -GET_MODE_UNIT_SIZE (mem_mode),
15879 mode));
15880
15881 /* fall through */
15882
15883 case PLUS:
15884 plus:
15885 if (is_based_loc (rtl)
15886 && is_a <scalar_int_mode> (mode, &int_mode)
15887 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15888 || XEXP (rtl, 0) == arg_pointer_rtx
15889 || XEXP (rtl, 0) == frame_pointer_rtx))
15890 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15891 INTVAL (XEXP (rtl, 1)),
15892 VAR_INIT_STATUS_INITIALIZED);
15893 else
15894 {
15895 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15896 VAR_INIT_STATUS_INITIALIZED);
15897 if (mem_loc_result == 0)
15898 break;
15899
15900 if (CONST_INT_P (XEXP (rtl, 1))
15901 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15902 <= DWARF2_ADDR_SIZE))
15903 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15904 else
15905 {
15906 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15907 VAR_INIT_STATUS_INITIALIZED);
15908 if (op1 == 0)
15909 return NULL;
15910 add_loc_descr (&mem_loc_result, op1);
15911 add_loc_descr (&mem_loc_result,
15912 new_loc_descr (DW_OP_plus, 0, 0));
15913 }
15914 }
15915 break;
15916
15917 /* If a pseudo-reg is optimized away, it is possible for it to
15918 be replaced with a MEM containing a multiply or shift. */
15919 case MINUS:
15920 op = DW_OP_minus;
15921 goto do_binop;
15922
15923 case MULT:
15924 op = DW_OP_mul;
15925 goto do_binop;
15926
15927 case DIV:
15928 if ((!dwarf_strict || dwarf_version >= 5)
15929 && is_a <scalar_int_mode> (mode, &int_mode)
15930 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15931 {
15932 mem_loc_result = typed_binop (DW_OP_div, rtl,
15933 base_type_for_mode (mode, 0),
15934 int_mode, mem_mode);
15935 break;
15936 }
15937 op = DW_OP_div;
15938 goto do_binop;
15939
15940 case UMOD:
15941 op = DW_OP_mod;
15942 goto do_binop;
15943
15944 case ASHIFT:
15945 op = DW_OP_shl;
15946 goto do_shift;
15947
15948 case ASHIFTRT:
15949 op = DW_OP_shra;
15950 goto do_shift;
15951
15952 case LSHIFTRT:
15953 op = DW_OP_shr;
15954 goto do_shift;
15955
15956 do_shift:
15957 if (!is_a <scalar_int_mode> (mode, &int_mode))
15958 break;
15959 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15960 VAR_INIT_STATUS_INITIALIZED);
15961 {
15962 rtx rtlop1 = XEXP (rtl, 1);
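/* Evaluate the shift count in the same integer mode as the shifted
   value, zero-extending a narrower count first.  */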
15963 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15964 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15965 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15966 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15967 VAR_INIT_STATUS_INITIALIZED);
15968 }
15969
15970 if (op0 == 0 || op1 == 0)
15971 break;
15972
15973 mem_loc_result = op0;
15974 add_loc_descr (&mem_loc_result, op1);
15975 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15976 break;
15977
15978 case AND:
15979 op = DW_OP_and;
15980 goto do_binop;
15981
15982 case IOR:
15983 op = DW_OP_or;
15984 goto do_binop;
15985
15986 case XOR:
15987 op = DW_OP_xor;
15988 goto do_binop;
15989
15990 do_binop:
15991 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15992 VAR_INIT_STATUS_INITIALIZED);
15993 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15994 VAR_INIT_STATUS_INITIALIZED);
15995
15996 if (op0 == 0 || op1 == 0)
15997 break;
15998
15999 mem_loc_result = op0;
16000 add_loc_descr (&mem_loc_result, op1);
16001 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16002 break;
16003
16004 case MOD:
16005 if ((!dwarf_strict || dwarf_version >= 5)
16006 && is_a <scalar_int_mode> (mode, &int_mode)
16007 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
16008 {
16009 mem_loc_result = typed_binop (DW_OP_mod, rtl,
16010 base_type_for_mode (mode, 0),
16011 int_mode, mem_mode);
16012 break;
16013 }
16014
16015 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
16016 VAR_INIT_STATUS_INITIALIZED);
16017 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16018 VAR_INIT_STATUS_INITIALIZED);
16019
16020 if (op0 == 0 || op1 == 0)
16021 break;
16022
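/* Expand signed MOD as op0 - (op0 / op1) * op1 on the DWARF stack:
   the two DW_OP_over operations duplicate both operands, then
   divide, multiply and subtract.  DW_OP_mod itself is used only for
   UMOD above.  */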
16023 mem_loc_result = op0;
16024 add_loc_descr (&mem_loc_result, op1);
16025 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
16026 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
16027 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
16028 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
16029 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
16030 break;
16031
16032 case UDIV:
16033 if ((!dwarf_strict || dwarf_version >= 5)
16034 && is_a <scalar_int_mode> (mode, &int_mode))
16035 {
16036 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
16037 {
16038 op = DW_OP_div;
16039 goto do_binop;
16040 }
16041 mem_loc_result = typed_binop (DW_OP_div, rtl,
16042 base_type_for_mode (int_mode, 1),
16043 int_mode, mem_mode);
16044 }
16045 break;
16046
16047 case NOT:
16048 op = DW_OP_not;
16049 goto do_unop;
16050
16051 case ABS:
16052 op = DW_OP_abs;
16053 goto do_unop;
16054
16055 case NEG:
16056 op = DW_OP_neg;
16057 goto do_unop;
16058
16059 do_unop:
16060 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
16061 VAR_INIT_STATUS_INITIALIZED);
16062
16063 if (op0 == 0)
16064 break;
16065
16066 mem_loc_result = op0;
16067 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16068 break;
16069
16070 case CONST_INT:
16071 if (!is_a <scalar_int_mode> (mode, &int_mode)
16072 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16073 #ifdef POINTERS_EXTEND_UNSIGNED
16074 || (int_mode == Pmode
16075 && mem_mode != VOIDmode
16076 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
16077 #endif
16078 )
16079 {
16080 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16081 break;
16082 }
16083 if ((!dwarf_strict || dwarf_version >= 5)
16084 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
16085 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
16086 {
16087 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
16088 scalar_int_mode amode;
16089 if (type_die == NULL)
16090 return NULL;
16091 if (INTVAL (rtl) >= 0
16092 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
16093 .exists (&amode))
16094 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
16095 /* const DW_OP_convert <XXX> vs.
16096 DW_OP_const_type <XXX, 1, const>. */
16097 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
16098 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
16099 {
16100 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16101 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16102 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16103 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16104 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
16105 add_loc_descr (&mem_loc_result, op0);
16106 return mem_loc_result;
16107 }
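/* Otherwise emit the constant directly as a typed literal:
   DW_OP_const_type with the base type DIE as the first operand and
   the value as the second.  */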
16108 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
16109 INTVAL (rtl));
16110 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16111 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16112 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16113 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
16114 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
16115 else
16116 {
16117 mem_loc_result->dw_loc_oprnd2.val_class
16118 = dw_val_class_const_double;
16119 mem_loc_result->dw_loc_oprnd2.v.val_double
16120 = double_int::from_shwi (INTVAL (rtl));
16121 }
16122 }
16123 break;
16124
16125 case CONST_DOUBLE:
16126 if (!dwarf_strict || dwarf_version >= 5)
16127 {
16128 dw_die_ref type_die;
16129
16130 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
16131 CONST_DOUBLE rtx could represent either a large integer
16132 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
16133 the value is always a floating point constant.
16134
16135 When it is an integer, a CONST_DOUBLE is used whenever
16136 the constant requires 2 HWIs to be adequately represented.
16137 We output CONST_DOUBLEs as blocks. */
16138 if (mode == VOIDmode
16139 || (GET_MODE (rtl) == VOIDmode
16140 && maybe_ne (GET_MODE_BITSIZE (mode),
16141 HOST_BITS_PER_DOUBLE_INT)))
16142 break;
16143 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16144 if (type_die == NULL)
16145 return NULL;
16146 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16147 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16148 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16149 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16150 #if TARGET_SUPPORTS_WIDE_INT == 0
16151 if (!SCALAR_FLOAT_MODE_P (mode))
16152 {
16153 mem_loc_result->dw_loc_oprnd2.val_class
16154 = dw_val_class_const_double;
16155 mem_loc_result->dw_loc_oprnd2.v.val_double
16156 = rtx_to_double_int (rtl);
16157 }
16158 else
16159 #endif
16160 {
16161 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16162 unsigned int length = GET_MODE_SIZE (float_mode);
16163 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16164
16165 insert_float (rtl, array);
16166 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16167 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16168 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16169 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16170 }
16171 }
16172 break;
16173
16174 case CONST_WIDE_INT:
16175 if (!dwarf_strict || dwarf_version >= 5)
16176 {
16177 dw_die_ref type_die;
16178
16179 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16180 if (type_die == NULL)
16181 return NULL;
16182 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16183 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16184 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16185 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16186 mem_loc_result->dw_loc_oprnd2.val_class
16187 = dw_val_class_wide_int;
16188 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16189 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16190 }
16191 break;
16192
16193 case CONST_POLY_INT:
16194 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16195 break;
16196
16197 case EQ:
16198 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16199 break;
16200
16201 case GE:
16202 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16203 break;
16204
16205 case GT:
16206 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16207 break;
16208
16209 case LE:
16210 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16211 break;
16212
16213 case LT:
16214 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16215 break;
16216
16217 case NE:
16218 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16219 break;
16220
16221 case GEU:
16222 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16223 break;
16224
16225 case GTU:
16226 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16227 break;
16228
16229 case LEU:
16230 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16231 break;
16232
16233 case LTU:
16234 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16235 break;
16236
16237 case UMIN:
16238 case UMAX:
16239 if (!SCALAR_INT_MODE_P (mode))
16240 break;
16241 /* FALLTHRU */
16242 case SMIN:
16243 case SMAX:
16244 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16245 break;
16246
16247 case ZERO_EXTRACT:
16248 case SIGN_EXTRACT:
16249 if (CONST_INT_P (XEXP (rtl, 1))
16250 && CONST_INT_P (XEXP (rtl, 2))
16251 && is_a <scalar_int_mode> (mode, &int_mode)
16252 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16253 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16254 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16255 && ((unsigned) INTVAL (XEXP (rtl, 1))
16256 + (unsigned) INTVAL (XEXP (rtl, 2))
16257 <= GET_MODE_BITSIZE (int_mode)))
16258 {
16259 int shift, size;
16260 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16261 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16262 if (op0 == 0)
16263 break;
16264 if (GET_CODE (rtl) == SIGN_EXTRACT)
16265 op = DW_OP_shra;
16266 else
16267 op = DW_OP_shr;
16268 mem_loc_result = op0;
16269 size = INTVAL (XEXP (rtl, 1));
16270 shift = INTVAL (XEXP (rtl, 2));
16271 if (BITS_BIG_ENDIAN)
16272 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
16273 if (shift + size != (int) (DWARF2_ADDR_SIZE * BITS_PER_UNIT))
16274 {
16275 add_loc_descr (&mem_loc_result,
16276 int_loc_descriptor (DWARF2_ADDR_SIZE * BITS_PER_UNIT
16277 - shift - size));
16278 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16279 }
16280 if (size != (int) (DWARF2_ADDR_SIZE * BITS_PER_UNIT))
16281 {
16282 add_loc_descr (&mem_loc_result,
16283 int_loc_descriptor (DWARF2_ADDR_SIZE * BITS_PER_UNIT - size));
16284 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16285 }
16286 }
16287 break;
16288
16289 case IF_THEN_ELSE:
16290 {
16291 dw_loc_descr_ref op2, bra_node, drop_node;
16292 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16293 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16294 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16295 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16296 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16297 VAR_INIT_STATUS_INITIALIZED);
16298 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16299 VAR_INIT_STATUS_INITIALIZED);
16300 if (op0 == NULL || op1 == NULL || op2 == NULL)
16301 break;
16302
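/* Push <then>, <else> and the condition; DW_OP_bra pops the
   condition and, if it is nonzero, jumps past the DW_OP_swap so the
   final DW_OP_drop discards <else> and leaves <then>.  Otherwise the
   swap makes the drop discard <then>, leaving <else>.  */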
16303 mem_loc_result = op1;
16304 add_loc_descr (&mem_loc_result, op2);
16305 add_loc_descr (&mem_loc_result, op0);
16306 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16307 add_loc_descr (&mem_loc_result, bra_node);
16308 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16309 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16310 add_loc_descr (&mem_loc_result, drop_node);
16311 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16312 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16313 }
16314 break;
16315
16316 case FLOAT_EXTEND:
16317 case FLOAT_TRUNCATE:
16318 case FLOAT:
16319 case UNSIGNED_FLOAT:
16320 case FIX:
16321 case UNSIGNED_FIX:
16322 if (!dwarf_strict || dwarf_version >= 5)
16323 {
16324 dw_die_ref type_die;
16325 dw_loc_descr_ref cvt;
16326
16327 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16328 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16329 if (op0 == NULL)
16330 break;
16331 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16332 && (GET_CODE (rtl) == FLOAT
16333 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16334 {
16335 type_die = base_type_for_mode (int_mode,
16336 GET_CODE (rtl) == UNSIGNED_FLOAT);
16337 if (type_die == NULL)
16338 break;
16339 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16340 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16341 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16342 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16343 add_loc_descr (&op0, cvt);
16344 }
16345 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16346 if (type_die == NULL)
16347 break;
16348 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16349 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16350 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16351 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16352 add_loc_descr (&op0, cvt);
16353 if (is_a <scalar_int_mode> (mode, &int_mode)
16354 && (GET_CODE (rtl) == FIX
16355 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16356 {
16357 op0 = convert_descriptor_to_mode (int_mode, op0);
16358 if (op0 == NULL)
16359 break;
16360 }
16361 mem_loc_result = op0;
16362 }
16363 break;
16364
16365 case CLZ:
16366 case CTZ:
16367 case FFS:
16368 if (is_a <scalar_int_mode> (mode, &int_mode))
16369 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16370 break;
16371
16372 case POPCOUNT:
16373 case PARITY:
16374 if (is_a <scalar_int_mode> (mode, &int_mode))
16375 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16376 break;
16377
16378 case BSWAP:
16379 if (is_a <scalar_int_mode> (mode, &int_mode))
16380 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16381 break;
16382
16383 case ROTATE:
16384 case ROTATERT:
16385 if (is_a <scalar_int_mode> (mode, &int_mode))
16386 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16387 break;
16388
16389 case COMPARE:
16390 /* In theory, we could implement the above. */
16391 /* DWARF cannot represent the unsigned compare operations
16392 natively. */
16393 case SS_MULT:
16394 case US_MULT:
16395 case SS_DIV:
16396 case US_DIV:
16397 case SS_PLUS:
16398 case US_PLUS:
16399 case SS_MINUS:
16400 case US_MINUS:
16401 case SS_NEG:
16402 case US_NEG:
16403 case SS_ABS:
16404 case SS_ASHIFT:
16405 case US_ASHIFT:
16406 case SS_TRUNCATE:
16407 case US_TRUNCATE:
16408 case UNORDERED:
16409 case ORDERED:
16410 case UNEQ:
16411 case UNGE:
16412 case UNGT:
16413 case UNLE:
16414 case UNLT:
16415 case LTGT:
16416 case FRACT_CONVERT:
16417 case UNSIGNED_FRACT_CONVERT:
16418 case SAT_FRACT:
16419 case UNSIGNED_SAT_FRACT:
16420 case SQRT:
16421 case ASM_OPERANDS:
16422 case VEC_MERGE:
16423 case VEC_SELECT:
16424 case VEC_CONCAT:
16425 case VEC_DUPLICATE:
16426 case VEC_SERIES:
16427 case HIGH:
16428 case FMA:
16429 case STRICT_LOW_PART:
16430 case CONST_VECTOR:
16431 case CONST_FIXED:
16432 case CLRSB:
16433 case CLOBBER:
16434 break;
16435
16436 case CONST_STRING:
16437 resolve_one_addr (&rtl);
16438 goto symref;
16439
16440 /* RTL sequences inside PARALLEL record a series of DWARF operations for
16441 the expression. An UNSPEC rtx represents a raw DWARF operation;
16442 new_loc_descr is called for it to build the operation directly.
16443 Otherwise mem_loc_descriptor is called recursively. */
16444 case PARALLEL:
16445 {
16446 int index = 0;
16447 dw_loc_descr_ref exp_result = NULL;
16448
16449 for (; index < XVECLEN (rtl, 0); index++)
16450 {
16451 rtx elem = XVECEXP (rtl, 0, index);
16452 if (GET_CODE (elem) == UNSPEC)
16453 {
16454 /* Each DWARF operation UNSPEC contains two operands; if
16455 one operand is not used for the operation, const0_rtx is
16456 passed. */
16457 gcc_assert (XVECLEN (elem, 0) == 2);
16458
16459 HOST_WIDE_INT dw_op = XINT (elem, 1);
16460 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16461 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16462 exp_result
16463 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16464 oprnd2);
16465 }
16466 else
16467 exp_result
16468 = mem_loc_descriptor (elem, mode, mem_mode,
16469 VAR_INIT_STATUS_INITIALIZED);
16470
16471 if (!mem_loc_result)
16472 mem_loc_result = exp_result;
16473 else
16474 add_loc_descr (&mem_loc_result, exp_result);
16475 }
16476
16477 break;
16478 }
16479
16480 default:
16481 if (flag_checking)
16482 {
16483 print_rtl (stderr, rtl);
16484 gcc_unreachable ();
16485 }
16486 break;
16487 }
16488
16489 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16490 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16491
16492 return mem_loc_result;
16493 }
16494
16495 /* Return a descriptor that describes the concatenation of two locations.
16496 This is typically a complex variable. */
16497
16498 static dw_loc_descr_ref
16499 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16500 {
16501 /* At present we only track constant-sized pieces. */
16502 unsigned int size0, size1;
16503 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16504 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16505 return 0;
16506
16507 dw_loc_descr_ref cc_loc_result = NULL;
16508 dw_loc_descr_ref x0_ref
16509 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16510 dw_loc_descr_ref x1_ref
16511 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16512
16513 if (x0_ref == 0 || x1_ref == 0)
16514 return 0;
16515
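/* Compose the two halves with DW_OP_piece: each sub-location is
   followed by a piece operation giving its size in bytes.  */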
16516 cc_loc_result = x0_ref;
16517 add_loc_descr_op_piece (&cc_loc_result, size0);
16518
16519 add_loc_descr (&cc_loc_result, x1_ref);
16520 add_loc_descr_op_piece (&cc_loc_result, size1);
16521
16522 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16523 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16524
16525 return cc_loc_result;
16526 }
16527
16528 /* Return a descriptor that describes the concatenation of N
16529 locations. */
16530
16531 static dw_loc_descr_ref
16532 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16533 {
16534 unsigned int i;
16535 dw_loc_descr_ref cc_loc_result = NULL;
16536 unsigned int n = XVECLEN (concatn, 0);
16537 unsigned int size;
16538
16539 for (i = 0; i < n; ++i)
16540 {
16541 dw_loc_descr_ref ref;
16542 rtx x = XVECEXP (concatn, 0, i);
16543
16544 /* At present we only track constant-sized pieces. */
16545 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16546 return NULL;
16547
16548 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16549 if (ref == NULL)
16550 return NULL;
16551
16552 add_loc_descr (&cc_loc_result, ref);
16553 add_loc_descr_op_piece (&cc_loc_result, size);
16554 }
16555
16556 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16557 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16558
16559 return cc_loc_result;
16560 }
16561
16562 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16563 for DEBUG_IMPLICIT_PTR RTL. */
16564
16565 static dw_loc_descr_ref
16566 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16567 {
16568 dw_loc_descr_ref ret;
16569 dw_die_ref ref;
16570
16571 if (dwarf_strict && dwarf_version < 5)
16572 return NULL;
16573 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16574 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16575 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16576 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16577 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16578 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16579 if (ref)
16580 {
16581 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16582 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16583 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16584 }
16585 else
16586 {
16587 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16588 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16589 }
16590 return ret;
16591 }
16592
16593 /* Output a proper Dwarf location descriptor for a variable or parameter
16594 which is either allocated in a register or in a memory location. For a
16595 register, we just generate an OP_REG and the register number. For a
16596 memory location we provide a Dwarf postfix expression describing how to
16597 generate the (dynamic) address of the object onto the address stack.
16598
16599 MODE is mode of the decl if this loc_descriptor is going to be used in
16600 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16601 allowed, VOIDmode otherwise.
16602
16603 If we don't know how to describe it, return 0. */
16604
16605 static dw_loc_descr_ref
16606 loc_descriptor (rtx rtl, machine_mode mode,
16607 enum var_init_status initialized)
16608 {
16609 dw_loc_descr_ref loc_result = NULL;
16610 scalar_int_mode int_mode;
16611
16612 switch (GET_CODE (rtl))
16613 {
16614 case SUBREG:
16615 /* The case of a subreg may arise when we have a local (register)
16616 variable or a formal (register) parameter which doesn't quite fill
16617 up an entire register. For now, just assume that it is
16618 legitimate to make the Dwarf info refer to the whole register which
16619 contains the given subreg. */
16620 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16621 loc_result = loc_descriptor (SUBREG_REG (rtl),
16622 GET_MODE (SUBREG_REG (rtl)), initialized);
16623 else
16624 goto do_default;
16625 break;
16626
16627 case REG:
16628 loc_result = reg_loc_descriptor (rtl, initialized);
16629 break;
16630
16631 case MEM:
16632 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16633 GET_MODE (rtl), initialized);
16634 if (loc_result == NULL)
16635 loc_result = tls_mem_loc_descriptor (rtl);
16636 if (loc_result == NULL)
16637 {
16638 rtx new_rtl = avoid_constant_pool_reference (rtl);
16639 if (new_rtl != rtl)
16640 loc_result = loc_descriptor (new_rtl, mode, initialized);
16641 }
16642 break;
16643
16644 case CONCAT:
16645 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16646 initialized);
16647 break;
16648
16649 case CONCATN:
16650 loc_result = concatn_loc_descriptor (rtl, initialized);
16651 break;
16652
16653 case VAR_LOCATION:
16654 /* Single part. */
16655 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16656 {
16657 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16658 if (GET_CODE (loc) == EXPR_LIST)
16659 loc = XEXP (loc, 0);
16660 loc_result = loc_descriptor (loc, mode, initialized);
16661 break;
16662 }
16663
16664 rtl = XEXP (rtl, 1);
16665 /* FALLTHRU */
16666
16667 case PARALLEL:
16668 {
16669 rtvec par_elems = XVEC (rtl, 0);
16670 int num_elem = GET_NUM_ELEM (par_elems);
16671 machine_mode mode;
16672 int i, size;
16673
16674 /* Create the first one, so we have something to add to. */
16675 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16676 VOIDmode, initialized);
16677 if (loc_result == NULL)
16678 return NULL;
16679 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16680 /* At present we only track constant-sized pieces. */
16681 if (!GET_MODE_SIZE (mode).is_constant (&size))
16682 return NULL;
16683 add_loc_descr_op_piece (&loc_result, size);
16684 for (i = 1; i < num_elem; i++)
16685 {
16686 dw_loc_descr_ref temp;
16687
16688 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16689 VOIDmode, initialized);
16690 if (temp == NULL)
16691 return NULL;
16692 add_loc_descr (&loc_result, temp);
16693 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16694 /* At present we only track constant-sized pieces. */
16695 if (!GET_MODE_SIZE (mode).is_constant (&size))
16696 return NULL;
16697 add_loc_descr_op_piece (&loc_result, size);
16698 }
16699 }
16700 break;
16701
16702 case CONST_INT:
16703 if (mode != VOIDmode && mode != BLKmode)
16704 {
16705 int_mode = as_a <scalar_int_mode> (mode);
16706 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16707 INTVAL (rtl));
16708 }
16709 break;
16710
16711 case CONST_DOUBLE:
16712 if (mode == VOIDmode)
16713 mode = GET_MODE (rtl);
16714
16715 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16716 {
16717 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16718
16719 /* Note that a CONST_DOUBLE rtx could represent either an integer
16720 or a floating-point constant. A CONST_DOUBLE is used whenever
16721 the constant requires more than one word in order to be
16722 adequately represented. We output CONST_DOUBLEs as blocks. */
16723 scalar_mode smode = as_a <scalar_mode> (mode);
16724 loc_result = new_loc_descr (DW_OP_implicit_value,
16725 GET_MODE_SIZE (smode), 0);
16726 #if TARGET_SUPPORTS_WIDE_INT == 0
16727 if (!SCALAR_FLOAT_MODE_P (smode))
16728 {
16729 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16730 loc_result->dw_loc_oprnd2.v.val_double
16731 = rtx_to_double_int (rtl);
16732 }
16733 else
16734 #endif
16735 {
16736 unsigned int length = GET_MODE_SIZE (smode);
16737 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16738
16739 insert_float (rtl, array);
16740 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16741 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16742 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16743 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16744 }
16745 }
16746 break;
16747
16748 case CONST_WIDE_INT:
16749 if (mode == VOIDmode)
16750 mode = GET_MODE (rtl);
16751
16752 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16753 {
16754 int_mode = as_a <scalar_int_mode> (mode);
16755 loc_result = new_loc_descr (DW_OP_implicit_value,
16756 GET_MODE_SIZE (int_mode), 0);
16757 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16758 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16759 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16760 }
16761 break;
16762
16763 case CONST_VECTOR:
16764 if (mode == VOIDmode)
16765 mode = GET_MODE (rtl);
16766
16767 if (mode != VOIDmode
16768 /* The combination of a length and byte elt_size doesn't extend
16769 naturally to boolean vectors, where several elements are packed
16770 into the same byte. */
16771 && GET_MODE_CLASS (mode) != MODE_VECTOR_BOOL
16772 && (dwarf_version >= 4 || !dwarf_strict))
16773 {
16774 unsigned int length;
16775 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16776 return NULL;
16777
16778 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16779 unsigned char *array
16780 = ggc_vec_alloc<unsigned char> (length * elt_size);
16781 unsigned int i;
16782 unsigned char *p;
16783 machine_mode imode = GET_MODE_INNER (mode);
16784
16785 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16786 switch (GET_MODE_CLASS (mode))
16787 {
16788 case MODE_VECTOR_INT:
16789 for (i = 0, p = array; i < length; i++, p += elt_size)
16790 {
16791 rtx elt = CONST_VECTOR_ELT (rtl, i);
16792 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16793 }
16794 break;
16795
16796 case MODE_VECTOR_FLOAT:
16797 for (i = 0, p = array; i < length; i++, p += elt_size)
16798 {
16799 rtx elt = CONST_VECTOR_ELT (rtl, i);
16800 insert_float (elt, p);
16801 }
16802 break;
16803
16804 default:
16805 gcc_unreachable ();
16806 }
16807
16808 loc_result = new_loc_descr (DW_OP_implicit_value,
16809 length * elt_size, 0);
16810 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16811 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16812 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16813 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16814 }
16815 break;
16816
16817 case CONST:
16818 if (mode == VOIDmode
16819 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16820 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16821 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16822 {
16823 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16824 break;
16825 }
16826 /* FALLTHROUGH */
16827 case SYMBOL_REF:
16828 if (!const_ok_for_output (rtl))
16829 break;
16830 /* FALLTHROUGH */
16831 case LABEL_REF:
16832 if (is_a <scalar_int_mode> (mode, &int_mode)
16833 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16834 && (dwarf_version >= 4 || !dwarf_strict))
16835 {
16836 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16837 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16838 vec_safe_push (used_rtx_array, rtl);
16839 }
16840 break;
16841
16842 case DEBUG_IMPLICIT_PTR:
16843 loc_result = implicit_ptr_descriptor (rtl, 0);
16844 break;
16845
16846 case PLUS:
16847 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16848 && CONST_INT_P (XEXP (rtl, 1)))
16849 {
16850 loc_result
16851 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16852 break;
16853 }
16854 /* FALLTHRU */
16855 do_default:
16856 default:
16857 if ((is_a <scalar_int_mode> (mode, &int_mode)
16858 && GET_MODE (rtl) == int_mode
16859 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16860 && dwarf_version >= 4)
16861 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16862 {
16863 /* Value expression. */
16864 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16865 if (loc_result)
16866 add_loc_descr (&loc_result,
16867 new_loc_descr (DW_OP_stack_value, 0, 0));
16868 }
16869 break;
16870 }
16871
16872 return loc_result;
16873 }
16874
16875 /* We need to figure out what section we should use as the base for the
16876 address ranges where a given location is valid.
16877 1. If this particular DECL has a section associated with it, use that.
16878 2. If this function has a section associated with it, use that.
16879 3. Otherwise, use the text section.
16880 XXX: If you split a variable across multiple sections, we won't notice. */
16881
16882 static const char *
16883 secname_for_decl (const_tree decl)
16884 {
16885 const char *secname;
16886
16887 if (VAR_OR_FUNCTION_DECL_P (decl)
16888 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16889 && DECL_SECTION_NAME (decl))
16890 secname = DECL_SECTION_NAME (decl);
16891 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16892 {
16893 if (in_cold_section_p)
16894 {
16895 section *sec = current_function_section ();
16896 if (sec->common.flags & SECTION_NAMED)
16897 return sec->named.name;
16898 }
16899 secname = DECL_SECTION_NAME (current_function_decl);
16900 }
16901 else if (cfun && in_cold_section_p)
16902 secname = crtl->subsections.cold_section_label;
16903 else
16904 secname = text_section_label;
16905
16906 return secname;
16907 }
16908
16909 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16910
16911 static bool
16912 decl_by_reference_p (tree decl)
16913 {
16914 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16915 || VAR_P (decl))
16916 && DECL_BY_REFERENCE (decl));
16917 }
16918
16919 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16920 for VARLOC. */
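/* WANT_ADDRESS is 0 if the value itself is wanted, 1 if its address is
   required, and 2 if an address is preferred but a value expression
   (DW_OP_stack_value) is acceptable, as the checks at the end of the
   function show.  */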
16921
16922 static dw_loc_descr_ref
16923 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16924 enum var_init_status initialized)
16925 {
16926 int have_address = 0;
16927 dw_loc_descr_ref descr;
16928 machine_mode mode;
16929
16930 if (want_address != 2)
16931 {
16932 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16933 /* Single part. */
16934 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16935 {
16936 varloc = PAT_VAR_LOCATION_LOC (varloc);
16937 if (GET_CODE (varloc) == EXPR_LIST)
16938 varloc = XEXP (varloc, 0);
16939 mode = GET_MODE (varloc);
16940 if (MEM_P (varloc))
16941 {
16942 rtx addr = XEXP (varloc, 0);
16943 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16944 mode, initialized);
16945 if (descr)
16946 have_address = 1;
16947 else
16948 {
16949 rtx x = avoid_constant_pool_reference (varloc);
16950 if (x != varloc)
16951 descr = mem_loc_descriptor (x, mode, VOIDmode,
16952 initialized);
16953 }
16954 }
16955 else
16956 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16957 }
16958 else
16959 return 0;
16960 }
16961 else
16962 {
16963 if (GET_CODE (varloc) == VAR_LOCATION)
16964 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16965 else
16966 mode = DECL_MODE (loc);
16967 descr = loc_descriptor (varloc, mode, initialized);
16968 have_address = 1;
16969 }
16970
16971 if (!descr)
16972 return 0;
16973
16974 if (want_address == 2 && !have_address
16975 && (dwarf_version >= 4 || !dwarf_strict))
16976 {
16977 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16978 {
16979 expansion_failed (loc, NULL_RTX,
16980 "DWARF address size mismatch");
16981 return 0;
16982 }
16983 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16984 have_address = 1;
16985 }
16986 /* Show if we can't fill the request for an address. */
16987 if (want_address && !have_address)
16988 {
16989 expansion_failed (loc, NULL_RTX,
16990 "Want address and only have value");
16991 return 0;
16992 }
16993
16994 /* If we've got an address and don't want one, dereference. */
16995 if (!want_address && have_address)
16996 {
16997 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16998 enum dwarf_location_atom op;
16999
17000 if (size > DWARF2_ADDR_SIZE || size == -1)
17001 {
17002 expansion_failed (loc, NULL_RTX,
17003 "DWARF address size mismatch");
17004 return 0;
17005 }
17006 else if (size == DWARF2_ADDR_SIZE)
17007 op = DW_OP_deref;
17008 else
17009 op = DW_OP_deref_size;
17010
17011 add_loc_descr (&descr, new_loc_descr (op, size, 0));
17012 }
17013
17014 return descr;
17015 }
17016
17017 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
17018 if it is not possible. */
17019
17020 static dw_loc_descr_ref
17021 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
17022 {
17023 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
17024 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
17025 else if (dwarf_version >= 3 || !dwarf_strict)
17026 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
17027 else
17028 return NULL;
17029 }
17030
17031 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
17032 for VAR_LOC_NOTE for variable DECL that has been optimized by SRA. */
17033
17034 static dw_loc_descr_ref
17035 dw_sra_loc_expr (tree decl, rtx loc)
17036 {
17037 rtx p;
17038 unsigned HOST_WIDE_INT padsize = 0;
17039 dw_loc_descr_ref descr, *descr_tail;
17040 unsigned HOST_WIDE_INT decl_size;
17041 rtx varloc;
17042 enum var_init_status initialized;
17043
17044 if (DECL_SIZE (decl) == NULL
17045 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
17046 return NULL;
17047
17048 decl_size = tree_to_uhwi (DECL_SIZE (decl));
17049 descr = NULL;
17050 descr_tail = &descr;
17051
17052 for (p = loc; p; p = XEXP (p, 1))
17053 {
17054 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
17055 rtx loc_note = *decl_piece_varloc_ptr (p);
17056 dw_loc_descr_ref cur_descr;
17057 dw_loc_descr_ref *tail, last = NULL;
17058 unsigned HOST_WIDE_INT opsize = 0;
17059
17060 if (loc_note == NULL_RTX
17061 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
17062 {
17063 padsize += bitsize;
17064 continue;
17065 }
17066 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
17067 varloc = NOTE_VAR_LOCATION (loc_note);
17068 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
17069 if (cur_descr == NULL)
17070 {
17071 padsize += bitsize;
17072 continue;
17073 }
17074
17075 /* Check that cur_descr either doesn't use
17076 DW_OP_*piece operations, or their sum is equal
17077 to bitsize. Otherwise we can't embed it. */
17078 for (tail = &cur_descr; *tail != NULL;
17079 tail = &(*tail)->dw_loc_next)
17080 if ((*tail)->dw_loc_opc == DW_OP_piece)
17081 {
17082 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
17083 * BITS_PER_UNIT;
17084 last = *tail;
17085 }
17086 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
17087 {
17088 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
17089 last = *tail;
17090 }
17091
17092 if (last != NULL && opsize != bitsize)
17093 {
17094 padsize += bitsize;
17095 /* Discard the current piece of the descriptor and release any
17096 addr_table entries it uses. */
17097 remove_loc_list_addr_table_entries (cur_descr);
17098 continue;
17099 }
17100
17101 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
17102 expression, which means that those bits are optimized out. */
17103 if (padsize)
17104 {
17105 if (padsize > decl_size)
17106 {
17107 remove_loc_list_addr_table_entries (cur_descr);
17108 goto discard_descr;
17109 }
17110 decl_size -= padsize;
17111 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
17112 if (*descr_tail == NULL)
17113 {
17114 remove_loc_list_addr_table_entries (cur_descr);
17115 goto discard_descr;
17116 }
17117 descr_tail = &(*descr_tail)->dw_loc_next;
17118 padsize = 0;
17119 }
17120 *descr_tail = cur_descr;
17121 descr_tail = tail;
17122 if (bitsize > decl_size)
17123 goto discard_descr;
17124 decl_size -= bitsize;
17125 if (last == NULL)
17126 {
17127 HOST_WIDE_INT offset = 0;
17128 if (GET_CODE (varloc) == VAR_LOCATION
17129 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
17130 {
17131 varloc = PAT_VAR_LOCATION_LOC (varloc);
17132 if (GET_CODE (varloc) == EXPR_LIST)
17133 varloc = XEXP (varloc, 0);
17134 }
17135 do
17136 {
17137 if (GET_CODE (varloc) == CONST
17138 || GET_CODE (varloc) == SIGN_EXTEND
17139 || GET_CODE (varloc) == ZERO_EXTEND)
17140 varloc = XEXP (varloc, 0);
17141 else if (GET_CODE (varloc) == SUBREG)
17142 varloc = SUBREG_REG (varloc);
17143 else
17144 break;
17145 }
17146 while (1);
17147 /* The DW_OP_bit_piece offset should be zero for register
17148 or implicit location descriptions and for empty location
17149 descriptions, but for memory addresses it needs big-endian
17150 adjustment. */
17151 if (MEM_P (varloc))
17152 {
17153 unsigned HOST_WIDE_INT memsize;
17154 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
17155 goto discard_descr;
17156 memsize *= BITS_PER_UNIT;
17157 if (memsize != bitsize)
17158 {
17159 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
17160 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
17161 goto discard_descr;
17162 if (memsize < bitsize)
17163 goto discard_descr;
17164 if (BITS_BIG_ENDIAN)
17165 offset = memsize - bitsize;
17166 }
17167 }
17168
17169 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17170 if (*descr_tail == NULL)
17171 goto discard_descr;
17172 descr_tail = &(*descr_tail)->dw_loc_next;
17173 }
17174 }
17175
17176 /* If there were any non-empty expressions, add padding till the end of
17177 the decl. */
17178 if (descr != NULL && decl_size != 0)
17179 {
17180 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17181 if (*descr_tail == NULL)
17182 goto discard_descr;
17183 }
17184 return descr;
17185
17186 discard_descr:
17187 /* Discard the descriptor and release any addr_table entries it uses. */
17188 remove_loc_list_addr_table_entries (descr);
17189 return NULL;
17190 }
17191
17192 /* Return the dwarf representation of the location list LOC_LIST of
17193 DECL. WANT_ADDRESS has the same meaning as in the loc_list_from_tree
17194 function. */
17195
17196 static dw_loc_list_ref
17197 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17198 {
17199 const char *endname, *secname;
17200 var_loc_view endview;
17201 rtx varloc;
17202 enum var_init_status initialized;
17203 struct var_loc_node *node;
17204 dw_loc_descr_ref descr;
17205 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17206 dw_loc_list_ref list = NULL;
17207 dw_loc_list_ref *listp = &list;
17208
17209 /* Now that we know what section we are using for a base,
17210 actually construct the list of locations.
17211 The first location information is what is passed to the
17212 function that creates the location list, and the remaining
17213 locations just get added on to that list.
17214 Note that we only know the start address for a location
17215 (i.e. where the location changes), so to build the range, we use
17216 the range [current location start, next location start].
17217 This means we have to special case the last node, and generate
17218 a range of [last location start, end of function label]. */
17219
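/* When the function is split into hot and cold partitions, pick the
   section name of the partition the first range lives in by
   temporarily pretending to be in that partition.  */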
17220 if (cfun && crtl->has_bb_partition)
17221 {
17222 bool save_in_cold_section_p = in_cold_section_p;
17223 in_cold_section_p = first_function_block_is_cold;
17224 if (loc_list->last_before_switch == NULL)
17225 in_cold_section_p = !in_cold_section_p;
17226 secname = secname_for_decl (decl);
17227 in_cold_section_p = save_in_cold_section_p;
17228 }
17229 else
17230 secname = secname_for_decl (decl);
17231
17232 for (node = loc_list->first; node; node = node->next)
17233 {
17234 bool range_across_switch = false;
17235 if (GET_CODE (node->loc) == EXPR_LIST
17236 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17237 {
17238 if (GET_CODE (node->loc) == EXPR_LIST)
17239 {
17240 descr = NULL;
17241 /* This requires DW_OP_{,bit_}piece, which is not usable
17242 inside DWARF expressions. */
17243 if (want_address == 2)
17244 descr = dw_sra_loc_expr (decl, node->loc);
17245 }
17246 else
17247 {
17248 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17249 varloc = NOTE_VAR_LOCATION (node->loc);
17250 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17251 }
17252 if (descr)
17253 {
17254 /* If section switch happens in between node->label
17255 and node->next->label (or end of function) and
17256 we can't emit it as a single entry list,
17257 emit two ranges, first one ending at the end
17258 of first partition and second one starting at the
17259 beginning of second partition. */
17260 if (node == loc_list->last_before_switch
17261 && (node != loc_list->first || loc_list->first->next
17262 /* If we are to emit a view number, we will emit
17263 a loclist rather than a single location
17264 expression for the entire function (see
17265 loc_list_has_views), so we have to split the
17266 range that straddles across partitions. */
17267 || !ZERO_VIEW_P (node->view))
17268 && current_function_decl)
17269 {
17270 endname = cfun->fde->dw_fde_end;
17271 endview = 0;
17272 range_across_switch = true;
17273 }
17274 /* The variable has a location between NODE->LABEL and
17275 NODE->NEXT->LABEL. */
17276 else if (node->next)
17277 endname = node->next->label, endview = node->next->view;
17278 /* If the variable has a location at the last label
17279 it keeps its location until the end of function. */
17280 else if (!current_function_decl)
17281 endname = text_end_label, endview = 0;
17282 else
17283 {
17284 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17285 current_function_funcdef_no);
17286 endname = ggc_strdup (label_id);
17287 endview = 0;
17288 }
17289
17290 *listp = new_loc_list (descr, node->label, node->view,
17291 endname, endview, secname);
17292 if (TREE_CODE (decl) == PARM_DECL
17293 && node == loc_list->first
17294 && NOTE_P (node->loc)
17295 && strcmp (node->label, endname) == 0)
17296 (*listp)->force = true;
17297 listp = &(*listp)->dw_loc_next;
17298 }
17299 }
17300
17301 if (cfun
17302 && crtl->has_bb_partition
17303 && node == loc_list->last_before_switch)
17304 {
17305 bool save_in_cold_section_p = in_cold_section_p;
17306 in_cold_section_p = !first_function_block_is_cold;
17307 secname = secname_for_decl (decl);
17308 in_cold_section_p = save_in_cold_section_p;
17309 }
17310
17311 if (range_across_switch)
17312 {
17313 if (GET_CODE (node->loc) == EXPR_LIST)
17314 descr = dw_sra_loc_expr (decl, node->loc);
17315 else
17316 {
17317 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17318 varloc = NOTE_VAR_LOCATION (node->loc);
17319 descr = dw_loc_list_1 (decl, varloc, want_address,
17320 initialized);
17321 }
17322 gcc_assert (descr);
17323 /* The variable has a location between NODE->LABEL and
17324 NODE->NEXT->LABEL. */
17325 if (node->next)
17326 endname = node->next->label, endview = node->next->view;
17327 else
17328 endname = cfun->fde->dw_fde_second_end, endview = 0;
17329 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17330 endname, endview, secname);
17331 listp = &(*listp)->dw_loc_next;
17332 }
17333 }
17334
17335 /* Try to avoid the overhead of a location list by emitting a single
17336 location expression instead, but only if we didn't have more than one
17337 location entry in the first place. If some entries were not
17338 representable, we don't want to pretend that the one entry that was
17339 representable applies to the entire scope in which the variable is
17340 available. */
17341 if (list && loc_list->first->next)
17342 gen_llsym (list);
17343 else
17344 maybe_gen_llsym (list);
17345
17346 return list;
17347 }
17348
17349 /* Return true if the loc_list has only a single element and thus can be
17350 represented as a location description. */
17351
17352 static bool
17353 single_element_loc_list_p (dw_loc_list_ref list)
17354 {
17355 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17356 return !list->ll_symbol;
17357 }
17358
17359 /* Duplicate a single element of location list. */
17360
17361 static inline dw_loc_descr_ref
17362 copy_loc_descr (dw_loc_descr_ref ref)
17363 {
17364 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17365 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17366 return copy;
17367 }
17368
17369 /* To each location in list LIST append loc descr REF. */
17370
17371 static void
17372 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17373 {
17374 dw_loc_descr_ref copy;
17375 add_loc_descr (&list->expr, ref);
17376 list = list->dw_loc_next;
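/* The first location takes REF itself; every other location gets a
   fresh deep copy of the whole REF chain appended.  */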
17377 while (list)
17378 {
17379 copy = copy_loc_descr (ref);
17380 add_loc_descr (&list->expr, copy);
17381 while (copy->dw_loc_next)
17382 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17383 list = list->dw_loc_next;
17384 }
17385 }
17386
17387 /* To each location in list LIST prepend loc descr REF. */
17388
17389 static void
17390 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17391 {
17392 dw_loc_descr_ref copy;
17393 dw_loc_descr_ref ref_end = list->expr;
17394 add_loc_descr (&ref, list->expr);
17395 list->expr = ref;
17396 list = list->dw_loc_next;
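/* For every remaining location, deep-copy the prepended prefix (the
   chain up to REF_END, the original head of the first expression) and
   splice the copy in front of that location's own expression.  */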
17397 while (list)
17398 {
17399 dw_loc_descr_ref end = list->expr;
17400 list->expr = copy = copy_loc_descr (ref);
17401 while (copy->dw_loc_next != ref_end)
17402 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17403 copy->dw_loc_next = end;
17404 list = list->dw_loc_next;
17405 }
17406 }
17407
17408 /* Given two lists RET and LIST
17409 produce location list that is result of adding expression in LIST
17410 to expression in RET on each position in program.
17411 Might be destructive on both RET and LIST.
17412
17413 TODO: We handle only simple cases of RET or LIST having at most one
17414 element. The general case would involve sorting the lists in program
17415 order and merging them, which will need some additional work.
17416 Adding that will improve the quality of debug info, especially for
17417 SRA-ed structures. */
17418
17419 static void
17420 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17421 {
17422 if (!list)
17423 return;
17424 if (!*ret)
17425 {
17426 *ret = list;
17427 return;
17428 }
17429 if (!list->dw_loc_next)
17430 {
17431 add_loc_descr_to_each (*ret, list->expr);
17432 return;
17433 }
17434 if (!(*ret)->dw_loc_next)
17435 {
17436 prepend_loc_descr_to_each (list, (*ret)->expr);
17437 *ret = list;
17438 return;
17439 }
17440 expansion_failed (NULL_TREE, NULL_RTX,
17441 "Don't know how to merge two non-trivial"
17442 " location lists.\n");
17443 *ret = NULL;
17444 return;
17445 }
17446
17447 /* LOC is a constant expression. Try our luck: look it up in the constant
17448 pool and return a loc_descr for its address. */
17449
17450 static dw_loc_descr_ref
17451 cst_pool_loc_descr (tree loc)
17452 {
17453 /* Get an RTL for this, if something has been emitted. */
17454 rtx rtl = lookup_constant_def (loc);
17455
17456 if (!rtl || !MEM_P (rtl))
17457 {
17458 gcc_assert (!rtl);
17459 return 0;
17460 }
17461 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17462
17463 /* TODO: We might get more coverage if we were actually delaying expansion
17464 of all expressions till the end of compilation, when constant pools are
17465 fully populated. */
17466 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17467 {
17468 expansion_failed (loc, NULL_RTX,
17469 "CST value in contant pool but not marked.");
17470 return 0;
17471 }
17472 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17473 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17474 }
17475
17476 /* Return a dw_loc_list representing the address of addr_expr LOC
17477 by looking for an inner INDIRECT_REF expression and turning
17478 it into simple arithmetic.
17479
17480 See loc_list_from_tree for the meaning of CONTEXT. */
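/* As an illustrative sketch of the common case handled below: for
   &ptr->field with a constant, nonzero field offset and TOPLEV set, the
   result is the location list computing the value of PTR followed by
   DW_OP_plus_uconst <offset>; DW_OP_stack_value. */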
17481
17482 static dw_loc_list_ref
17483 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17484 loc_descr_context *context)
17485 {
17486 tree obj, offset;
17487 poly_int64 bitsize, bitpos, bytepos;
17488 machine_mode mode;
17489 int unsignedp, reversep, volatilep = 0;
17490 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17491
17492 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17493 &bitsize, &bitpos, &offset, &mode,
17494 &unsignedp, &reversep, &volatilep);
17495 STRIP_NOPS (obj);
17496 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17497 {
17498 expansion_failed (loc, NULL_RTX, "bitfield access");
17499 return 0;
17500 }
17501 if (!INDIRECT_REF_P (obj))
17502 {
17503 expansion_failed (obj,
17504 NULL_RTX, "no indirect ref in inner reference");
17505 return 0;
17506 }
17507 if (!offset && known_eq (bitpos, 0))
17508 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17509 context);
17510 else if (toplev
17511 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17512 && (dwarf_version >= 4 || !dwarf_strict))
17513 {
17514 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17515 if (!list_ret)
17516 return 0;
17517 if (offset)
17518 {
17519 /* Variable offset. */
17520 list_ret1 = loc_list_from_tree (offset, 0, context);
17521 if (list_ret1 == 0)
17522 return 0;
17523 add_loc_list (&list_ret, list_ret1);
17524 if (!list_ret)
17525 return 0;
17526 add_loc_descr_to_each (list_ret,
17527 new_loc_descr (DW_OP_plus, 0, 0));
17528 }
17529 HOST_WIDE_INT value;
17530 if (bytepos.is_constant (&value) && value > 0)
17531 add_loc_descr_to_each (list_ret,
17532 new_loc_descr (DW_OP_plus_uconst, value, 0));
17533 else if (maybe_ne (bytepos, 0))
17534 loc_list_plus_const (list_ret, bytepos);
17535 add_loc_descr_to_each (list_ret,
17536 new_loc_descr (DW_OP_stack_value, 0, 0));
17537 }
17538 return list_ret;
17539 }
17540
17541 /* Set LOC to the next operation that is not a DW_OP_nop operation. If all
17542 operations from LOC onward are nops, move to the last one. Insert in NOPS
17543 all operations that are skipped. */
17544
17545 static void
17546 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17547 hash_set<dw_loc_descr_ref> &nops)
17548 {
17549 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17550 {
17551 nops.add (loc);
17552 loc = loc->dw_loc_next;
17553 }
17554 }
17555
17556 /* Helper for loc_descr_without_nops: free the location description operation
17557 P. */
17558
17559 bool
17560 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17561 {
17562 ggc_free (loc);
17563 return true;
17564 }
17565
17566 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17567 finishes LOC. */
17568
17569 static void
17570 loc_descr_without_nops (dw_loc_descr_ref &loc)
17571 {
17572 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17573 return;
17574
17575 /* Set of all DW_OP_nop operations we remove. */
17576 hash_set<dw_loc_descr_ref> nops;
17577
17578 /* First, strip all prefix NOP operations in order to keep the head of the
17579 operations list. */
17580 loc_descr_to_next_no_nop (loc, nops);
17581
17582 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17583 {
17584 /* For control flow operations: strip "prefix" nops in destination
17585 labels. */
17586 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17587 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17588 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17589 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17590
17591 /* Do the same for the operations that follow, then move to the next
17592 iteration. */
17593 if (cur->dw_loc_next != NULL)
17594 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17595 cur = cur->dw_loc_next;
17596 }
17597
17598 nops.traverse<void *, free_loc_descr> (NULL);
17599 }
17600
17601
17602 struct dwarf_procedure_info;
17603
17604 /* Helper structure for location descriptions generation. */
17605 struct loc_descr_context
17606 {
17607 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17608 NULL_TREE if DW_OP_push_object_address is invalid for this location
17609 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17610 tree context_type;
17611 /* The ..._DECL node that should be translated as a
17612 DW_OP_push_object_address operation. */
17613 tree base_decl;
17614 /* Information about the DWARF procedure we are currently generating. NULL if
17615 we are not generating a DWARF procedure. */
17616 struct dwarf_procedure_info *dpi;
17617 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17618 by the consumer. Used for DW_TAG_generic_subrange attributes. */
17619 bool placeholder_arg;
17620 /* True if PLACEHOLDER_EXPR has been seen. */
17621 bool placeholder_seen;
17622 };
17623
17624 /* DWARF procedures generation
17625
17626 DWARF expressions (aka location descriptions) are used to encode variable
17627 quantities such as sizes or offsets. Such computations can have redundant parts
17628 that can be factorized in order to reduce the size of the output debug
17629 information. This is the whole point of DWARF procedures.
17630
17631 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17632 already factorized into functions ("size functions") in order to handle very
17633 big and complex types. Such functions are quite simple: they have integral
17634 arguments, they return an integral result and their body contains only a
17635 return statement with arithmetic expressions. This is the only kind of
17636 function we are interested in translating into DWARF procedures here.
17637
17638 DWARF expressions and DWARF procedures are executed using a stack, so we have
17639 to define some calling convention for them to interact. Let's say that:
17640
17641 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17642 all arguments in reverse order (right-to-left) so that when the DWARF
17643 procedure execution starts, the first argument is the top of the stack.
17644
17645 - Then, when returning, the DWARF procedure must have consumed all arguments
17646 on the stack, must have pushed the result and touched nothing else.
17647
17648 - Each integral argument and the result are integral types that can be held
17649 in a single stack slot.
17650
17651 - We call "frame offset" the number of stack slots that are "under DWARF
17652 procedure control": it includes the arguments slots, the temporaries and
17653 the result slot. Thus, it is equal to the number of arguments when the
17654 procedure execution starts and must be equal to one (the result) when it
17655 returns. */
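
/* As an illustrative example of the convention above (not an excerpt from a
   real size function): a DWARF procedure returning the sum of its two
   arguments needs a single DW_OP_plus operation. On entry the stack holds
   both arguments with the first one on top, so the frame offset is 2;
   DW_OP_plus pops them and pushes the result, leaving the frame offset at 1
   as required. */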
17656
17657 /* Helper structure used when generating operations for a DWARF procedure. */
17658 struct dwarf_procedure_info
17659 {
17660 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17661 currently translated. */
17662 tree fndecl;
17663 /* The number of arguments FNDECL takes. */
17664 unsigned args_count;
17665 };
17666
17667 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17668 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17669 equate it to this DIE. */
17670
17671 static dw_die_ref
17672 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17673 dw_die_ref parent_die)
17674 {
17675 dw_die_ref dwarf_proc_die;
17676
17677 if ((dwarf_version < 3 && dwarf_strict)
17678 || location == NULL)
17679 return NULL;
17680
17681 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17682 if (fndecl)
17683 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17684 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17685 return dwarf_proc_die;
17686 }
17687
17688 /* Return whether TYPE is a supported type as a DWARF procedure argument
17689 type or return type (we handle only scalar types and pointer types that
17690 aren't wider than the DWARF expression evaluation stack). */
17691
17692 static bool
17693 is_handled_procedure_type (tree type)
17694 {
17695 return ((INTEGRAL_TYPE_P (type)
17696 || TREE_CODE (type) == OFFSET_TYPE
17697 || TREE_CODE (type) == POINTER_TYPE)
17698 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17699 }
17700
17701 /* Helper for resolve_args_picking: do the same but stop when coming across
17702 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17703 offset *before* evaluating the corresponding operation. */
17704
17705 static bool
17706 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17707 struct dwarf_procedure_info *dpi,
17708 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17709 {
17710 /* The "frame_offset" identifier is already used to name a macro... */
17711 unsigned frame_offset_ = initial_frame_offset;
17712 dw_loc_descr_ref l;
17713
17714 for (l = loc; l != NULL;)
17715 {
17716 bool existed;
17717 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17718
17719 /* If we already met this node, there is nothing to compute anymore. */
17720 if (existed)
17721 {
17722 /* Make sure that the stack size is consistent wherever the execution
17723 flow comes from. */
17724 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17725 break;
17726 }
17727 l_frame_offset = frame_offset_;
17728
17729 /* If needed, relocate the picking offset with respect to the frame
17730 offset. */
17731 if (l->frame_offset_rel)
17732 {
17733 unsigned HOST_WIDE_INT off;
17734 switch (l->dw_loc_opc)
17735 {
17736 case DW_OP_pick:
17737 off = l->dw_loc_oprnd1.v.val_unsigned;
17738 break;
17739 case DW_OP_dup:
17740 off = 0;
17741 break;
17742 case DW_OP_over:
17743 off = 1;
17744 break;
17745 default:
17746 gcc_unreachable ();
17747 }
17748 /* frame_offset_ is the size of the current stack frame, including
17749 incoming arguments. Besides, the arguments are pushed
17750 right-to-left. Thus, in order to access the Nth argument from
17751 this operation node, the picking has to skip temporaries *plus*
17752 one stack slot per argument (0 for the first one, 1 for the second
17753 one, etc.).
17754
17755 The targeted argument number (N) is already set as the operand,
17756 and the number of temporaries can be computed with:
17757 frame_offset_ - dpi->args_count */
17758 off += frame_offset_ - dpi->args_count;
17759
17760 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17761 if (off > 255)
17762 return false;
17763
17764 if (off == 0)
17765 {
17766 l->dw_loc_opc = DW_OP_dup;
17767 l->dw_loc_oprnd1.v.val_unsigned = 0;
17768 }
17769 else if (off == 1)
17770 {
17771 l->dw_loc_opc = DW_OP_over;
17772 l->dw_loc_oprnd1.v.val_unsigned = 0;
17773 }
17774 else
17775 {
17776 l->dw_loc_opc = DW_OP_pick;
17777 l->dw_loc_oprnd1.v.val_unsigned = off;
17778 }
17779 }
17780
17781 /* Update frame_offset according to the effect the current operation has
17782 on the stack. */
17783 switch (l->dw_loc_opc)
17784 {
17785 case DW_OP_deref:
17786 case DW_OP_swap:
17787 case DW_OP_rot:
17788 case DW_OP_abs:
17789 case DW_OP_neg:
17790 case DW_OP_not:
17791 case DW_OP_plus_uconst:
17792 case DW_OP_skip:
17793 case DW_OP_reg0:
17794 case DW_OP_reg1:
17795 case DW_OP_reg2:
17796 case DW_OP_reg3:
17797 case DW_OP_reg4:
17798 case DW_OP_reg5:
17799 case DW_OP_reg6:
17800 case DW_OP_reg7:
17801 case DW_OP_reg8:
17802 case DW_OP_reg9:
17803 case DW_OP_reg10:
17804 case DW_OP_reg11:
17805 case DW_OP_reg12:
17806 case DW_OP_reg13:
17807 case DW_OP_reg14:
17808 case DW_OP_reg15:
17809 case DW_OP_reg16:
17810 case DW_OP_reg17:
17811 case DW_OP_reg18:
17812 case DW_OP_reg19:
17813 case DW_OP_reg20:
17814 case DW_OP_reg21:
17815 case DW_OP_reg22:
17816 case DW_OP_reg23:
17817 case DW_OP_reg24:
17818 case DW_OP_reg25:
17819 case DW_OP_reg26:
17820 case DW_OP_reg27:
17821 case DW_OP_reg28:
17822 case DW_OP_reg29:
17823 case DW_OP_reg30:
17824 case DW_OP_reg31:
17825 case DW_OP_bregx:
17826 case DW_OP_piece:
17827 case DW_OP_deref_size:
17828 case DW_OP_nop:
17829 case DW_OP_bit_piece:
17830 case DW_OP_implicit_value:
17831 case DW_OP_stack_value:
17832 break;
17833
17834 case DW_OP_addr:
17835 case DW_OP_const1u:
17836 case DW_OP_const1s:
17837 case DW_OP_const2u:
17838 case DW_OP_const2s:
17839 case DW_OP_const4u:
17840 case DW_OP_const4s:
17841 case DW_OP_const8u:
17842 case DW_OP_const8s:
17843 case DW_OP_constu:
17844 case DW_OP_consts:
17845 case DW_OP_dup:
17846 case DW_OP_over:
17847 case DW_OP_pick:
17848 case DW_OP_lit0:
17849 case DW_OP_lit1:
17850 case DW_OP_lit2:
17851 case DW_OP_lit3:
17852 case DW_OP_lit4:
17853 case DW_OP_lit5:
17854 case DW_OP_lit6:
17855 case DW_OP_lit7:
17856 case DW_OP_lit8:
17857 case DW_OP_lit9:
17858 case DW_OP_lit10:
17859 case DW_OP_lit11:
17860 case DW_OP_lit12:
17861 case DW_OP_lit13:
17862 case DW_OP_lit14:
17863 case DW_OP_lit15:
17864 case DW_OP_lit16:
17865 case DW_OP_lit17:
17866 case DW_OP_lit18:
17867 case DW_OP_lit19:
17868 case DW_OP_lit20:
17869 case DW_OP_lit21:
17870 case DW_OP_lit22:
17871 case DW_OP_lit23:
17872 case DW_OP_lit24:
17873 case DW_OP_lit25:
17874 case DW_OP_lit26:
17875 case DW_OP_lit27:
17876 case DW_OP_lit28:
17877 case DW_OP_lit29:
17878 case DW_OP_lit30:
17879 case DW_OP_lit31:
17880 case DW_OP_breg0:
17881 case DW_OP_breg1:
17882 case DW_OP_breg2:
17883 case DW_OP_breg3:
17884 case DW_OP_breg4:
17885 case DW_OP_breg5:
17886 case DW_OP_breg6:
17887 case DW_OP_breg7:
17888 case DW_OP_breg8:
17889 case DW_OP_breg9:
17890 case DW_OP_breg10:
17891 case DW_OP_breg11:
17892 case DW_OP_breg12:
17893 case DW_OP_breg13:
17894 case DW_OP_breg14:
17895 case DW_OP_breg15:
17896 case DW_OP_breg16:
17897 case DW_OP_breg17:
17898 case DW_OP_breg18:
17899 case DW_OP_breg19:
17900 case DW_OP_breg20:
17901 case DW_OP_breg21:
17902 case DW_OP_breg22:
17903 case DW_OP_breg23:
17904 case DW_OP_breg24:
17905 case DW_OP_breg25:
17906 case DW_OP_breg26:
17907 case DW_OP_breg27:
17908 case DW_OP_breg28:
17909 case DW_OP_breg29:
17910 case DW_OP_breg30:
17911 case DW_OP_breg31:
17912 case DW_OP_fbreg:
17913 case DW_OP_push_object_address:
17914 case DW_OP_call_frame_cfa:
17915 case DW_OP_GNU_variable_value:
17916 case DW_OP_GNU_addr_index:
17917 case DW_OP_GNU_const_index:
17918 ++frame_offset_;
17919 break;
17920
17921 case DW_OP_drop:
17922 case DW_OP_xderef:
17923 case DW_OP_and:
17924 case DW_OP_div:
17925 case DW_OP_minus:
17926 case DW_OP_mod:
17927 case DW_OP_mul:
17928 case DW_OP_or:
17929 case DW_OP_plus:
17930 case DW_OP_shl:
17931 case DW_OP_shr:
17932 case DW_OP_shra:
17933 case DW_OP_xor:
17934 case DW_OP_bra:
17935 case DW_OP_eq:
17936 case DW_OP_ge:
17937 case DW_OP_gt:
17938 case DW_OP_le:
17939 case DW_OP_lt:
17940 case DW_OP_ne:
17941 case DW_OP_regx:
17942 case DW_OP_xderef_size:
17943 --frame_offset_;
17944 break;
17945
17946 case DW_OP_call2:
17947 case DW_OP_call4:
17948 case DW_OP_call_ref:
17949 {
17950 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17951 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17952
17953 if (stack_usage == NULL)
17954 return false;
17955 frame_offset_ += *stack_usage;
17956 break;
17957 }
17958
17959 case DW_OP_implicit_pointer:
17960 case DW_OP_entry_value:
17961 case DW_OP_const_type:
17962 case DW_OP_regval_type:
17963 case DW_OP_deref_type:
17964 case DW_OP_convert:
17965 case DW_OP_reinterpret:
17966 case DW_OP_form_tls_address:
17967 case DW_OP_GNU_push_tls_address:
17968 case DW_OP_GNU_uninit:
17969 case DW_OP_GNU_encoded_addr:
17970 case DW_OP_GNU_implicit_pointer:
17971 case DW_OP_GNU_entry_value:
17972 case DW_OP_GNU_const_type:
17973 case DW_OP_GNU_regval_type:
17974 case DW_OP_GNU_deref_type:
17975 case DW_OP_GNU_convert:
17976 case DW_OP_GNU_reinterpret:
17977 case DW_OP_GNU_parameter_ref:
17978 /* loc_list_from_tree will probably not output these operations for
17979 size functions, so assume they will not appear here. */
17980 /* Fall through... */
17981
17982 default:
17983 gcc_unreachable ();
17984 }
17985
17986 /* Now, follow the control flow (except subroutine calls). */
17987 switch (l->dw_loc_opc)
17988 {
17989 case DW_OP_bra:
17990 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17991 frame_offsets))
17992 return false;
17993 /* Fall through. */
17994
17995 case DW_OP_skip:
17996 l = l->dw_loc_oprnd1.v.val_loc;
17997 break;
17998
17999 case DW_OP_stack_value:
18000 return true;
18001
18002 default:
18003 l = l->dw_loc_next;
18004 break;
18005 }
18006 }
18007
18008 return true;
18009 }
18010
18011 /* Make a DFS over operations reachable through LOC (i.e. follow branch
18012 operations) in order to resolve the operand of DW_OP_pick operations that
18013 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
18014 offset *before* LOC is executed. Return whether all relocations were
18015 successful. */
18016
18017 static bool
18018 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
18019 struct dwarf_procedure_info *dpi)
18020 {
18021 /* Associate to all visited operations the frame offset *before* evaluating
18022 this operation. */
18023 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
18024
18025 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
18026 frame_offsets);
18027 }
18028
18029 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
18030 Return NULL if it is not possible. */
18031
18032 static dw_die_ref
18033 function_to_dwarf_procedure (tree fndecl)
18034 {
18035 struct loc_descr_context ctx;
18036 struct dwarf_procedure_info dpi;
18037 dw_die_ref dwarf_proc_die;
18038 tree tree_body = DECL_SAVED_TREE (fndecl);
18039 dw_loc_descr_ref loc_body, epilogue;
18040
18041 tree cursor;
18042 unsigned i;
18043
18044 /* Do not generate multiple DWARF procedures for the same function
18045 declaration. */
18046 dwarf_proc_die = lookup_decl_die (fndecl);
18047 if (dwarf_proc_die != NULL)
18048 return dwarf_proc_die;
18049
18050 /* DWARF procedures are available starting with the DWARFv3 standard. */
18051 if (dwarf_version < 3 && dwarf_strict)
18052 return NULL;
18053
18054 /* We handle only functions for which we still have a body, that return a
18055 supported type and that take arguments with supported types. Note that
18056 there is no point translating functions that return nothing. */
18057 if (tree_body == NULL_TREE
18058 || DECL_RESULT (fndecl) == NULL_TREE
18059 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
18060 return NULL;
18061
18062 for (cursor = DECL_ARGUMENTS (fndecl);
18063 cursor != NULL_TREE;
18064 cursor = TREE_CHAIN (cursor))
18065 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
18066 return NULL;
18067
18068 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
18069 if (TREE_CODE (tree_body) != RETURN_EXPR)
18070 return NULL;
18071 tree_body = TREE_OPERAND (tree_body, 0);
18072 if (TREE_CODE (tree_body) != MODIFY_EXPR
18073 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
18074 return NULL;
18075 tree_body = TREE_OPERAND (tree_body, 1);
18076
18077 /* Try to translate the body expression itself. Note that this will probably
18078 cause an infinite recursion if its call graph has a cycle. This is very
18079 unlikely for size functions, however, so don't bother with such things at
18080 the moment. */
18081 ctx.context_type = NULL_TREE;
18082 ctx.base_decl = NULL_TREE;
18083 ctx.dpi = &dpi;
18084 ctx.placeholder_arg = false;
18085 ctx.placeholder_seen = false;
18086 dpi.fndecl = fndecl;
18087 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
18088 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
18089 if (!loc_body)
18090 return NULL;
18091
18092 /* After evaluating all operands in "loc_body", we should still have on the
18093 stack all arguments plus the desired function result (top of the stack).
18094 Generate code in order to keep only the result in our stack frame. */
18095 epilogue = NULL;
18096 for (i = 0; i < dpi.args_count; ++i)
18097 {
18098 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
18099 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
18100 op_couple->dw_loc_next->dw_loc_next = epilogue;
18101 epilogue = op_couple;
18102 }
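/* Each DW_OP_swap/DW_OP_drop pair built above discards one leftover argument
   while keeping the result on top of the stack, so after ARGS_COUNT pairs
   only the result remains. */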
18103 add_loc_descr (&loc_body, epilogue);
18104 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
18105 return NULL;
18106
18107 /* Trailing nops from loc_descriptor_from_tree (if any) could not be removed
18108 earlier because they were considered useful. Now that there is an epilogue,
18109 they no longer are, so give it another try. */
18110 loc_descr_without_nops (loc_body);
18111
18112 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
18113 a DW_TAG_dwarf_procedure, so we may have a conflict here. It's unlikely,
18114 though, given that size functions do not come from source, so they should
18115 not have a dedicated DW_TAG_subprogram DIE. */
18116 dwarf_proc_die
18117 = new_dwarf_proc_die (loc_body, fndecl,
18118 get_context_die (DECL_CONTEXT (fndecl)));
18119
18120 /* The called DWARF procedure consumes one stack slot per argument and
18121 returns one stack slot. */
18122 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
18123
18124 return dwarf_proc_die;
18125 }
18126
18127
18128 /* Generate a DWARF location list representing LOC.
18129 If WANT_ADDRESS is 0, an expression computing the value of LOC will be returned.
18130 If WANT_ADDRESS is 1, an expression computing the address of LOC will be returned.
18131 If WANT_ADDRESS is 2, an expression computing an address usable in a location
18132 will be returned (i.e. DW_OP_reg can be used
18133 to refer to register values).
18134
18135 CONTEXT provides information to customize the location descriptions
18136 generation. Its context_type field specifies what type is implicitly
18137 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
18138 will not be generated.
18139
18140 Its DPI field determines whether we are generating a DWARF expression for a
18141 DWARF procedure, in which case PARM_DECL references are processed specially.
18142
18143 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
18144 and dpi fields were null. */
18145
18146 static dw_loc_list_ref
18147 loc_list_from_tree_1 (tree loc, int want_address,
18148 struct loc_descr_context *context)
18149 {
18150 dw_loc_descr_ref ret = NULL, ret1 = NULL;
18151 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
18152 int have_address = 0;
18153 enum dwarf_location_atom op;
18154
18155 /* ??? Most of the time we do not take proper care of sign/zero
18156 extending the values. Hopefully this won't be a real
18157 problem... */
18158
18159 if (context != NULL
18160 && context->base_decl == loc
18161 && want_address == 0)
18162 {
18163 if (dwarf_version >= 3 || !dwarf_strict)
18164 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
18165 NULL, 0, NULL, 0, NULL);
18166 else
18167 return NULL;
18168 }
18169
18170 switch (TREE_CODE (loc))
18171 {
18172 case ERROR_MARK:
18173 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18174 return 0;
18175
18176 case PLACEHOLDER_EXPR:
18177 /* This case involves extracting fields from an object to determine the
18178 position of other fields. It is supposed to appear only as the first
18179 operand of COMPONENT_REF nodes and to reference precisely the type
18180 that the context allows. */
18181 if (context != NULL
18182 && TREE_TYPE (loc) == context->context_type
18183 && want_address >= 1)
18184 {
18185 if (dwarf_version >= 3 || !dwarf_strict)
18186 {
18187 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18188 have_address = 1;
18189 break;
18190 }
18191 else
18192 return NULL;
18193 }
18194 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18195 the single argument passed by the consumer. */
18196 else if (context != NULL
18197 && context->placeholder_arg
18198 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18199 && want_address == 0)
18200 {
18201 ret = new_loc_descr (DW_OP_pick, 0, 0);
18202 ret->frame_offset_rel = 1;
18203 context->placeholder_seen = true;
18204 break;
18205 }
18206 else
18207 expansion_failed (loc, NULL_RTX,
18208 "PLACEHOLDER_EXPR for an unexpected type");
18209 break;
18210
18211 case CALL_EXPR:
18212 {
18213 const int nargs = call_expr_nargs (loc);
18214 tree callee = get_callee_fndecl (loc);
18215 int i;
18216 dw_die_ref dwarf_proc;
18217
18218 if (callee == NULL_TREE)
18219 goto call_expansion_failed;
18220
18221 /* We handle only functions that return an integer. */
18222 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18223 goto call_expansion_failed;
18224
18225 dwarf_proc = function_to_dwarf_procedure (callee);
18226 if (dwarf_proc == NULL)
18227 goto call_expansion_failed;
18228
18229 /* Evaluate arguments right-to-left so that the first argument will
18230 be the top-most one on the stack. */
18231 for (i = nargs - 1; i >= 0; --i)
18232 {
18233 dw_loc_descr_ref loc_descr
18234 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18235 context);
18236
18237 if (loc_descr == NULL)
18238 goto call_expansion_failed;
18239
18240 add_loc_descr (&ret, loc_descr);
18241 }
18242
18243 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18244 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18245 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18246 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18247 add_loc_descr (&ret, ret1);
18248 break;
18249
18250 call_expansion_failed:
18251 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18252 /* There are no opcodes for these operations. */
18253 return 0;
18254 }
18255
18256 case PREINCREMENT_EXPR:
18257 case PREDECREMENT_EXPR:
18258 case POSTINCREMENT_EXPR:
18259 case POSTDECREMENT_EXPR:
18260 expansion_failed (loc, NULL_RTX, "PRE/POST INDCREMENT/DECREMENT");
18261 /* There are no opcodes for these operations. */
18262 return 0;
18263
18264 case ADDR_EXPR:
18265 /* If we already want an address, see if there is an INDIRECT_REF inside,
18266 e.g. for &this->field. */
18267 if (want_address)
18268 {
18269 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18270 (loc, want_address == 2, context);
18271 if (list_ret)
18272 have_address = 1;
18273 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18274 && (ret = cst_pool_loc_descr (loc)))
18275 have_address = 1;
18276 }
18277 /* Otherwise, process the argument and look for the address. */
18278 if (!list_ret && !ret)
18279 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18280 else
18281 {
18282 if (want_address)
18283 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18284 return NULL;
18285 }
18286 break;
18287
18288 case VAR_DECL:
18289 if (DECL_THREAD_LOCAL_P (loc))
18290 {
18291 rtx rtl;
18292 enum dwarf_location_atom tls_op;
18293 enum dtprel_bool dtprel = dtprel_false;
18294
18295 if (targetm.have_tls)
18296 {
18297 /* If this is not defined, we have no way to emit the
18298 data. */
18299 if (!targetm.asm_out.output_dwarf_dtprel)
18300 return 0;
18301
18302 /* The way DW_OP_GNU_push_tls_address is specified, we
18303 can only look up addresses of objects in the current
18304 module. We used DW_OP_addr as first op, but that's
18305 wrong, because DW_OP_addr is relocated by the debug
18306 info consumer, while DW_OP_GNU_push_tls_address
18307 operand shouldn't be. */
18308 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18309 return 0;
18310 dtprel = dtprel_true;
18311 /* We check for DWARF 5 here because gdb did not implement
18312 DW_OP_form_tls_address until after 7.12. */
18313 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18314 : DW_OP_GNU_push_tls_address);
18315 }
18316 else
18317 {
18318 if (!targetm.emutls.debug_form_tls_address
18319 || !(dwarf_version >= 3 || !dwarf_strict))
18320 return 0;
18321 /* We stuffed the control variable into the DECL_VALUE_EXPR
18322 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18323 no longer appear in gimple code. We used the control
18324 variable specifically so that we could pick it up here. */
18325 loc = DECL_VALUE_EXPR (loc);
18326 tls_op = DW_OP_form_tls_address;
18327 }
18328
18329 rtl = rtl_for_decl_location (loc);
18330 if (rtl == NULL_RTX)
18331 return 0;
18332
18333 if (!MEM_P (rtl))
18334 return 0;
18335 rtl = XEXP (rtl, 0);
18336 if (! CONSTANT_P (rtl))
18337 return 0;
18338
18339 ret = new_addr_loc_descr (rtl, dtprel);
18340 ret1 = new_loc_descr (tls_op, 0, 0);
18341 add_loc_descr (&ret, ret1);
18342
18343 have_address = 1;
18344 break;
18345 }
18346 /* FALLTHRU */
18347
18348 case PARM_DECL:
18349 if (context != NULL && context->dpi != NULL
18350 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18351 {
18352 /* We are generating code for a DWARF procedure and we want to access
18353 one of its arguments: find the appropriate argument offset and let
18354 the resolve_args_picking pass compute the offset that complies
18355 with the stack frame size. */
18356 unsigned i = 0;
18357 tree cursor;
18358
18359 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18360 cursor != NULL_TREE && cursor != loc;
18361 cursor = TREE_CHAIN (cursor), ++i)
18362 ;
18363 /* If we are translating a DWARF procedure, all referenced parameters
18364 must belong to the current function. */
18365 gcc_assert (cursor != NULL_TREE);
18366
18367 ret = new_loc_descr (DW_OP_pick, i, 0);
18368 ret->frame_offset_rel = 1;
18369 break;
18370 }
18371 /* FALLTHRU */
18372
18373 case RESULT_DECL:
18374 if (DECL_HAS_VALUE_EXPR_P (loc))
18375 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18376 want_address, context);
18377 /* FALLTHRU */
18378
18379 case FUNCTION_DECL:
18380 {
18381 rtx rtl;
18382 var_loc_list *loc_list = lookup_decl_loc (loc);
18383
18384 if (loc_list && loc_list->first)
18385 {
18386 list_ret = dw_loc_list (loc_list, loc, want_address);
18387 have_address = want_address != 0;
18388 break;
18389 }
18390 rtl = rtl_for_decl_location (loc);
18391 if (rtl == NULL_RTX)
18392 {
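/* Even without RTL, during early DWARF we can still refer to the value
   of a small integral or pointer local variable of the current function
   through DW_OP_GNU_variable_value, referencing its DIE (or the decl
   itself until the DIE exists). */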
18393 if (TREE_CODE (loc) != FUNCTION_DECL
18394 && early_dwarf
18395 && current_function_decl
18396 && want_address != 1
18397 && ! DECL_IGNORED_P (loc)
18398 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18399 || POINTER_TYPE_P (TREE_TYPE (loc)))
18400 && DECL_CONTEXT (loc) == current_function_decl
18401 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18402 <= DWARF2_ADDR_SIZE))
18403 {
18404 dw_die_ref ref = lookup_decl_die (loc);
18405 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18406 if (ref)
18407 {
18408 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18409 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18410 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18411 }
18412 else
18413 {
18414 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18415 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18416 }
18417 break;
18418 }
18419 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18420 return 0;
18421 }
18422 else if (CONST_INT_P (rtl))
18423 {
18424 HOST_WIDE_INT val = INTVAL (rtl);
18425 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18426 val &= GET_MODE_MASK (DECL_MODE (loc));
18427 ret = int_loc_descriptor (val);
18428 }
18429 else if (GET_CODE (rtl) == CONST_STRING)
18430 {
18431 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18432 return 0;
18433 }
18434 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18435 ret = new_addr_loc_descr (rtl, dtprel_false);
18436 else
18437 {
18438 machine_mode mode, mem_mode;
18439
18440 /* Certain constructs can only be represented at top-level. */
18441 if (want_address == 2)
18442 {
18443 ret = loc_descriptor (rtl, VOIDmode,
18444 VAR_INIT_STATUS_INITIALIZED);
18445 have_address = 1;
18446 }
18447 else
18448 {
18449 mode = GET_MODE (rtl);
18450 mem_mode = VOIDmode;
18451 if (MEM_P (rtl))
18452 {
18453 mem_mode = mode;
18454 mode = get_address_mode (rtl);
18455 rtl = XEXP (rtl, 0);
18456 have_address = 1;
18457 }
18458 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18459 VAR_INIT_STATUS_INITIALIZED);
18460 }
18461 if (!ret)
18462 expansion_failed (loc, rtl,
18463 "failed to produce loc descriptor for rtl");
18464 }
18465 }
18466 break;
18467
18468 case MEM_REF:
18469 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18470 {
18471 have_address = 1;
18472 goto do_plus;
18473 }
18474 /* Fallthru. */
18475 case INDIRECT_REF:
18476 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18477 have_address = 1;
18478 break;
18479
18480 case TARGET_MEM_REF:
18481 case SSA_NAME:
18482 case DEBUG_EXPR_DECL:
18483 return NULL;
18484
18485 case COMPOUND_EXPR:
18486 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18487 context);
18488
18489 CASE_CONVERT:
18490 case VIEW_CONVERT_EXPR:
18491 case SAVE_EXPR:
18492 case MODIFY_EXPR:
18493 case NON_LVALUE_EXPR:
18494 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18495 context);
18496
18497 case COMPONENT_REF:
18498 case BIT_FIELD_REF:
18499 case ARRAY_REF:
18500 case ARRAY_RANGE_REF:
18501 case REALPART_EXPR:
18502 case IMAGPART_EXPR:
18503 {
18504 tree obj, offset;
18505 poly_int64 bitsize, bitpos, bytepos;
18506 machine_mode mode;
18507 int unsignedp, reversep, volatilep = 0;
18508
18509 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18510 &unsignedp, &reversep, &volatilep);
18511
18512 gcc_assert (obj != loc);
18513
18514 list_ret = loc_list_from_tree_1 (obj,
18515 want_address == 2
18516 && known_eq (bitpos, 0)
18517 && !offset ? 2 : 1,
18518 context);
18519 /* TODO: We can extract the value of a small expression via shifting even
18520 for a nonzero bitpos. */
18521 if (list_ret == 0)
18522 return 0;
18523 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18524 || !multiple_p (bitsize, BITS_PER_UNIT))
18525 {
18526 expansion_failed (loc, NULL_RTX,
18527 "bitfield access");
18528 return 0;
18529 }
18530
18531 if (offset != NULL_TREE)
18532 {
18533 /* Variable offset. */
18534 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18535 if (list_ret1 == 0)
18536 return 0;
18537 add_loc_list (&list_ret, list_ret1);
18538 if (!list_ret)
18539 return 0;
18540 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18541 }
18542
18543 HOST_WIDE_INT value;
18544 if (bytepos.is_constant (&value) && value > 0)
18545 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18546 value, 0));
18547 else if (maybe_ne (bytepos, 0))
18548 loc_list_plus_const (list_ret, bytepos);
18549
18550 have_address = 1;
18551 break;
18552 }
18553
18554 case INTEGER_CST:
18555 if ((want_address || !tree_fits_shwi_p (loc))
18556 && (ret = cst_pool_loc_descr (loc)))
18557 have_address = 1;
18558 else if (want_address == 2
18559 && tree_fits_shwi_p (loc)
18560 && (ret = address_of_int_loc_descriptor
18561 (int_size_in_bytes (TREE_TYPE (loc)),
18562 tree_to_shwi (loc))))
18563 have_address = 1;
18564 else if (tree_fits_shwi_p (loc))
18565 ret = int_loc_descriptor (tree_to_shwi (loc));
18566 else if (tree_fits_uhwi_p (loc))
18567 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18568 else
18569 {
18570 expansion_failed (loc, NULL_RTX,
18571 "Integer operand is not host integer");
18572 return 0;
18573 }
18574 break;
18575
18576 case POLY_INT_CST:
18577 {
18578 if (want_address)
18579 {
18580 expansion_failed (loc, NULL_RTX,
18581 "constant address with a runtime component");
18582 return 0;
18583 }
18584 poly_int64 value;
18585 if (!poly_int_tree_p (loc, &value))
18586 {
18587 expansion_failed (loc, NULL_RTX, "constant too big");
18588 return 0;
18589 }
18590 ret = int_loc_descriptor (value);
18591 }
18592 break;
18593
18594 case CONSTRUCTOR:
18595 case REAL_CST:
18596 case STRING_CST:
18597 case COMPLEX_CST:
18598 if ((ret = cst_pool_loc_descr (loc)))
18599 have_address = 1;
18600 else if (TREE_CODE (loc) == CONSTRUCTOR)
18601 {
18602 tree type = TREE_TYPE (loc);
18603 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18604 unsigned HOST_WIDE_INT offset = 0;
18605 unsigned HOST_WIDE_INT cnt;
18606 constructor_elt *ce;
18607
18608 if (TREE_CODE (type) == RECORD_TYPE)
18609 {
18610 /* This is very limited, but it's enough to output
18611 pointers to member functions, as long as the
18612 referenced function is defined in the current
18613 translation unit. */
18614 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18615 {
18616 tree val = ce->value;
18617
18618 tree field = ce->index;
18619
18620 if (val)
18621 STRIP_NOPS (val);
18622
18623 if (!field || DECL_BIT_FIELD (field))
18624 {
18625 expansion_failed (loc, NULL_RTX,
18626 "bitfield in record type constructor");
18627 size = offset = (unsigned HOST_WIDE_INT)-1;
18628 ret = NULL;
18629 break;
18630 }
18631
18632 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18633 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18634 gcc_assert (pos + fieldsize <= size);
18635 if (pos < offset)
18636 {
18637 expansion_failed (loc, NULL_RTX,
18638 "out-of-order fields in record constructor");
18639 size = offset = (unsigned HOST_WIDE_INT)-1;
18640 ret = NULL;
18641 break;
18642 }
18643 if (pos > offset)
18644 {
18645 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18646 add_loc_descr (&ret, ret1);
18647 offset = pos;
18648 }
18649 if (val && fieldsize != 0)
18650 {
18651 ret1 = loc_descriptor_from_tree (val, want_address, context);
18652 if (!ret1)
18653 {
18654 expansion_failed (loc, NULL_RTX,
18655 "unsupported expression in field");
18656 size = offset = (unsigned HOST_WIDE_INT)-1;
18657 ret = NULL;
18658 break;
18659 }
18660 add_loc_descr (&ret, ret1);
18661 }
18662 if (fieldsize)
18663 {
18664 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18665 add_loc_descr (&ret, ret1);
18666 offset = pos + fieldsize;
18667 }
18668 }
18669
18670 if (offset != size)
18671 {
18672 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18673 add_loc_descr (&ret, ret1);
18674 offset = size;
18675 }
18676
18677 have_address = !!want_address;
18678 }
18679 else
18680 expansion_failed (loc, NULL_RTX,
18681 "constructor of non-record type");
18682 }
18683 else
18684 /* We can construct small constants here using int_loc_descriptor. */
18685 expansion_failed (loc, NULL_RTX,
18686 "constructor or constant not in constant pool");
18687 break;
18688
18689 case TRUTH_AND_EXPR:
18690 case TRUTH_ANDIF_EXPR:
18691 case BIT_AND_EXPR:
18692 op = DW_OP_and;
18693 goto do_binop;
18694
18695 case TRUTH_XOR_EXPR:
18696 case BIT_XOR_EXPR:
18697 op = DW_OP_xor;
18698 goto do_binop;
18699
18700 case TRUTH_OR_EXPR:
18701 case TRUTH_ORIF_EXPR:
18702 case BIT_IOR_EXPR:
18703 op = DW_OP_or;
18704 goto do_binop;
18705
18706 case FLOOR_DIV_EXPR:
18707 case CEIL_DIV_EXPR:
18708 case ROUND_DIV_EXPR:
18709 case TRUNC_DIV_EXPR:
18710 case EXACT_DIV_EXPR:
18711 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18712 return 0;
18713 op = DW_OP_div;
18714 goto do_binop;
18715
18716 case MINUS_EXPR:
18717 op = DW_OP_minus;
18718 goto do_binop;
18719
18720 case FLOOR_MOD_EXPR:
18721 case CEIL_MOD_EXPR:
18722 case ROUND_MOD_EXPR:
18723 case TRUNC_MOD_EXPR:
18724 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18725 {
18726 op = DW_OP_mod;
18727 goto do_binop;
18728 }
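/* Signed modulo: compute op0 - (op0 / op1) * op1 with the signed
   DW_OP_div, using DW_OP_over twice to duplicate both operands. */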
18729 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18730 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18731 if (list_ret == 0 || list_ret1 == 0)
18732 return 0;
18733
18734 add_loc_list (&list_ret, list_ret1);
18735 if (list_ret == 0)
18736 return 0;
18737 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18738 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18739 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18740 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18741 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18742 break;
18743
18744 case MULT_EXPR:
18745 op = DW_OP_mul;
18746 goto do_binop;
18747
18748 case LSHIFT_EXPR:
18749 op = DW_OP_shl;
18750 goto do_binop;
18751
18752 case RSHIFT_EXPR:
18753 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18754 goto do_binop;
18755
18756 case POINTER_PLUS_EXPR:
18757 case PLUS_EXPR:
18758 do_plus:
18759 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18760 {
18761 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18762 smarter to encode their opposite. The DW_OP_plus_uconst operation
18763 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18764 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18765 bytes, Y being the size of the operation that pushes the opposite
18766 of the addend. So let's choose the smallest representation. */
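/* For instance, an addend of -1 needs a 10-byte ULEB128 operand for
   DW_OP_plus_uconst (on a 64-bit host), whereas pushing its opposite is
   just DW_OP_lit1 followed by DW_OP_minus, two bytes in total. */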
18767 const tree tree_addend = TREE_OPERAND (loc, 1);
18768 offset_int wi_addend;
18769 HOST_WIDE_INT shwi_addend;
18770 dw_loc_descr_ref loc_naddend;
18771
18772 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18773 if (list_ret == 0)
18774 return 0;
18775
18776 /* Try to get the literal to push. It is the opposite of the addend,
18777 so as we rely on wrapping during DWARF evaluation, first decode
18778 the literal as a "DWARF-sized" signed number. */
18779 wi_addend = wi::to_offset (tree_addend);
18780 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18781 shwi_addend = wi_addend.to_shwi ();
18782 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18783 ? int_loc_descriptor (-shwi_addend)
18784 : NULL;
18785
18786 if (loc_naddend != NULL
18787 && ((unsigned) size_of_uleb128 (shwi_addend)
18788 > size_of_loc_descr (loc_naddend)))
18789 {
18790 add_loc_descr_to_each (list_ret, loc_naddend);
18791 add_loc_descr_to_each (list_ret,
18792 new_loc_descr (DW_OP_minus, 0, 0));
18793 }
18794 else
18795 {
18796 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18797 {
18798 loc_naddend = loc_cur;
18799 loc_cur = loc_cur->dw_loc_next;
18800 ggc_free (loc_naddend);
18801 }
18802 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18803 }
18804 break;
18805 }
18806
18807 op = DW_OP_plus;
18808 goto do_binop;
18809
18810 case LE_EXPR:
18811 op = DW_OP_le;
18812 goto do_comp_binop;
18813
18814 case GE_EXPR:
18815 op = DW_OP_ge;
18816 goto do_comp_binop;
18817
18818 case LT_EXPR:
18819 op = DW_OP_lt;
18820 goto do_comp_binop;
18821
18822 case GT_EXPR:
18823 op = DW_OP_gt;
18824 goto do_comp_binop;
18825
18826 do_comp_binop:
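/* The DWARF relational operators compare their operands as signed values,
   so unsigned operands need the dedicated expansion below. */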
18827 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18828 {
18829 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18830 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18831 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18832 TREE_CODE (loc));
18833 break;
18834 }
18835 else
18836 goto do_binop;
18837
18838 case EQ_EXPR:
18839 op = DW_OP_eq;
18840 goto do_binop;
18841
18842 case NE_EXPR:
18843 op = DW_OP_ne;
18844 goto do_binop;
18845
18846 do_binop:
18847 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18848 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18849 if (list_ret == 0 || list_ret1 == 0)
18850 return 0;
18851
18852 add_loc_list (&list_ret, list_ret1);
18853 if (list_ret == 0)
18854 return 0;
18855 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18856 break;
18857
18858 case TRUTH_NOT_EXPR:
18859 case BIT_NOT_EXPR:
18860 op = DW_OP_not;
18861 goto do_unop;
18862
18863 case ABS_EXPR:
18864 op = DW_OP_abs;
18865 goto do_unop;
18866
18867 case NEGATE_EXPR:
18868 op = DW_OP_neg;
18869 goto do_unop;
18870
18871 do_unop:
18872 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18873 if (list_ret == 0)
18874 return 0;
18875
18876 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18877 break;
18878
18879 case MIN_EXPR:
18880 case MAX_EXPR:
18881 {
18882 const enum tree_code code =
18883 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18884
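/* Rewrite MIN (a, b) as a > b ? b : a and MAX (a, b) as a < b ? b : a,
   then let the COND_EXPR handling below do the rest. */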
18885 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18886 build2 (code, integer_type_node,
18887 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18888 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18889 }
18890
18891 /* fall through */
18892
18893 case COND_EXPR:
18894 {
18895 dw_loc_descr_ref lhs
18896 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18897 dw_loc_list_ref rhs
18898 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18899 dw_loc_descr_ref bra_node, jump_node, tmp;
18900
18901 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18902 if (list_ret == 0 || lhs == 0 || rhs == 0)
18903 return 0;
18904
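/* Emit: <cond>; DW_OP_bra -> L1; <else-value>; DW_OP_skip -> L2;
   L1: <then-value>; L2: DW_OP_nop
   so that the then-value (LHS) is evaluated when the condition is nonzero
   and the else-value (RHS) otherwise. */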
18905 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18906 add_loc_descr_to_each (list_ret, bra_node);
18907
18908 add_loc_list (&list_ret, rhs);
18909 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18910 add_loc_descr_to_each (list_ret, jump_node);
18911
18912 add_loc_descr_to_each (list_ret, lhs);
18913 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18914 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18915
18916 /* ??? Need a node to point the skip at. Use a nop. */
18917 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18918 add_loc_descr_to_each (list_ret, tmp);
18919 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18920 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18921 }
18922 break;
18923
18924 case FIX_TRUNC_EXPR:
18925 return 0;
18926
18927 default:
18928 /* Leave front-end specific codes as simply unknown. This comes
18929 up, for instance, with the C STMT_EXPR. */
18930 if ((unsigned int) TREE_CODE (loc)
18931 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18932 {
18933 expansion_failed (loc, NULL_RTX,
18934 "language specific tree node");
18935 return 0;
18936 }
18937
18938 /* Otherwise this is a generic tree code; we should have listed all of
18939 these explicitly, so we must have forgotten one. */
18940 if (flag_checking)
18941 gcc_unreachable ();
18942
18943 /* In a release build, we want to degrade gracefully: better to
18944 generate incomplete debugging information than to crash. */
18945 return NULL;
18946 }
18947
18948 if (!ret && !list_ret)
18949 return 0;
18950
18951 if (want_address == 2 && !have_address
18952 && (dwarf_version >= 4 || !dwarf_strict))
18953 {
18954 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18955 {
18956 expansion_failed (loc, NULL_RTX,
18957 "DWARF address size mismatch");
18958 return 0;
18959 }
18960 if (ret)
18961 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18962 else
18963 add_loc_descr_to_each (list_ret,
18964 new_loc_descr (DW_OP_stack_value, 0, 0));
18965 have_address = 1;
18966 }
18967 /* Show if we can't fill the request for an address. */
18968 if (want_address && !have_address)
18969 {
18970 expansion_failed (loc, NULL_RTX,
18971 "Want address and only have value");
18972 return 0;
18973 }
18974
18975 gcc_assert (!ret || !list_ret);
18976
18977 /* If we've got an address and don't want one, dereference. */
18978 if (!want_address && have_address)
18979 {
18980 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18981
18982 if (size > DWARF2_ADDR_SIZE || size == -1)
18983 {
18984 expansion_failed (loc, NULL_RTX,
18985 "DWARF address size mismatch");
18986 return 0;
18987 }
18988 else if (size == DWARF2_ADDR_SIZE)
18989 op = DW_OP_deref;
18990 else
18991 op = DW_OP_deref_size;
18992
18993 if (ret)
18994 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18995 else
18996 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18997 }
18998 if (ret)
18999 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
19000
19001 return list_ret;
19002 }
19003
19004 /* Likewise, but strip useless DW_OP_nop operations in the resulting
19005 expressions. */
19006
19007 static dw_loc_list_ref
19008 loc_list_from_tree (tree loc, int want_address,
19009 struct loc_descr_context *context)
19010 {
19011 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
19012
19013 for (dw_loc_list_ref loc_cur = result;
19014 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
19015 loc_descr_without_nops (loc_cur->expr);
19016 return result;
19017 }
19018
19019 /* Same as above but return only a single location expression. */
19020 static dw_loc_descr_ref
19021 loc_descriptor_from_tree (tree loc, int want_address,
19022 struct loc_descr_context *context)
19023 {
19024 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
19025 if (!ret)
19026 return NULL;
19027 if (ret->dw_loc_next)
19028 {
19029 expansion_failed (loc, NULL_RTX,
19030 "Location list where only loc descriptor needed");
19031 return NULL;
19032 }
19033 return ret->expr;
19034 }
19035
19036 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
19037 pointer to the declared type for the relevant field variable, or return
19038 `integer_type_node' if the given node turns out to be an
19039 ERROR_MARK node. */
19040
19041 static inline tree
19042 field_type (const_tree decl)
19043 {
19044 tree type;
19045
19046 if (TREE_CODE (decl) == ERROR_MARK)
19047 return integer_type_node;
19048
19049 type = DECL_BIT_FIELD_TYPE (decl);
19050 if (type == NULL_TREE)
19051 type = TREE_TYPE (decl);
19052
19053 return type;
19054 }
19055
19056 /* Given a pointer to a tree node, return the alignment in bits for
19057 it, or else return BITS_PER_WORD if the node actually turns out to
19058 be an ERROR_MARK node. */
19059
19060 static inline unsigned
19061 simple_type_align_in_bits (const_tree type)
19062 {
19063 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
19064 }
19065
19066 static inline unsigned
19067 simple_decl_align_in_bits (const_tree decl)
19068 {
19069 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
19070 }
19071
19072 /* Return the result of rounding T up to ALIGN. */
19073
19074 static inline offset_int
19075 round_up_to_align (const offset_int &t, unsigned int align)
19076 {
19077 return wi::udiv_trunc (t + align - 1, align) * align;
19078 }
19079
19080 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
19081 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
19082 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
19083 if we fail to return the size in one of these two forms. */
19084
19085 static dw_loc_descr_ref
19086 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
19087 {
19088 tree tree_size;
19089 struct loc_descr_context ctx;
19090
19091 /* Return a constant integer in preference to an expression, if possible. */
19092 *cst_size = int_size_in_bytes (type);
19093 if (*cst_size != -1)
19094 return NULL;
19095
19096 ctx.context_type = const_cast<tree> (type);
19097 ctx.base_decl = NULL_TREE;
19098 ctx.dpi = NULL;
19099 ctx.placeholder_arg = false;
19100 ctx.placeholder_seen = false;
19101
19102 type = TYPE_MAIN_VARIANT (type);
19103 tree_size = TYPE_SIZE_UNIT (type);
19104 return ((tree_size != NULL_TREE)
19105 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
19106 : NULL);
19107 }
19108
19109 /* Helper structure for RECORD_TYPE processing. */
19110 struct vlr_context
19111 {
19112 /* Root RECORD_TYPE. It is needed to generate data member location
19113 descriptions in variable-length records (VLR), but also to cope with
19114 variants, which are composed of nested structures multiplexed with
19115 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
19116 function processing a FIELD_DECL, it is required to be non-null. */
19117 tree struct_type;
19118 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
19119 QUAL_UNION_TYPE), this holds an expression that computes the offset for
19120 this variant part as part of the root record (in storage units). For
19121 regular records, it must be NULL_TREE. */
19122 tree variant_part_offset;
19123 };
19124
19125 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
19126 addressed byte of the "containing object" for the given FIELD_DECL. If
19127 possible, return a native constant through CST_OFFSET (in which case NULL is
19128 returned); otherwise return a DWARF expression that computes the offset.
19129
19130 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
19131 that offset is, either because the argument turns out to be a pointer to an
19132 ERROR_MARK node, or because the offset expression is too complex for us.
19133
19134 CTX is required: see the comment for VLR_CONTEXT. */
19135
19136 static dw_loc_descr_ref
19137 field_byte_offset (const_tree decl, struct vlr_context *ctx,
19138 HOST_WIDE_INT *cst_offset)
19139 {
19140 tree tree_result;
19141 dw_loc_list_ref loc_result;
19142
19143 *cst_offset = 0;
19144
19145 if (TREE_CODE (decl) == ERROR_MARK)
19146 return NULL;
19147 else
19148 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
19149
19150 /* We cannot handle variable bit offsets at the moment, so abort if it's the
19151 case. */
19152 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
19153 return NULL;
19154
19155 /* We used to handle only constant offsets in all cases. Now, we properly
19156 handle dynamic byte offsets only when the PCC bitfield type doesn't
19157 matter. */
19158 if (PCC_BITFIELD_TYPE_MATTERS
19159 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
19160 {
19161 offset_int object_offset_in_bits;
19162 offset_int object_offset_in_bytes;
19163 offset_int bitpos_int;
19164 tree type;
19165 tree field_size_tree;
19166 offset_int deepest_bitpos;
19167 offset_int field_size_in_bits;
19168 unsigned int type_align_in_bits;
19169 unsigned int decl_align_in_bits;
19170 offset_int type_size_in_bits;
19171
19172 bitpos_int = wi::to_offset (bit_position (decl));
19173 type = field_type (decl);
19174 type_size_in_bits = offset_int_type_size_in_bits (type);
19175 type_align_in_bits = simple_type_align_in_bits (type);
19176
19177 field_size_tree = DECL_SIZE (decl);
19178
19179 /* The size could be unspecified if there was an error, or for
19180 a flexible array member. */
19181 if (!field_size_tree)
19182 field_size_tree = bitsize_zero_node;
19183
19184 /* If the size of the field is not constant, use the type size. */
19185 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19186 field_size_in_bits = wi::to_offset (field_size_tree);
19187 else
19188 field_size_in_bits = type_size_in_bits;
19189
19190 decl_align_in_bits = simple_decl_align_in_bits (decl);
19191
19192 /* The GCC front-end doesn't make any attempt to keep track of the
19193 starting bit offset (relative to the start of the containing
19194 structure type) of the hypothetical "containing object" for a
19195 bit-field. Thus, when computing the byte offset value for the
19196 start of the "containing object" of a bit-field, we must deduce
19197 this information on our own. This can be rather tricky to do in
19198 some cases. For example, handling the following structure type
19199 definition when compiling for an i386/i486 target (which only
19200 aligns long long's to 32-bit boundaries) can be very tricky:
19201
19202 struct S { int field1; long long field2:31; };
19203
19204 Fortunately, there is a simple rule-of-thumb which can be used
19205 in such cases. When compiling for an i386/i486, GCC will
19206 allocate 8 bytes for the structure shown above. It decides to
19207 do this based upon one simple rule for bit-field allocation.
19208 GCC allocates each "containing object" for each bit-field at
19209 the first (i.e. lowest addressed) legitimate alignment boundary
19210 (based upon the required minimum alignment for the declared
19211 type of the field) which it can possibly use, subject to the
19212 condition that there is still enough available space remaining
19213 in the containing object (when allocated at the selected point)
19214 to fully accommodate all of the bits of the bit-field itself.
19215
19216 This simple rule makes it obvious why GCC allocates 8 bytes for
19217 each object of the structure type shown above. When looking
19218 for a place to allocate the "containing object" for `field2',
19219 the compiler simply tries to allocate a 64-bit "containing
19220 object" at each successive 32-bit boundary (starting at zero)
19221 until it finds a place to allocate that 64-bit field such that
19222 at least 31 contiguous (and previously unallocated) bits remain
19223 within that selected 64 bit field. (As it turns out, for the
19224 example above, the compiler finds it is OK to allocate the
19225 "containing object" 64-bit field at bit-offset zero within the
19226 structure type.)
19227
19228 Here we attempt to work backwards from the limited set of facts
19229 we're given, and we try to deduce from those facts, where GCC
19230 must have believed that the containing object started (within
19231 the structure type). The value we deduce is then used (by the
19232 callers of this routine) to generate DW_AT_location and
19233 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19234 the case of DW_AT_location, regular fields as well). */
19235
19236 /* Figure out the bit-distance from the start of the structure to
19237 the "deepest" bit of the bit-field. */
19238 deepest_bitpos = bitpos_int + field_size_in_bits;
19239
19240 /* This is the tricky part. Use some fancy footwork to deduce
19241 where the lowest addressed bit of the containing object must
19242 be. */
19243 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19244
19245 /* Round up to type_align by default. This works best for
19246 bitfields. */
19247 object_offset_in_bits
19248 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19249
19250 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19251 {
19252 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19253
19254 /* Round up to decl_align instead. */
19255 object_offset_in_bits
19256 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19257 }
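 /* A worked illustration (a sketch only, using the struct S example from
    the big comment above, on an i386-style target): field2 has
    bitpos_int == 32, field_size_in_bits == 31 and type_size_in_bits == 64,
    so deepest_bitpos == 63 and 63 - 64 == -1, which rounds up to the
    32-bit type alignment as 0.  Since 0 is not above bitpos_int, the
    decl_align fallback is skipped and the hypothetical 64-bit containing
    object starts at bit (and byte) offset 0 within the structure.  */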
19258
19259 object_offset_in_bytes
19260 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19261 if (ctx->variant_part_offset == NULL_TREE)
19262 {
19263 *cst_offset = object_offset_in_bytes.to_shwi ();
19264 return NULL;
19265 }
19266 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19267 }
19268 else
19269 tree_result = byte_position (decl);
19270
19271 if (ctx->variant_part_offset != NULL_TREE)
19272 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19273 ctx->variant_part_offset, tree_result);
19274
19275 /* If the byte offset is a constant, it's simpler to handle a native
19276 constant rather than a DWARF expression. */
19277 if (TREE_CODE (tree_result) == INTEGER_CST)
19278 {
19279 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19280 return NULL;
19281 }
19282 struct loc_descr_context loc_ctx = {
19283 ctx->struct_type, /* context_type */
19284 NULL_TREE, /* base_decl */
19285 NULL, /* dpi */
19286 false, /* placeholder_arg */
19287 false /* placeholder_seen */
19288 };
19289 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19290
19291 /* We want a DWARF expression: abort if we only have a location list with
19292 multiple elements. */
19293 if (!loc_result || !single_element_loc_list_p (loc_result))
19294 return NULL;
19295 else
19296 return loc_result->expr;
19297 }
19298 \f
19299 /* The following routines define various Dwarf attributes and any data
19300 associated with them. */
19301
19302 /* Add a location description attribute value to a DIE.
19303
19304 This emits location attributes suitable for whole variables and
19305 whole parameters. Note that the location attributes for struct fields are
19306 generated by the routine `data_member_location_attribute' below. */
19307
19308 static inline void
19309 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19310 dw_loc_list_ref descr)
19311 {
19312 bool check_no_locviews = true;
19313 if (descr == 0)
19314 return;
19315 if (single_element_loc_list_p (descr))
19316 add_AT_loc (die, attr_kind, descr->expr);
19317 else
19318 {
19319 add_AT_loc_list (die, attr_kind, descr);
19320 gcc_assert (descr->ll_symbol);
19321 if (attr_kind == DW_AT_location && descr->vl_symbol
19322 && dwarf2out_locviews_in_attribute ())
19323 {
19324 add_AT_view_list (die, DW_AT_GNU_locviews);
19325 check_no_locviews = false;
19326 }
19327 }
19328
19329 if (check_no_locviews)
19330 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19331 }
19332
19333 /* Add DW_AT_accessibility attribute to DIE if needed. */
19334
19335 static void
19336 add_accessibility_attribute (dw_die_ref die, tree decl)
19337 {
19338 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19339 children, otherwise the default is DW_ACCESS_public. In DWARF2
19340 the default has always been DW_ACCESS_public. */
19341 if (TREE_PROTECTED (decl))
19342 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19343 else if (TREE_PRIVATE (decl))
19344 {
19345 if (dwarf_version == 2
19346 || die->die_parent == NULL
19347 || die->die_parent->die_tag != DW_TAG_class_type)
19348 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19349 }
19350 else if (dwarf_version > 2
19351 && die->die_parent
19352 && die->die_parent->die_tag == DW_TAG_class_type)
19353 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19354 }
19355
19356 /* Attach the specialized form of location attribute used for data members of
19357 struct and union types. In the special case of a FIELD_DECL node which
19358 represents a bit-field, the "offset" part of this special location
19359 descriptor must indicate the distance in bytes from the lowest-addressed
19360 byte of the containing struct or union type to the lowest-addressed byte of
19361 the "containing object" for the bit-field. (See the `field_byte_offset'
19362 function above).
19363
19364 For any given bit-field, the "containing object" is a hypothetical object
19365 (of some integral or enum type) within which the given bit-field lives. The
19366 type of this hypothetical "containing object" is always the same as the
19367 declared type of the individual bit-field itself (for GCC anyway... the
19368 DWARF spec doesn't actually mandate this). Note that it is the size (in
19369 bytes) of the hypothetical "containing object" which will be given in the
19370 DW_AT_byte_size attribute for this bit-field. (See the
19371 `byte_size_attribute' function below.) It is also used when calculating the
19372 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19373 function below.)
19374
19375 CTX is required: see the comment for VLR_CONTEXT. */
19376
19377 static void
19378 add_data_member_location_attribute (dw_die_ref die,
19379 tree decl,
19380 struct vlr_context *ctx)
19381 {
19382 HOST_WIDE_INT offset;
19383 dw_loc_descr_ref loc_descr = 0;
19384
19385 if (TREE_CODE (decl) == TREE_BINFO)
19386 {
19387 /* We're working on the TAG_inheritance for a base class. */
19388 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19389 {
19390 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19391 aren't at a fixed offset from all (sub)objects of the same
19392 type. We need to extract the appropriate offset from our
19393 vtable. The following dwarf expression means
19394
19395 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19396
19397 This is specific to the V3 ABI, of course. */
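 /* As a sketch, the opcode stream built below is, with the object address
    ObAddr initially on the stack and Offset == -BINFO_VPTR_FIELD:

	DW_OP_dup	ObAddr ObAddr
	DW_OP_deref	ObAddr *ObAddr		(the vtable pointer)
	<push Offset>	ObAddr *ObAddr Offset
	DW_OP_minus	ObAddr (*ObAddr - Offset)
	DW_OP_deref	ObAddr vbase-offset
	DW_OP_plus	BaseAddr  */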
19398
19399 dw_loc_descr_ref tmp;
19400
19401 /* Make a copy of the object address. */
19402 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19403 add_loc_descr (&loc_descr, tmp);
19404
19405 /* Extract the vtable address. */
19406 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19407 add_loc_descr (&loc_descr, tmp);
19408
19409 /* Calculate the address of the offset. */
19410 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19411 gcc_assert (offset < 0);
19412
19413 tmp = int_loc_descriptor (-offset);
19414 add_loc_descr (&loc_descr, tmp);
19415 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19416 add_loc_descr (&loc_descr, tmp);
19417
19418 /* Extract the offset. */
19419 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19420 add_loc_descr (&loc_descr, tmp);
19421
19422 /* Add it to the object address. */
19423 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19424 add_loc_descr (&loc_descr, tmp);
19425 }
19426 else
19427 offset = tree_to_shwi (BINFO_OFFSET (decl));
19428 }
19429 else
19430 {
19431 loc_descr = field_byte_offset (decl, ctx, &offset);
19432
19433 /* If loc_descr is available then we know the field offset is dynamic.
19434 However, GDB does not handle dynamic field offsets very well at the
19435 moment. */
19436 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19437 {
19438 loc_descr = NULL;
19439 offset = 0;
19440 }
19441
19442 /* Data member location evaluation starts with the base address on the
19443 stack. Compute the field offset and add it to this base address. */
19444 else if (loc_descr != NULL)
19445 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19446 }
19447
19448 if (! loc_descr)
19449 {
19450 /* While DW_AT_data_bit_offset was already added in DWARF4, GDB, for
19451 example, only added support for it in November 2016. For DWARF5
19452 we need newer debug info consumers anyway. We might change this
19453 to dwarf_version >= 4 once most consumers have caught up. */
19454 if (dwarf_version >= 5
19455 && TREE_CODE (decl) == FIELD_DECL
19456 && DECL_BIT_FIELD_TYPE (decl))
19457 {
19458 tree off = bit_position (decl);
19459 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19460 {
19461 remove_AT (die, DW_AT_byte_size);
19462 remove_AT (die, DW_AT_bit_offset);
19463 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19464 return;
19465 }
19466 }
19467 if (dwarf_version > 2)
19468 {
19469 /* Don't need to output a location expression, just the constant. */
19470 if (offset < 0)
19471 add_AT_int (die, DW_AT_data_member_location, offset);
19472 else
19473 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19474 return;
19475 }
19476 else
19477 {
19478 enum dwarf_location_atom op;
19479
19480 /* The DWARF2 standard says that we should assume that the structure
19481 address is already on the stack, so we can specify a structure
19482 field address by using DW_OP_plus_uconst. */
19483 op = DW_OP_plus_uconst;
19484 loc_descr = new_loc_descr (op, offset, 0);
19485 }
19486 }
19487
19488 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19489 }
19490
19491 /* Writes integer values to dw_vec_const array. */
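 /* For instance (a sketch): insert_int (0x1234, 2, dest) stores 0x34 then
    0x12, i.e. the value in little-endian byte order regardless of the
    host; extract_int below performs the inverse.  */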
19492
19493 static void
19494 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19495 {
19496 while (size != 0)
19497 {
19498 *dest++ = val & 0xff;
19499 val >>= 8;
19500 --size;
19501 }
19502 }
19503
19504 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19505
19506 static HOST_WIDE_INT
19507 extract_int (const unsigned char *src, unsigned int size)
19508 {
19509 HOST_WIDE_INT val = 0;
19510
19511 src += size;
19512 while (size != 0)
19513 {
19514 val <<= 8;
19515 val |= *--src & 0xff;
19516 --size;
19517 }
19518 return val;
19519 }
19520
19521 /* Writes wide_int values to dw_vec_const array. */
19522
19523 static void
19524 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19525 {
19526 int i;
19527
19528 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19529 {
19530 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19531 return;
19532 }
19533
19534 /* We'd have to extend this code to support odd sizes. */
19535 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19536
19537 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19538
19539 if (WORDS_BIG_ENDIAN)
19540 for (i = n - 1; i >= 0; i--)
19541 {
19542 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19543 dest += sizeof (HOST_WIDE_INT);
19544 }
19545 else
19546 for (i = 0; i < n; i++)
19547 {
19548 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19549 dest += sizeof (HOST_WIDE_INT);
19550 }
19551 }
19552
19553 /* Writes floating point values to dw_vec_const array. */
19554
19555 static void
19556 insert_float (const_rtx rtl, unsigned char *array)
19557 {
19558 long val[4];
19559 int i;
19560 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19561
19562 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19563
19564 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19565 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19566 {
19567 insert_int (val[i], 4, array);
19568 array += 4;
19569 }
19570 }
19571
19572 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19573 does not have a "location" either in memory or in a register. These
19574 things can arise in GNU C when a constant is passed as an actual parameter
19575 to an inlined function. They can also arise in C++ where declared
19576 constants do not necessarily get memory "homes". */
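 /* For example (illustrative only): after inlining a call such as f (42),
    the inlined copy of f's parameter may have no runtime location at all;
    its DIE then carries DW_AT_const_value 42 instead of a DW_AT_location.  */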
19577
19578 static bool
19579 add_const_value_attribute (dw_die_ref die, rtx rtl)
19580 {
19581 switch (GET_CODE (rtl))
19582 {
19583 case CONST_INT:
19584 {
19585 HOST_WIDE_INT val = INTVAL (rtl);
19586
19587 if (val < 0)
19588 add_AT_int (die, DW_AT_const_value, val);
19589 else
19590 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19591 }
19592 return true;
19593
19594 case CONST_WIDE_INT:
19595 {
19596 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19597 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19598 (unsigned int)CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19599 wide_int w = wi::zext (w1, prec);
19600 add_AT_wide (die, DW_AT_const_value, w);
19601 }
19602 return true;
19603
19604 case CONST_DOUBLE:
19605 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19606 floating-point constant. A CONST_DOUBLE is used whenever the
19607 constant requires more than one word in order to be adequately
19608 represented. */
19609 if (TARGET_SUPPORTS_WIDE_INT == 0
19610 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19611 add_AT_double (die, DW_AT_const_value,
19612 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19613 else
19614 {
19615 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19616 unsigned int length = GET_MODE_SIZE (mode);
19617 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19618
19619 insert_float (rtl, array);
19620 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19621 }
19622 return true;
19623
19624 case CONST_VECTOR:
19625 {
19626 unsigned int length;
19627 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19628 return false;
19629
19630 machine_mode mode = GET_MODE (rtl);
19631 /* The combination of a length and byte elt_size doesn't extend
19632 naturally to boolean vectors, where several elements are packed
19633 into the same byte. */
19634 if (GET_MODE_CLASS (mode) == MODE_VECTOR_BOOL)
19635 return false;
19636
19637 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19638 unsigned char *array
19639 = ggc_vec_alloc<unsigned char> (length * elt_size);
19640 unsigned int i;
19641 unsigned char *p;
19642 machine_mode imode = GET_MODE_INNER (mode);
19643
19644 switch (GET_MODE_CLASS (mode))
19645 {
19646 case MODE_VECTOR_INT:
19647 for (i = 0, p = array; i < length; i++, p += elt_size)
19648 {
19649 rtx elt = CONST_VECTOR_ELT (rtl, i);
19650 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19651 }
19652 break;
19653
19654 case MODE_VECTOR_FLOAT:
19655 for (i = 0, p = array; i < length; i++, p += elt_size)
19656 {
19657 rtx elt = CONST_VECTOR_ELT (rtl, i);
19658 insert_float (elt, p);
19659 }
19660 break;
19661
19662 default:
19663 gcc_unreachable ();
19664 }
19665
19666 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19667 }
19668 return true;
19669
19670 case CONST_STRING:
19671 if (dwarf_version >= 4 || !dwarf_strict)
19672 {
19673 dw_loc_descr_ref loc_result;
19674 resolve_one_addr (&rtl);
19675 rtl_addr:
19676 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19677 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19678 add_AT_loc (die, DW_AT_location, loc_result);
19679 vec_safe_push (used_rtx_array, rtl);
19680 return true;
19681 }
19682 return false;
19683
19684 case CONST:
19685 if (CONSTANT_P (XEXP (rtl, 0)))
19686 return add_const_value_attribute (die, XEXP (rtl, 0));
19687 /* FALLTHROUGH */
19688 case SYMBOL_REF:
19689 if (!const_ok_for_output (rtl))
19690 return false;
19691 /* FALLTHROUGH */
19692 case LABEL_REF:
19693 if (dwarf_version >= 4 || !dwarf_strict)
19694 goto rtl_addr;
19695 return false;
19696
19697 case PLUS:
19698 /* In cases where an inlined instance of an inline function is passed
19699 the address of an `auto' variable (which is local to the caller) we
19700 can get a situation where the DECL_RTL of the artificial local
19701 variable (for the inlining) which acts as a stand-in for the
19702 corresponding formal parameter (of the inline function) will look
19703 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19704 exactly a compile-time constant expression, but it isn't the address
19705 of the (artificial) local variable either. Rather, it represents the
19706 *value* which the artificial local variable always has during its
19707 lifetime. We currently have no way to represent such quasi-constant
19708 values in Dwarf, so for now we just punt and generate nothing. */
19709 return false;
19710
19711 case HIGH:
19712 case CONST_FIXED:
19713 case MINUS:
19714 case SIGN_EXTEND:
19715 case ZERO_EXTEND:
19716 case CONST_POLY_INT:
19717 return false;
19718
19719 case MEM:
19720 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19721 && MEM_READONLY_P (rtl)
19722 && GET_MODE (rtl) == BLKmode)
19723 {
19724 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19725 return true;
19726 }
19727 return false;
19728
19729 default:
19730 /* No other kinds of rtx should be possible here. */
19731 gcc_unreachable ();
19732 }
19733 return false;
19734 }
19735
19736 /* Determine whether the evaluation of EXPR references any variables
19737 or functions which aren't otherwise used (and therefore may not be
19738 output). */
19739 static tree
19740 reference_to_unused (tree * tp, int * walk_subtrees,
19741 void * data ATTRIBUTE_UNUSED)
19742 {
19743 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19744 *walk_subtrees = 0;
19745
19746 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19747 && ! TREE_ASM_WRITTEN (*tp))
19748 return *tp;
19749 /* ??? The C++ FE emits debug information for using decls, so
19750 putting gcc_unreachable here falls over. See PR31899. For now
19751 be conservative. */
19752 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19753 return *tp;
19754 else if (VAR_P (*tp))
19755 {
19756 varpool_node *node = varpool_node::get (*tp);
19757 if (!node || !node->definition)
19758 return *tp;
19759 }
19760 else if (TREE_CODE (*tp) == FUNCTION_DECL
19761 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19762 {
19763 /* The call graph machinery must have finished analyzing,
19764 optimizing and gimplifying the CU by now.
19765 So if *TP has no call graph node associated
19766 with it, it means *TP will not be emitted. */
19767 if (!cgraph_node::get (*tp))
19768 return *tp;
19769 }
19770 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19771 return *tp;
19772
19773 return NULL_TREE;
19774 }
19775
19776 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19777 for use in a later add_const_value_attribute call. */
19778
19779 static rtx
19780 rtl_for_decl_init (tree init, tree type)
19781 {
19782 rtx rtl = NULL_RTX;
19783
19784 STRIP_NOPS (init);
19785
19786 /* If a variable is initialized with a string constant without embedded
19787 zeros, build CONST_STRING. */
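 /* For instance (illustrative only), given

	static const char greeting[6] = "hello";

    enttype is char, the domain is [0, 5], TREE_STRING_LENGTH (init) is 6
    and strlen ("hello") + 1 is also 6, so the checks below all pass and
    the initializer becomes a read-only BLKmode MEM of a CONST_STRING.  */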
19788 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19789 {
19790 tree enttype = TREE_TYPE (type);
19791 tree domain = TYPE_DOMAIN (type);
19792 scalar_int_mode mode;
19793
19794 if (is_int_mode (TYPE_MODE (enttype), &mode)
19795 && GET_MODE_SIZE (mode) == 1
19796 && domain
19797 && TYPE_MAX_VALUE (domain)
19798 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19799 && integer_zerop (TYPE_MIN_VALUE (domain))
19800 && compare_tree_int (TYPE_MAX_VALUE (domain),
19801 TREE_STRING_LENGTH (init) - 1) == 0
19802 && ((size_t) TREE_STRING_LENGTH (init)
19803 == strlen (TREE_STRING_POINTER (init)) + 1))
19804 {
19805 rtl = gen_rtx_CONST_STRING (VOIDmode,
19806 ggc_strdup (TREE_STRING_POINTER (init)));
19807 rtl = gen_rtx_MEM (BLKmode, rtl);
19808 MEM_READONLY_P (rtl) = 1;
19809 }
19810 }
19811 /* Other aggregates, and complex values, could be represented using
19812 CONCAT: FIXME! */
19813 else if (AGGREGATE_TYPE_P (type)
19814 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19815 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19816 || TREE_CODE (type) == COMPLEX_TYPE)
19817 ;
19818 /* Vectors only work if their mode is supported by the target.
19819 FIXME: generic vectors ought to work too. */
19820 else if (TREE_CODE (type) == VECTOR_TYPE
19821 && !VECTOR_MODE_P (TYPE_MODE (type)))
19822 ;
19823 /* If the initializer is something that we know will expand into an
19824 immediate RTL constant, expand it now. We must be careful not to
19825 reference variables which won't be output. */
19826 else if (initializer_constant_valid_p (init, type)
19827 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19828 {
19829 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19830 possible. */
19831 if (TREE_CODE (type) == VECTOR_TYPE)
19832 switch (TREE_CODE (init))
19833 {
19834 case VECTOR_CST:
19835 break;
19836 case CONSTRUCTOR:
19837 if (TREE_CONSTANT (init))
19838 {
19839 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19840 bool constant_p = true;
19841 tree value;
19842 unsigned HOST_WIDE_INT ix;
19843
19844 /* Even when ctor is constant, it might contain non-*_CST
19845 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19846 belong in VECTOR_CST nodes. */
19847 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19848 if (!CONSTANT_CLASS_P (value))
19849 {
19850 constant_p = false;
19851 break;
19852 }
19853
19854 if (constant_p)
19855 {
19856 init = build_vector_from_ctor (type, elts);
19857 break;
19858 }
19859 }
19860 /* FALLTHRU */
19861
19862 default:
19863 return NULL;
19864 }
19865
19866 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19867
19868 /* If expand_expr returns a MEM, it wasn't immediate. */
19869 gcc_assert (!rtl || !MEM_P (rtl));
19870 }
19871
19872 return rtl;
19873 }
19874
19875 /* Generate RTL for the variable DECL to represent its location. */
19876
19877 static rtx
19878 rtl_for_decl_location (tree decl)
19879 {
19880 rtx rtl;
19881
19882 /* Here we have to decide where we are going to say the parameter "lives"
19883 (as far as the debugger is concerned). We only have a couple of
19884 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19885
19886 DECL_RTL normally indicates where the parameter lives during most of the
19887 activation of the function. If optimization is enabled however, this
19888 could be either NULL or else a pseudo-reg. Both of those cases indicate
19889 that the parameter doesn't really live anywhere (as far as the code
19890 generation parts of GCC are concerned) during most of the function's
19891 activation. That will happen (for example) if the parameter is never
19892 referenced within the function.
19893
19894 We could just generate a location descriptor here for all non-NULL
19895 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19896 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19897 where DECL_RTL is NULL or is a pseudo-reg.
19898
19899 Note however that we can only get away with using DECL_INCOMING_RTL as
19900 a backup substitute for DECL_RTL in certain limited cases. In cases
19901 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19902 we can be sure that the parameter was passed using the same type as it is
19903 declared to have within the function, and that its DECL_INCOMING_RTL
19904 points us to a place where a value of that type is passed.
19905
19906 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19907 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19908 because in these cases DECL_INCOMING_RTL points us to a value of some
19909 type which is *different* from the type of the parameter itself. Thus,
19910 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19911 such cases, the debugger would end up (for example) trying to fetch a
19912 `float' from a place which actually contains the first part of a
19913 `double'. That would lead to really incorrect and confusing
19914 output at debug-time.
19915
19916 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19917 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19918 are a couple of exceptions however. On little-endian machines we can
19919 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19920 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19921 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19922 when (on a little-endian machine) a non-prototyped function has a
19923 parameter declared to be of type `short' or `char'. In such cases,
19924 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19925 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19926 passed `int' value. If the debugger then uses that address to fetch
19927 a `short' or a `char' (on a little-endian machine) the result will be
19928 the correct data, so we allow for such exceptional cases below.
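
     For instance (an illustrative sketch): given an old-style definition

	 int f (c) char c; { return c; }

     TREE_TYPE (decl) is `char' while DECL_ARG_TYPE (decl) is `int', yet on
     a little-endian machine the lowest-addressed byte of the passed `int'
     holds exactly the `char' value, so DECL_INCOMING_RTL remains usable.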
19929
19930 Note that our goal here is to describe the place where the given formal
19931 parameter lives during most of the function's activation (i.e. between the
19932 end of the prologue and the start of the epilogue). We'll do that as best
19933 as we can. Note however that if the given formal parameter is modified
19934 sometime during the execution of the function, then a stack backtrace (at
19935 debug-time) will show the function as having been called with the *new*
19936 value rather than the value which was originally passed in. This happens
19937 rarely enough that it is not a major problem, but it *is* a problem, and
19938 I'd like to fix it.
19939
19940 A future version of dwarf2out.c may generate two additional attributes for
19941 any given DW_TAG_formal_parameter DIE which will describe the "passed
19942 type" and the "passed location" for the given formal parameter in addition
19943 to the attributes we now generate to indicate the "declared type" and the
19944 "active location" for each parameter. This additional set of attributes
19945 could be used by debuggers for stack backtraces. Separately, note that
19946 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19947 This happens (for example) for inlined-instances of inline function formal
19948 parameters which are never referenced. This really shouldn't be
19949 happening. All PARM_DECL nodes should get valid non-NULL
19950 DECL_INCOMING_RTL values. FIXME. */
19951
19952 /* Use DECL_RTL as the "location" unless we find something better. */
19953 rtl = DECL_RTL_IF_SET (decl);
19954
19955 /* When generating abstract instances, ignore everything except
19956 constants, symbols living in memory, and symbols living in
19957 fixed registers. */
19958 if (! reload_completed)
19959 {
19960 if (rtl
19961 && (CONSTANT_P (rtl)
19962 || (MEM_P (rtl)
19963 && CONSTANT_P (XEXP (rtl, 0)))
19964 || (REG_P (rtl)
19965 && VAR_P (decl)
19966 && TREE_STATIC (decl))))
19967 {
19968 rtl = targetm.delegitimize_address (rtl);
19969 return rtl;
19970 }
19971 rtl = NULL_RTX;
19972 }
19973 else if (TREE_CODE (decl) == PARM_DECL)
19974 {
19975 if (rtl == NULL_RTX
19976 || is_pseudo_reg (rtl)
19977 || (MEM_P (rtl)
19978 && is_pseudo_reg (XEXP (rtl, 0))
19979 && DECL_INCOMING_RTL (decl)
19980 && MEM_P (DECL_INCOMING_RTL (decl))
19981 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19982 {
19983 tree declared_type = TREE_TYPE (decl);
19984 tree passed_type = DECL_ARG_TYPE (decl);
19985 machine_mode dmode = TYPE_MODE (declared_type);
19986 machine_mode pmode = TYPE_MODE (passed_type);
19987
19988 /* This decl represents a formal parameter which was optimized out.
19989 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19990 all cases where (rtl == NULL_RTX) just below. */
19991 if (dmode == pmode)
19992 rtl = DECL_INCOMING_RTL (decl);
19993 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19994 && SCALAR_INT_MODE_P (dmode)
19995 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19996 && DECL_INCOMING_RTL (decl))
19997 {
19998 rtx inc = DECL_INCOMING_RTL (decl);
19999 if (REG_P (inc))
20000 rtl = inc;
20001 else if (MEM_P (inc))
20002 {
20003 if (BYTES_BIG_ENDIAN)
20004 rtl = adjust_address_nv (inc, dmode,
20005 GET_MODE_SIZE (pmode)
20006 - GET_MODE_SIZE (dmode));
20007 else
20008 rtl = inc;
20009 }
20010 }
20011 }
20012
20013 /* If the parm was passed in registers, but lives on the stack, then
20014 make a big endian correction if the mode of the type of the
20015 parameter is not the same as the mode of the rtl. */
20016 /* ??? This is the same series of checks that are made in dbxout.c before
20017 we reach the big endian correction code there. It isn't clear if all
20018 of these checks are necessary here, but keeping them all is the safe
20019 thing to do. */
20020 else if (MEM_P (rtl)
20021 && XEXP (rtl, 0) != const0_rtx
20022 && ! CONSTANT_P (XEXP (rtl, 0))
20023 /* Not passed in memory. */
20024 && !MEM_P (DECL_INCOMING_RTL (decl))
20025 /* Not passed by invisible reference. */
20026 && (!REG_P (XEXP (rtl, 0))
20027 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
20028 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
20029 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
20030 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
20031 #endif
20032 )
20033 /* Big endian correction check. */
20034 && BYTES_BIG_ENDIAN
20035 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
20036 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
20037 UNITS_PER_WORD))
20038 {
20039 machine_mode addr_mode = get_address_mode (rtl);
20040 poly_int64 offset = (UNITS_PER_WORD
20041 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
20042
20043 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
20044 plus_constant (addr_mode, XEXP (rtl, 0), offset));
20045 }
20046 }
20047 else if (VAR_P (decl)
20048 && rtl
20049 && MEM_P (rtl)
20050 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
20051 {
20052 machine_mode addr_mode = get_address_mode (rtl);
20053 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
20054 GET_MODE (rtl));
20055
20056 /* If a variable is declared "register" yet is smaller than
20057 a register, then if we store the variable to memory, it
20058 looks like we're storing a register-sized value, when in
20059 fact we are not. We need to adjust the offset of the
20060 storage location to reflect the actual value's bytes,
20061 else gdb will not be able to display it. */
20062 if (maybe_ne (offset, 0))
20063 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
20064 plus_constant (addr_mode, XEXP (rtl, 0), offset));
20065 }
20066
20067 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
20068 and will have been substituted directly into all expressions that use it.
20069 C does not have such a concept, but C++ and other languages do. */
20070 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
20071 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
20072
20073 if (rtl)
20074 rtl = targetm.delegitimize_address (rtl);
20075
20076 /* If we don't look past the constant pool, we risk emitting a
20077 reference to a constant pool entry that isn't referenced from
20078 code, and thus is not emitted. */
20079 if (rtl)
20080 rtl = avoid_constant_pool_reference (rtl);
20081
20082 /* Try harder to get a rtl. If this symbol ends up not being emitted
20083 in the current CU, resolve_addr will remove the expression referencing
20084 it. */
20085 if (rtl == NULL_RTX
20086 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
20087 && VAR_P (decl)
20088 && !DECL_EXTERNAL (decl)
20089 && TREE_STATIC (decl)
20090 && DECL_NAME (decl)
20091 && !DECL_HARD_REGISTER (decl)
20092 && DECL_MODE (decl) != VOIDmode)
20093 {
20094 rtl = make_decl_rtl_for_debug (decl);
20095 if (!MEM_P (rtl)
20096 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
20097 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
20098 rtl = NULL_RTX;
20099 }
20100
20101 return rtl;
20102 }
20103
20104 /* Check whether decl is a Fortran COMMON symbol. If not, NULL_TREE is
20105 returned. If so, the decl for the COMMON block is returned, and the
20106 value is the offset into the common block for the symbol. */
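 /* A sketch of the expected shape (assuming the gfortran representation):
    for "COMMON /blk/ i, r", each member decl carries a DECL_VALUE_EXPR of
    the form blk.i, i.e. a COMPONENT_REF into the artificial variable that
    stands for the whole common block; that variable and the member's byte
    offset within it are what this function returns.  */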
20107
20108 static tree
20109 fortran_common (tree decl, HOST_WIDE_INT *value)
20110 {
20111 tree val_expr, cvar;
20112 machine_mode mode;
20113 poly_int64 bitsize, bitpos;
20114 tree offset;
20115 HOST_WIDE_INT cbitpos;
20116 int unsignedp, reversep, volatilep = 0;
20117
20118 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
20119 it does not have a value (the offset into the common area), or if it
20120 is thread local (as opposed to global) then it isn't common, and shouldn't
20121 be handled as such. */
20122 if (!VAR_P (decl)
20123 || !TREE_STATIC (decl)
20124 || !DECL_HAS_VALUE_EXPR_P (decl)
20125 || !is_fortran ())
20126 return NULL_TREE;
20127
20128 val_expr = DECL_VALUE_EXPR (decl);
20129 if (TREE_CODE (val_expr) != COMPONENT_REF)
20130 return NULL_TREE;
20131
20132 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
20133 &unsignedp, &reversep, &volatilep);
20134
20135 if (cvar == NULL_TREE
20136 || !VAR_P (cvar)
20137 || DECL_ARTIFICIAL (cvar)
20138 || !TREE_PUBLIC (cvar)
20139 /* We don't expect to have to cope with variable offsets,
20140 since at present all static data must have a constant size. */
20141 || !bitpos.is_constant (&cbitpos))
20142 return NULL_TREE;
20143
20144 *value = 0;
20145 if (offset != NULL)
20146 {
20147 if (!tree_fits_shwi_p (offset))
20148 return NULL_TREE;
20149 *value = tree_to_shwi (offset);
20150 }
20151 if (cbitpos != 0)
20152 *value += cbitpos / BITS_PER_UNIT;
20153
20154 return cvar;
20155 }
20156
20157 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
20158 data attribute for a variable or a parameter. We generate the
20159 DW_AT_const_value attribute only in those cases where the given variable
20160 or parameter does not have a true "location" either in memory or in a
20161 register. This can happen (for example) when a constant is passed as an
20162 actual argument in a call to an inline function. (It's possible that
20163 these things can crop up in other ways also.) Note that one type of
20164 constant value which can be passed into an inlined function is a constant
20165 pointer. This can happen for example if an actual argument in an inlined
20166 function call evaluates to a compile-time constant address.
20167
20168 CACHE_P is true if it is worth caching the location list for DECL,
20169 so that future calls can reuse it rather than regenerate it from scratch.
20170 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
20171 since we will need to refer to them each time the function is inlined. */
20172
20173 static bool
20174 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
20175 {
20176 rtx rtl;
20177 dw_loc_list_ref list;
20178 var_loc_list *loc_list;
20179 cached_dw_loc_list *cache;
20180
20181 if (early_dwarf)
20182 return false;
20183
20184 if (TREE_CODE (decl) == ERROR_MARK)
20185 return false;
20186
20187 if (get_AT (die, DW_AT_location)
20188 || get_AT (die, DW_AT_const_value))
20189 return true;
20190
20191 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20192 || TREE_CODE (decl) == RESULT_DECL);
20193
20194 /* Try to get some constant RTL for this decl, and use that as the value of
20195 the location. */
20196
20197 rtl = rtl_for_decl_location (decl);
20198 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20199 && add_const_value_attribute (die, rtl))
20200 return true;
20201
20202 /* See if we have a single-element location list that is equivalent to
20203 a constant value. In that case it is better to use add_const_value_attribute
20204 rather than expanding the constant value equivalent. */
20205 loc_list = lookup_decl_loc (decl);
20206 if (loc_list
20207 && loc_list->first
20208 && loc_list->first->next == NULL
20209 && NOTE_P (loc_list->first->loc)
20210 && NOTE_VAR_LOCATION (loc_list->first->loc)
20211 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20212 {
20213 struct var_loc_node *node;
20214
20215 node = loc_list->first;
20216 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20217 if (GET_CODE (rtl) == EXPR_LIST)
20218 rtl = XEXP (rtl, 0);
20219 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20220 && add_const_value_attribute (die, rtl))
20221 return true;
20222 }
20223 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20224 list several times. See if we've already cached the contents. */
20225 list = NULL;
20226 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20227 cache_p = false;
20228 if (cache_p)
20229 {
20230 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20231 if (cache)
20232 list = cache->loc_list;
20233 }
20234 if (list == NULL)
20235 {
20236 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20237 NULL);
20238 /* It is usually worth caching this result if the decl is from
20239 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20240 if (cache_p && list && list->dw_loc_next)
20241 {
20242 cached_dw_loc_list **slot
20243 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20244 DECL_UID (decl),
20245 INSERT);
20246 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20247 cache->decl_id = DECL_UID (decl);
20248 cache->loc_list = list;
20249 *slot = cache;
20250 }
20251 }
20252 if (list)
20253 {
20254 add_AT_location_description (die, DW_AT_location, list);
20255 return true;
20256 }
20257 /* None of that worked, so it must not really have a location;
20258 try adding a constant value attribute from the DECL_INITIAL. */
20259 return tree_add_const_value_attribute_for_decl (die, decl);
20260 }
20261
20262 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20263 attribute is the const value T. */
20264
20265 static bool
20266 tree_add_const_value_attribute (dw_die_ref die, tree t)
20267 {
20268 tree init;
20269 tree type = TREE_TYPE (t);
20270 rtx rtl;
20271
20272 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20273 return false;
20274
20275 init = t;
20276 gcc_assert (!DECL_P (init));
20277
20278 if (TREE_CODE (init) == INTEGER_CST)
20279 {
20280 if (tree_fits_uhwi_p (init))
20281 {
20282 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20283 return true;
20284 }
20285 if (tree_fits_shwi_p (init))
20286 {
20287 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20288 return true;
20289 }
20290 }
20291 if (! early_dwarf)
20292 {
20293 rtl = rtl_for_decl_init (init, type);
20294 if (rtl)
20295 return add_const_value_attribute (die, rtl);
20296 }
20297 /* If the host and target are sane, try harder. */
20298 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20299 && initializer_constant_valid_p (init, type))
20300 {
20301 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20302 if (size > 0 && (int) size == size)
20303 {
20304 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20305
20306 if (native_encode_initializer (init, array, size) == size)
20307 {
20308 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20309 return true;
20310 }
20311 ggc_free (array);
20312 }
20313 }
20314 return false;
20315 }
20316
20317 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20318 attribute is the const value of T, where T is an integral constant
20319 variable with static storage duration
20320 (so it can't be a PARM_DECL or a RESULT_DECL). */
20321
20322 static bool
20323 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20324 {
20325
20326 if (!decl
20327 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20328 || (VAR_P (decl) && !TREE_STATIC (decl)))
20329 return false;
20330
20331 if (TREE_READONLY (decl)
20332 && ! TREE_THIS_VOLATILE (decl)
20333 && DECL_INITIAL (decl))
20334 /* OK */;
20335 else
20336 return false;
20337
20338 /* Don't add DW_AT_const_value if abstract origin already has one. */
20339 if (get_AT (var_die, DW_AT_const_value))
20340 return false;
20341
20342 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20343 }
20344
20345 /* Convert the CFI instructions for the current function into a
20346 location list. This is used for DW_AT_frame_base when we are targeting
20347 a dwarf2 consumer that does not support the dwarf3
20348 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20349 expressions. */
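 /* As a sketch (illustrative, i386-style frame-pointer prologue): if the
    CFA is esp+4 at the function start, esp+8 after the "push %ebp" and
    ebp+8 after the "mov %esp, %ebp", the list built below gets one node
    per such range, each holding an expression that computes the CFA plus
    OFFSET from that range's base register.  */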
20350
20351 static dw_loc_list_ref
20352 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20353 {
20354 int ix;
20355 dw_fde_ref fde;
20356 dw_loc_list_ref list, *list_tail;
20357 dw_cfi_ref cfi;
20358 dw_cfa_location last_cfa, next_cfa;
20359 const char *start_label, *last_label, *section;
20360 dw_cfa_location remember;
20361
20362 fde = cfun->fde;
20363 gcc_assert (fde != NULL);
20364
20365 section = secname_for_decl (current_function_decl);
20366 list_tail = &list;
20367 list = NULL;
20368
20369 memset (&next_cfa, 0, sizeof (next_cfa));
20370 next_cfa.reg = INVALID_REGNUM;
20371 remember = next_cfa;
20372
20373 start_label = fde->dw_fde_begin;
20374
20375 /* ??? Bald assumption that the CIE opcode list does not contain
20376 advance opcodes. */
20377 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20378 lookup_cfa_1 (cfi, &next_cfa, &remember);
20379
20380 last_cfa = next_cfa;
20381 last_label = start_label;
20382
20383 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20384 {
20385 /* If the first partition contained no CFI adjustments, the
20386 CIE opcodes apply to the whole first partition. */
20387 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20388 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20389 list_tail =&(*list_tail)->dw_loc_next;
20390 start_label = last_label = fde->dw_fde_second_begin;
20391 }
20392
20393 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20394 {
20395 switch (cfi->dw_cfi_opc)
20396 {
20397 case DW_CFA_set_loc:
20398 case DW_CFA_advance_loc1:
20399 case DW_CFA_advance_loc2:
20400 case DW_CFA_advance_loc4:
20401 if (!cfa_equal_p (&last_cfa, &next_cfa))
20402 {
20403 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20404 start_label, 0, last_label, 0, section);
20405
20406 list_tail = &(*list_tail)->dw_loc_next;
20407 last_cfa = next_cfa;
20408 start_label = last_label;
20409 }
20410 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20411 break;
20412
20413 case DW_CFA_advance_loc:
20414 /* The encoding is complex enough that we should never emit this. */
20415 gcc_unreachable ();
20416
20417 default:
20418 lookup_cfa_1 (cfi, &next_cfa, &remember);
20419 break;
20420 }
20421 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20422 {
20423 if (!cfa_equal_p (&last_cfa, &next_cfa))
20424 {
20425 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20426 start_label, 0, last_label, 0, section);
20427
20428 list_tail = &(*list_tail)->dw_loc_next;
20429 last_cfa = next_cfa;
20430 start_label = last_label;
20431 }
20432 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20433 start_label, 0, fde->dw_fde_end, 0, section);
20434 list_tail = &(*list_tail)->dw_loc_next;
20435 start_label = last_label = fde->dw_fde_second_begin;
20436 }
20437 }
20438
20439 if (!cfa_equal_p (&last_cfa, &next_cfa))
20440 {
20441 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20442 start_label, 0, last_label, 0, section);
20443 list_tail = &(*list_tail)->dw_loc_next;
20444 start_label = last_label;
20445 }
20446
20447 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20448 start_label, 0,
20449 fde->dw_fde_second_begin
20450 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20451 section);
20452
20453 maybe_gen_llsym (list);
20454
20455 return list;
20456 }
20457
20458 /* Compute a displacement from the "steady-state frame pointer" to the
20459 frame base (often the same as the CFA), and store it in
20460 frame_pointer_fb_offset. OFFSET is added to the displacement
20461 before the latter is negated. */
20462
20463 static void
20464 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20465 {
20466 rtx reg, elim;
20467
20468 #ifdef FRAME_POINTER_CFA_OFFSET
20469 reg = frame_pointer_rtx;
20470 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20471 #else
20472 reg = arg_pointer_rtx;
20473 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20474 #endif
20475
20476 elim = (ira_use_lra_p
20477 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20478 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20479 elim = strip_offset_and_add (elim, &offset);
20480
20481 frame_pointer_fb_offset = -offset;
20482
20483 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20484 in which to eliminate. This is because its stack pointer isn't
20485 directly accessible as a register within the ISA. To work around
20486 this, assume that while we cannot provide a proper value for
20487 frame_pointer_fb_offset, we won't need one either. We can use
20488 the hard frame pointer in debug info even if the frame pointer isn't used,
20489 since the hard frame pointer in debug info is encoded with DW_OP_fbreg,
20490 which uses the DW_AT_frame_base attribute rather than the hard frame
20491 pointer directly. */
20492 frame_pointer_fb_offset_valid
20493 = (elim == hard_frame_pointer_rtx || elim == stack_pointer_rtx);
20494 }
20495
20496 /* Generate a DW_AT_name attribute given some string value to be included as
20497 the value of the attribute. */
20498
20499 static void
20500 add_name_attribute (dw_die_ref die, const char *name_string)
20501 {
20502 if (name_string != NULL && *name_string != 0)
20503 {
20504 if (demangle_name_func)
20505 name_string = (*demangle_name_func) (name_string);
20506
20507 add_AT_string (die, DW_AT_name, name_string);
20508 }
20509 }
20510
20511 /* Generate a DW_AT_description attribute given some string value to be included
20512 as the value of the attribute. */
20513
20514 static void
20515 add_desc_attribute (dw_die_ref die, const char *name_string)
20516 {
20517 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20518 return;
20519
20520 if (name_string == NULL || *name_string == 0)
20521 return;
20522
20523 if (demangle_name_func)
20524 name_string = (*demangle_name_func) (name_string);
20525
20526 add_AT_string (die, DW_AT_description, name_string);
20527 }
20528
20529 /* Generate a DW_AT_description attribute given some decl to be included
20530 as the value of the attribute. */
20531
20532 static void
20533 add_desc_attribute (dw_die_ref die, tree decl)
20534 {
20535 tree decl_name;
20536
20537 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20538 return;
20539
20540 if (decl == NULL_TREE || !DECL_P (decl))
20541 return;
20542 decl_name = DECL_NAME (decl);
20543
20544 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
20545 {
20546 const char *name = dwarf2_name (decl, 0);
20547 add_desc_attribute (die, name ? name : IDENTIFIER_POINTER (decl_name));
20548 }
20549 else
20550 {
20551 char *desc = print_generic_expr_to_str (decl);
20552 add_desc_attribute (die, desc);
20553 free (desc);
20554 }
20555 }
20556
20557 /* Retrieve the descriptive type of TYPE, if any; make sure it has a
20558 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20559 of TYPE accordingly.
20560
20561 ??? This is a temporary measure until after we're able to generate
20562 regular DWARF for the complex Ada type system. */
20563
20564 static void
20565 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20566 dw_die_ref context_die)
20567 {
20568 tree dtype;
20569 dw_die_ref dtype_die;
20570
20571 if (!lang_hooks.types.descriptive_type)
20572 return;
20573
20574 dtype = lang_hooks.types.descriptive_type (type);
20575 if (!dtype)
20576 return;
20577
20578 dtype_die = lookup_type_die (dtype);
20579 if (!dtype_die)
20580 {
20581 gen_type_die (dtype, context_die);
20582 dtype_die = lookup_type_die (dtype);
20583 gcc_assert (dtype_die);
20584 }
20585
20586 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20587 }
20588
20589 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
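 /* For example (illustrative only): with
    -fdebug-prefix-map=/home/me/src=/usr/src and a source working directory
    of /home/me/src/gcc, remap_debug_filename below rewrites the cached
    comp_dir to /usr/src/gcc.  */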
20590
20591 static const char *
20592 comp_dir_string (void)
20593 {
20594 const char *wd;
20595 char *wd_plus_sep = NULL;
20596 static const char *cached_wd = NULL;
20597
20598 if (cached_wd != NULL)
20599 return cached_wd;
20600
20601 wd = get_src_pwd ();
20602 if (wd == NULL)
20603 return NULL;
20604
20605 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20606 {
20607 size_t wdlen = strlen (wd);
20608 wd_plus_sep = XNEWVEC (char, wdlen + 2);
20609 strcpy (wd_plus_sep, wd);
20610 wd_plus_sep [wdlen] = DIR_SEPARATOR;
20611 wd_plus_sep [wdlen + 1] = 0;
20612 wd = wd_plus_sep;
20613 }
20614
20615 cached_wd = remap_debug_filename (wd);
20616
20617 /* remap_debug_filename can just pass through wd or return a new gc string.
20618 These two types can't both be stored in a GTY(())-tagged string, but since
20619 the cached value lives forever just copy it if needed. */
20620 if (cached_wd != wd)
20621 {
20622 cached_wd = xstrdup (cached_wd);
20623 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR && wd_plus_sep != NULL)
20624 free (wd_plus_sep);
20625 }
20626
20627 return cached_wd;
20628 }
20629
20630 /* Generate a DW_AT_comp_dir attribute for DIE. */
20631
20632 static void
20633 add_comp_dir_attribute (dw_die_ref die)
20634 {
20635 const char * wd = comp_dir_string ();
20636 if (wd != NULL)
20637 add_AT_string (die, DW_AT_comp_dir, wd);
20638 }
20639
20640 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20641 pointer computation, ...), output a representation for that bound according
20642 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20643 loc_list_from_tree for the meaning of CONTEXT. */
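 /* For instance (a sketch): an upper bound of 9 can be output directly as
    a constant; a bound stored in a member of an enclosing record (as in
    Ada) can be output as a DIE reference to that member; anything else
    falls back to a DWARF expression (exprloc), subject to which of these
    FORMS the caller permits.  */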
20644
20645 static void
20646 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20647 int forms, struct loc_descr_context *context)
20648 {
20649 dw_die_ref context_die, decl_die = NULL;
20650 dw_loc_list_ref list;
20651 bool strip_conversions = true;
20652 bool placeholder_seen = false;
20653
20654 while (strip_conversions)
20655 switch (TREE_CODE (value))
20656 {
20657 case ERROR_MARK:
20658 case SAVE_EXPR:
20659 return;
20660
20661 CASE_CONVERT:
20662 case VIEW_CONVERT_EXPR:
20663 value = TREE_OPERAND (value, 0);
20664 break;
20665
20666 default:
20667 strip_conversions = false;
20668 break;
20669 }
20670
20671 /* If possible and permitted, output the attribute as a constant. */
20672 if ((forms & dw_scalar_form_constant) != 0
20673 && TREE_CODE (value) == INTEGER_CST)
20674 {
20675 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20676
20677 /* If HOST_WIDE_INT is big enough then represent the bound as
20678 a constant value. We need to choose a form based on
20679 whether the type is signed or unsigned. We cannot just
20680 call add_AT_unsigned if the value itself is positive
20681 (add_AT_unsigned might add the unsigned value encoded as
20682 DW_FORM_data[1248]). Some DWARF consumers will look up the
20683 bounds type and then sign extend any unsigned values found
20684 for signed types. This is needed only for
20685 DW_AT_{lower,upper}_bound, since for most other attributes,
20686 consumers will treat DW_FORM_data[1248] as unsigned values,
20687 regardless of the underlying type. */
20688 if (prec <= HOST_BITS_PER_WIDE_INT
20689 || tree_fits_uhwi_p (value))
20690 {
20691 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20692 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20693 else
20694 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20695 }
20696 else
20697 /* Otherwise represent the bound as an unsigned value with
20698 the precision of its type. The precision and signedness
20699 of the type will be necessary to re-interpret it
20700 unambiguously. */
20701 add_AT_wide (die, attr, wi::to_wide (value));
20702 return;
20703 }
20704
20705 /* Otherwise, if it's possible and permitted too, output a reference to
20706 another DIE. */
20707 if ((forms & dw_scalar_form_reference) != 0)
20708 {
20709 tree decl = NULL_TREE;
20710
20711 /* Some type attributes reference an outer type. For instance, the upper
20712 bound of an array may reference an embedding record (this happens in
20713 Ada). */
20714 if (TREE_CODE (value) == COMPONENT_REF
20715 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20716 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20717 decl = TREE_OPERAND (value, 1);
20718
20719 else if (VAR_P (value)
20720 || TREE_CODE (value) == PARM_DECL
20721 || TREE_CODE (value) == RESULT_DECL)
20722 decl = value;
20723
20724 if (decl != NULL_TREE)
20725 {
20726 decl_die = lookup_decl_die (decl);
20727
20728 /* ??? Can this happen, or should the variable have been bound
20729 first? Probably it can, since I imagine that we try to create
20730 the types of parameters in the order in which they exist in
20731 the list, and won't have created a forward reference to a
20732 later parameter. */
20733 if (decl_die != NULL)
20734 {
20735 if (get_AT (decl_die, DW_AT_location)
20736 || get_AT (decl_die, DW_AT_data_member_location)
20737 || get_AT (decl_die, DW_AT_const_value))
20738 {
20739 add_AT_die_ref (die, attr, decl_die);
20740 return;
20741 }
20742 }
20743 }
20744 }
20745
20746 /* Last chance: try to create a stack operation procedure to evaluate the
20747 value. Do nothing if even that is not possible or permitted. */
20748 if ((forms & dw_scalar_form_exprloc) == 0)
20749 return;
20750
20751 list = loc_list_from_tree (value, 2, context);
20752 if (context && context->placeholder_arg)
20753 {
20754 placeholder_seen = context->placeholder_seen;
20755 context->placeholder_seen = false;
20756 }
20757 if (list == NULL || single_element_loc_list_p (list))
20758 {
20759 /* If this attribute is neither a reference nor a constant, it is
20760 a DWARF expression rather than a location description. For that,
20761 loc_list_from_tree (value, 0, &context) is needed. */
20762 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20763 if (list2 && single_element_loc_list_p (list2))
20764 {
20765 if (placeholder_seen)
20766 {
20767 struct dwarf_procedure_info dpi;
20768 dpi.fndecl = NULL_TREE;
20769 dpi.args_count = 1;
20770 if (!resolve_args_picking (list2->expr, 1, &dpi))
20771 return;
20772 }
20773 add_AT_loc (die, attr, list2->expr);
20774 return;
20775 }
20776 }
20777
20778 /* If that failed to give a single element location list, fall back to
20779 outputting this as a reference... if that is still permitted. */
20780 if (list == NULL
20781 || (forms & dw_scalar_form_reference) == 0
20782 || placeholder_seen)
20783 return;
20784
20785 if (!decl_die)
20786 {
20787 if (current_function_decl == 0)
20788 context_die = comp_unit_die ();
20789 else
20790 context_die = lookup_decl_die (current_function_decl);
20791
20792 decl_die = new_die (DW_TAG_variable, context_die, value);
20793 add_AT_flag (decl_die, DW_AT_artificial, 1);
20794 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20795 context_die);
20796 }
20797
20798 add_AT_location_description (decl_die, DW_AT_location, list);
20799 add_AT_die_ref (die, attr, decl_die);
20800 }
20801
20802 /* Return the default for DW_AT_lower_bound, or -1 if there is not any
20803 default. */
20804
20805 static int
20806 lower_bound_default (void)
20807 {
20808 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20809 {
20810 case DW_LANG_C:
20811 case DW_LANG_C89:
20812 case DW_LANG_C99:
20813 case DW_LANG_C11:
20814 case DW_LANG_C_plus_plus:
20815 case DW_LANG_C_plus_plus_11:
20816 case DW_LANG_C_plus_plus_14:
20817 case DW_LANG_ObjC:
20818 case DW_LANG_ObjC_plus_plus:
20819 return 0;
20820 case DW_LANG_Fortran77:
20821 case DW_LANG_Fortran90:
20822 case DW_LANG_Fortran95:
20823 case DW_LANG_Fortran03:
20824 case DW_LANG_Fortran08:
20825 return 1;
20826 case DW_LANG_UPC:
20827 case DW_LANG_D:
20828 case DW_LANG_Python:
20829 return dwarf_version >= 4 ? 0 : -1;
20830 case DW_LANG_Ada95:
20831 case DW_LANG_Ada83:
20832 case DW_LANG_Cobol74:
20833 case DW_LANG_Cobol85:
20834 case DW_LANG_Modula2:
20835 case DW_LANG_PLI:
20836 return dwarf_version >= 4 ? 1 : -1;
20837 default:
20838 return -1;
20839 }
20840 }
20841
20842 /* Given a tree node describing an array bound (either lower or upper) output
20843 a representation for that bound. */
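 /* For example (illustrative): for the C declaration "int a[10]" the lower
    bound 0 matches lower_bound_default for C and is omitted entirely,
    while the upper bound 9 is emitted as a constant attribute.  */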
20844
20845 static void
20846 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20847 tree bound, struct loc_descr_context *context)
20848 {
20849 int dflt;
20850
20851 while (1)
20852 switch (TREE_CODE (bound))
20853 {
20854 /* Strip all conversions. */
20855 CASE_CONVERT:
20856 case VIEW_CONVERT_EXPR:
20857 bound = TREE_OPERAND (bound, 0);
20858 break;
20859
20860 /* All fixed-bounds are represented by INTEGER_CST nodes. Lower bounds
20861 are even omitted when they are the default. */
20862 case INTEGER_CST:
20863 /* If the value for this bound is the default one, we can even omit the
20864 attribute. */
20865 if (bound_attr == DW_AT_lower_bound
20866 && tree_fits_shwi_p (bound)
20867 && (dflt = lower_bound_default ()) != -1
20868 && tree_to_shwi (bound) == dflt)
20869 return;
20870
20871 /* FALLTHRU */
20872
20873 default:
20874       /* Because of the complex interactions there can be with other GNAT
20875          encodings, GDB isn't ready yet to handle a proper DWARF description
20876          for self-referential subrange bounds: let GNAT encodings do the
20877          magic in such a case.  */
20878 if (is_ada ()
20879 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
20880 && contains_placeholder_p (bound))
20881 return;
20882
20883 add_scalar_info (subrange_die, bound_attr, bound,
20884 dw_scalar_form_constant
20885 | dw_scalar_form_exprloc
20886 | dw_scalar_form_reference,
20887 context);
20888 return;
20889 }
20890 }
20891
20892 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
20893 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
20894 Note that the block of subscript information for an array type also
20895 includes information about the element type of the given array type.
20896
20897 This function reuses previously set type and bound information if
20898 available. */
20899
20900 static void
20901 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
20902 {
20903 unsigned dimension_number;
20904 tree lower, upper;
20905 dw_die_ref child = type_die->die_child;
20906
20907 for (dimension_number = 0;
20908 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
20909 type = TREE_TYPE (type), dimension_number++)
20910 {
20911 tree domain = TYPE_DOMAIN (type);
20912
20913 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
20914 break;
20915
20916 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
20917 and (in GNU C only) variable bounds. Handle all three forms
20918 here. */
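      /* Illustrative sketch (editorial example, not from the original
	 source): for the C declaration `int a[5]' the domain is [0, 4];
	 the lower bound 0 matches lower_bound_default () for C and is
	 omitted, so the DW_TAG_subrange_type child carries only
	 DW_AT_upper_bound 4.  An unspecified-bound array such as
	 `extern int b[];' still gets a subrange DIE, just with no
	 bounds attached.  */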
20919
20920 /* Find and reuse a previously generated DW_TAG_subrange_type if
20921 available.
20922
20923 For multi-dimensional arrays, as we iterate through the
20924 various dimensions in the enclosing for loop above, we also
20925 iterate through the DIE children and pick at each
20926 DW_TAG_subrange_type previously generated (if available).
20927 Each child DW_TAG_subrange_type DIE describes the range of
20928 the current dimension. At this point we should have as many
20929 DW_TAG_subrange_type's as we have dimensions in the
20930 array. */
20931 dw_die_ref subrange_die = NULL;
20932 if (child)
20933 while (1)
20934 {
20935 child = child->die_sib;
20936 if (child->die_tag == DW_TAG_subrange_type)
20937 subrange_die = child;
20938 if (child == type_die->die_child)
20939 {
20940 /* If we wrapped around, stop looking next time. */
20941 child = NULL;
20942 break;
20943 }
20944 if (child->die_tag == DW_TAG_subrange_type)
20945 break;
20946 }
20947 if (!subrange_die)
20948 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
20949
20950 if (domain)
20951 {
20952 /* We have an array type with specified bounds. */
20953 lower = TYPE_MIN_VALUE (domain);
20954 upper = TYPE_MAX_VALUE (domain);
20955
20956 /* Define the index type. */
20957 if (TREE_TYPE (domain)
20958 && !get_AT (subrange_die, DW_AT_type))
20959 {
20960 /* ??? This is probably an Ada unnamed subrange type. Ignore the
20961 TREE_TYPE field. We can't emit debug info for this
20962 because it is an unnamed integral type. */
20963 if (TREE_CODE (domain) == INTEGER_TYPE
20964 && TYPE_NAME (domain) == NULL_TREE
20965 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
20966 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
20967 ;
20968 else
20969 add_type_attribute (subrange_die, TREE_TYPE (domain),
20970 TYPE_UNQUALIFIED, false, type_die);
20971 }
20972
20973 /* ??? If upper is NULL, the array has unspecified length,
20974 but it does have a lower bound. This happens with Fortran
20975 dimension arr(N:*)
20976 Since the debugger is definitely going to need to know N
20977 to produce useful results, go ahead and output the lower
20978 bound solo, and hope the debugger can cope. */
20979
20980 if (!get_AT (subrange_die, DW_AT_lower_bound))
20981 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
20982 if (!get_AT (subrange_die, DW_AT_upper_bound)
20983 && !get_AT (subrange_die, DW_AT_count))
20984 {
20985 if (upper)
20986 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
20987 else if ((is_c () || is_cxx ()) && COMPLETE_TYPE_P (type))
20988 /* Zero-length array. */
20989 add_bound_info (subrange_die, DW_AT_count,
20990 build_int_cst (TREE_TYPE (lower), 0), NULL);
20991 }
20992 }
20993
20994 /* Otherwise we have an array type with an unspecified length. The
20995 DWARF-2 spec does not say how to handle this; let's just leave out the
20996 bounds. */
20997 }
20998 }
20999
21000 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
21001
21002 static void
21003 add_byte_size_attribute (dw_die_ref die, tree tree_node)
21004 {
21005 dw_die_ref decl_die;
21006 HOST_WIDE_INT size;
21007 dw_loc_descr_ref size_expr = NULL;
21008
21009 switch (TREE_CODE (tree_node))
21010 {
21011 case ERROR_MARK:
21012 size = 0;
21013 break;
21014 case ENUMERAL_TYPE:
21015 case RECORD_TYPE:
21016 case UNION_TYPE:
21017 case QUAL_UNION_TYPE:
21018 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
21019 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
21020 {
21021 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
21022 return;
21023 }
21024 size_expr = type_byte_size (tree_node, &size);
21025 break;
21026 case FIELD_DECL:
21027 /* For a data member of a struct or union, the DW_AT_byte_size is
21028 generally given as the number of bytes normally allocated for an
21029 object of the *declared* type of the member itself. This is true
21030 even for bit-fields. */
21031 size = int_size_in_bytes (field_type (tree_node));
21032 break;
21033 default:
21034 gcc_unreachable ();
21035 }
21036
21037 /* Support for dynamically-sized objects was introduced by DWARFv3.
21038 At the moment, GDB does not handle variable byte sizes very well,
21039 though. */
21040 if ((dwarf_version >= 3 || !dwarf_strict)
21041 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
21042 && size_expr != NULL)
21043 add_AT_loc (die, DW_AT_byte_size, size_expr);
21044
21045 /* Note that `size' might be -1 when we get to this point. If it is, that
21046 indicates that the byte size of the entity in question is variable and
21047 that we could not generate a DWARF expression that computes it. */
21048 if (size >= 0)
21049 add_AT_unsigned (die, DW_AT_byte_size, size);
21050 }
21051
21052 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
21053 alignment. */
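/* Illustrative sketch (editorial example): in C11, `_Alignas (16) int x;'
   sets DECL_USER_ALIGN on X, so the routine below emits DW_AT_alignment
   with the value 16 (DECL_ALIGN_UNIT, i.e. the alignment in bytes).
   Declarations and types with only their default alignment get no
   DW_AT_alignment at all.  */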
21054
21055 static void
21056 add_alignment_attribute (dw_die_ref die, tree tree_node)
21057 {
21058 if (dwarf_version < 5 && dwarf_strict)
21059 return;
21060
21061 unsigned align;
21062
21063 if (DECL_P (tree_node))
21064 {
21065 if (!DECL_USER_ALIGN (tree_node))
21066 return;
21067
21068 align = DECL_ALIGN_UNIT (tree_node);
21069 }
21070 else if (TYPE_P (tree_node))
21071 {
21072 if (!TYPE_USER_ALIGN (tree_node))
21073 return;
21074
21075 align = TYPE_ALIGN_UNIT (tree_node);
21076 }
21077 else
21078 gcc_unreachable ();
21079
21080 add_AT_unsigned (die, DW_AT_alignment, align);
21081 }
21082
21083 /* For a FIELD_DECL node which represents a bit-field, output an attribute
21084 which specifies the distance in bits from the highest order bit of the
21085 "containing object" for the bit-field to the highest order bit of the
21086 bit-field itself.
21087
21088 For any given bit-field, the "containing object" is a hypothetical object
21089 (of some integral or enum type) within which the given bit-field lives. The
21090 type of this hypothetical "containing object" is always the same as the
21091 declared type of the individual bit-field itself. The determination of the
21092 exact location of the "containing object" for a bit-field is rather
21093 complicated. It's handled by the `field_byte_offset' function (above).
21094
21095 CTX is required: see the comment for VLR_CONTEXT.
21096
21097 Note that it is the size (in bytes) of the hypothetical "containing object"
21098 which will be given in the DW_AT_byte_size attribute for this bit-field.
21099    (See `add_byte_size_attribute' above).  */
21100
21101 static inline void
21102 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
21103 {
21104 HOST_WIDE_INT object_offset_in_bytes;
21105 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21106 HOST_WIDE_INT bitpos_int;
21107 HOST_WIDE_INT highest_order_object_bit_offset;
21108 HOST_WIDE_INT highest_order_field_bit_offset;
21109 HOST_WIDE_INT bit_offset;
21110
21111 field_byte_offset (decl, ctx, &object_offset_in_bytes);
21112
21113 /* Must be a field and a bit field. */
21114 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21115
21116 /* We can't yet handle bit-fields whose offsets are variable, so if we
21117 encounter such things, just return without generating any attribute
21118 whatsoever. Likewise for variable or too large size. */
21119 if (! tree_fits_shwi_p (bit_position (decl))
21120 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21121 return;
21122
21123 bitpos_int = int_bit_position (decl);
21124
21125 /* Note that the bit offset is always the distance (in bits) from the
21126 highest-order bit of the "containing object" to the highest-order bit of
21127 the bit-field itself. Since the "high-order end" of any object or field
21128 is different on big-endian and little-endian machines, the computation
21129 below must take account of these differences. */
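  /* Editorial worked example (assuming BITS_PER_UNIT == 8, a 32-bit int
     containing object and a little-endian target): for
	 struct { unsigned a : 3; unsigned b : 5; };
     the member B has int_bit_position 3 and DECL_SIZE 5, and its
     containing object starts at byte offset 0, so
	 highest_order_object_bit_offset = 0 * 8 + 32 = 32,
	 highest_order_field_bit_offset  = 3 + 5      = 8,
     giving DW_AT_bit_offset = 32 - 8 = 24, i.e. B begins 24 bits below
     the most significant bit of its containing int.  */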
21130 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21131 highest_order_field_bit_offset = bitpos_int;
21132
21133 if (! BYTES_BIG_ENDIAN)
21134 {
21135 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21136 highest_order_object_bit_offset +=
21137 simple_type_size_in_bits (original_type);
21138 }
21139
21140 bit_offset
21141 = (! BYTES_BIG_ENDIAN
21142 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21143 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21144
21145 if (bit_offset < 0)
21146 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21147 else
21148 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21149 }
21150
21151 /* For a FIELD_DECL node which represents a bit field, output an attribute
21152 which specifies the length in bits of the given field. */
21153
21154 static inline void
21155 add_bit_size_attribute (dw_die_ref die, tree decl)
21156 {
21157 /* Must be a field and a bit field. */
21158 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21159 && DECL_BIT_FIELD_TYPE (decl));
21160
21161 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21162 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21163 }
21164
21165 /* If the compiled language is ANSI C, then add a 'prototyped'
21166 attribute, if arg types are given for the parameters of a function. */
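/* For instance (editorial sketch): in C, `int f (void);' and `int g (int);'
   are prototypes, so their DIEs get DW_AT_prototyped, while an old-style
   declaration such as `int h ();' is not and the attribute is omitted.
   Languages outside the C family fall through the default case below.  */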
21167
21168 static inline void
21169 add_prototyped_attribute (dw_die_ref die, tree func_type)
21170 {
21171 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21172 {
21173 case DW_LANG_C:
21174 case DW_LANG_C89:
21175 case DW_LANG_C99:
21176 case DW_LANG_C11:
21177 case DW_LANG_ObjC:
21178 if (prototype_p (func_type))
21179 add_AT_flag (die, DW_AT_prototyped, 1);
21180 break;
21181 default:
21182 break;
21183 }
21184 }
21185
21186 /* Add an 'abstract_origin' attribute below a given DIE. The DIE is found
21187 by looking in the type declaration, the object declaration equate table or
21188 the block mapping. */
21189
21190 static inline void
21191 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21192 {
21193 dw_die_ref origin_die = NULL;
21194
21195 /* For late LTO debug output we want to refer directly to the abstract
21196 DIE in the early debug rather to the possibly existing concrete
21197 instance and avoid creating that just for this purpose. */
21198 sym_off_pair *desc;
21199 if (in_lto_p
21200 && external_die_map
21201 && (desc = external_die_map->get (origin)))
21202 {
21203 add_AT_external_die_ref (die, DW_AT_abstract_origin,
21204 desc->sym, desc->off);
21205 return;
21206 }
21207
21208 if (DECL_P (origin))
21209 origin_die = lookup_decl_die (origin);
21210 else if (TYPE_P (origin))
21211 origin_die = lookup_type_die (origin);
21212 else if (TREE_CODE (origin) == BLOCK)
21213 origin_die = lookup_block_die (origin);
21214
21215   /* XXX: Functions that are never lowered don't always have correct block
21216      trees (in the case of java they simply have no block tree at all; the
21217      same holds for some other languages).  For these functions, there is
21218      nothing we can really do to output correct debug info for inlined
21219      functions in all cases.  Rather than die, we'll just produce deficient
21220      debug info now, in that we will have variables without a proper
21221      abstract origin.  In the future, when all functions are lowered, we
21222      should re-add a gcc_assert (origin_die) here.  */
21223
21224 if (origin_die)
21225 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21226 }
21227
21228 /* We do not currently support the pure_virtual attribute. */
21229
21230 static inline void
21231 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21232 {
21233 if (DECL_VINDEX (func_decl))
21234 {
21235 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21236
21237 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21238 add_AT_loc (die, DW_AT_vtable_elem_location,
21239 new_loc_descr (DW_OP_constu,
21240 tree_to_shwi (DECL_VINDEX (func_decl)),
21241 0));
21242
21243 /* GNU extension: Record what type this method came from originally. */
21244 if (debug_info_level > DINFO_LEVEL_TERSE
21245 && DECL_CONTEXT (func_decl))
21246 add_AT_die_ref (die, DW_AT_containing_type,
21247 lookup_type_die (DECL_CONTEXT (func_decl)));
21248 }
21249 }
21250 \f
21251 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21252 given decl. This used to be a vendor extension until after DWARF 4
21253 standardized it. */
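/* Editorial example (a sketch, Itanium C++ ABI assumed): for the C++
   function `int f (int)' declared in namespace `ns', DECL_ASSEMBLER_NAME
   is the mangled "_ZN2ns1fEi"; that string becomes DW_AT_linkage_name
   (or DW_AT_MIPS_linkage_name before DWARF 4), while DW_AT_name stays
   "f".  */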
21254
21255 static void
21256 add_linkage_attr (dw_die_ref die, tree decl)
21257 {
21258 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21259
21260 /* Mimic what assemble_name_raw does with a leading '*'. */
21261 if (name[0] == '*')
21262 name = &name[1];
21263
21264 if (dwarf_version >= 4)
21265 add_AT_string (die, DW_AT_linkage_name, name);
21266 else
21267 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21268 }
21269
21270 /* Add source coordinate attributes for the given decl. */
21271
21272 static void
21273 add_src_coords_attributes (dw_die_ref die, tree decl)
21274 {
21275 expanded_location s;
21276
21277 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21278 return;
21279 s = expand_location (DECL_SOURCE_LOCATION (decl));
21280 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21281 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21282 if (debug_column_info && s.column)
21283 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21284 }
21285
21286 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21287
21288 static void
21289 add_linkage_name_raw (dw_die_ref die, tree decl)
21290 {
21291 /* Defer until we have an assembler name set. */
21292 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21293 {
21294 limbo_die_node *asm_name;
21295
21296 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21297 asm_name->die = die;
21298 asm_name->created_for = decl;
21299 asm_name->next = deferred_asm_name;
21300 deferred_asm_name = asm_name;
21301 }
21302 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21303 add_linkage_attr (die, decl);
21304 }
21305
21306 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21307
21308 static void
21309 add_linkage_name (dw_die_ref die, tree decl)
21310 {
21311 if (debug_info_level > DINFO_LEVEL_NONE
21312 && VAR_OR_FUNCTION_DECL_P (decl)
21313 && TREE_PUBLIC (decl)
21314 && !(VAR_P (decl) && DECL_REGISTER (decl))
21315 && die->die_tag != DW_TAG_member)
21316 add_linkage_name_raw (die, decl);
21317 }
21318
21319 /* Add a DW_AT_name attribute and source coordinate attribute for the
21320 given decl, but only if it actually has a name. */
21321
21322 static void
21323 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21324 bool no_linkage_name)
21325 {
21326 tree decl_name;
21327
21328 decl_name = DECL_NAME (decl);
21329 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21330 {
21331 const char *name = dwarf2_name (decl, 0);
21332 if (name)
21333 add_name_attribute (die, name);
21334 else
21335 add_desc_attribute (die, decl);
21336
21337 if (! DECL_ARTIFICIAL (decl))
21338 add_src_coords_attributes (die, decl);
21339
21340 if (!no_linkage_name)
21341 add_linkage_name (die, decl);
21342 }
21343 else
21344 add_desc_attribute (die, decl);
21345
21346 #ifdef VMS_DEBUGGING_INFO
21347 /* Get the function's name, as described by its RTL. This may be different
21348 from the DECL_NAME name used in the source file. */
21349 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21350 {
21351 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21352 XEXP (DECL_RTL (decl), 0), false);
21353 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21354 }
21355 #endif /* VMS_DEBUGGING_INFO */
21356 }
21357
21358 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21359
21360 static void
21361 add_discr_value (dw_die_ref die, dw_discr_value *value)
21362 {
21363 dw_attr_node attr;
21364
21365 attr.dw_attr = DW_AT_discr_value;
21366 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21367 attr.dw_attr_val.val_entry = NULL;
21368 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21369 if (value->pos)
21370 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21371 else
21372 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21373 add_dwarf_attr (die, &attr);
21374 }
21375
21376 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21377
21378 static void
21379 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21380 {
21381 dw_attr_node attr;
21382
21383 attr.dw_attr = DW_AT_discr_list;
21384 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21385 attr.dw_attr_val.val_entry = NULL;
21386 attr.dw_attr_val.v.val_discr_list = discr_list;
21387 add_dwarf_attr (die, &attr);
21388 }
21389
21390 static inline dw_discr_list_ref
21391 AT_discr_list (dw_attr_node *attr)
21392 {
21393 return attr->dw_attr_val.v.val_discr_list;
21394 }
21395
21396 #ifdef VMS_DEBUGGING_INFO
21397 /* Output the debug main pointer die for VMS */
21398
21399 void
21400 dwarf2out_vms_debug_main_pointer (void)
21401 {
21402 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21403 dw_die_ref die;
21404
21405 /* Allocate the VMS debug main subprogram die. */
21406 die = new_die_raw (DW_TAG_subprogram);
21407 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21408 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21409 current_function_funcdef_no);
21410 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21411
21412 /* Make it the first child of comp_unit_die (). */
21413 die->die_parent = comp_unit_die ();
21414 if (comp_unit_die ()->die_child)
21415 {
21416 die->die_sib = comp_unit_die ()->die_child->die_sib;
21417 comp_unit_die ()->die_child->die_sib = die;
21418 }
21419 else
21420 {
21421 die->die_sib = die;
21422 comp_unit_die ()->die_child = die;
21423 }
21424 }
21425 #endif /* VMS_DEBUGGING_INFO */
21426
21427 /* walk_tree helper function for uses_local_type, below. */
21428
21429 static tree
21430 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21431 {
21432 if (!TYPE_P (*tp))
21433 *walk_subtrees = 0;
21434 else
21435 {
21436 tree name = TYPE_NAME (*tp);
21437 if (name && DECL_P (name) && decl_function_context (name))
21438 return *tp;
21439 }
21440 return NULL_TREE;
21441 }
21442
21443 /* If TYPE involves a function-local type (including a local typedef to a
21444 non-local type), returns that type; otherwise returns NULL_TREE. */
21445
21446 static tree
21447 uses_local_type (tree type)
21448 {
21449 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21450 return used;
21451 }
21452
21453 /* Return the DIE for the scope that immediately contains this type.
21454 Non-named types that do not involve a function-local type get global
21455 scope. Named types nested in namespaces or other types get their
21456 containing scope. All other types (i.e. function-local named types) get
21457 the current active scope. */
21458
21459 static dw_die_ref
21460 scope_die_for (tree t, dw_die_ref context_die)
21461 {
21462 dw_die_ref scope_die = NULL;
21463 tree containing_scope;
21464
21465 /* Non-types always go in the current scope. */
21466 gcc_assert (TYPE_P (t));
21467
21468 /* Use the scope of the typedef, rather than the scope of the type
21469 it refers to. */
21470 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21471 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21472 else
21473 containing_scope = TYPE_CONTEXT (t);
21474
21475 /* Use the containing namespace if there is one. */
21476 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21477 {
21478 if (context_die == lookup_decl_die (containing_scope))
21479 /* OK */;
21480 else if (debug_info_level > DINFO_LEVEL_TERSE)
21481 context_die = get_context_die (containing_scope);
21482 else
21483 containing_scope = NULL_TREE;
21484 }
21485
21486 /* Ignore function type "scopes" from the C frontend. They mean that
21487 a tagged type is local to a parmlist of a function declarator, but
21488 that isn't useful to DWARF. */
21489 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21490 containing_scope = NULL_TREE;
21491
21492 if (SCOPE_FILE_SCOPE_P (containing_scope))
21493 {
21494 /* If T uses a local type keep it local as well, to avoid references
21495 to function-local DIEs from outside the function. */
21496 if (current_function_decl && uses_local_type (t))
21497 scope_die = context_die;
21498 else
21499 scope_die = comp_unit_die ();
21500 }
21501 else if (TYPE_P (containing_scope))
21502 {
21503 /* For types, we can just look up the appropriate DIE. */
21504 if (debug_info_level > DINFO_LEVEL_TERSE)
21505 scope_die = get_context_die (containing_scope);
21506 else
21507 {
21508 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21509 if (scope_die == NULL)
21510 scope_die = comp_unit_die ();
21511 }
21512 }
21513 else
21514 scope_die = context_die;
21515
21516 return scope_die;
21517 }
21518
21519 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21520
21521 static inline int
21522 local_scope_p (dw_die_ref context_die)
21523 {
21524 for (; context_die; context_die = context_die->die_parent)
21525 if (context_die->die_tag == DW_TAG_inlined_subroutine
21526 || context_die->die_tag == DW_TAG_subprogram)
21527 return 1;
21528
21529 return 0;
21530 }
21531
21532 /* Returns nonzero if CONTEXT_DIE is a class. */
21533
21534 static inline int
21535 class_scope_p (dw_die_ref context_die)
21536 {
21537 return (context_die
21538 && (context_die->die_tag == DW_TAG_structure_type
21539 || context_die->die_tag == DW_TAG_class_type
21540 || context_die->die_tag == DW_TAG_interface_type
21541 || context_die->die_tag == DW_TAG_union_type));
21542 }
21543
21544 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21545 whether or not to treat a DIE in this context as a declaration. */
21546
21547 static inline int
21548 class_or_namespace_scope_p (dw_die_ref context_die)
21549 {
21550 return (class_scope_p (context_die)
21551 || (context_die && context_die->die_tag == DW_TAG_namespace));
21552 }
21553
21554 /* Many forms of DIEs require a "type description" attribute. This
21555 routine locates the proper "type descriptor" die for the type given
21556 by 'type' plus any additional qualifiers given by 'cv_quals', and
21557 adds a DW_AT_type attribute below the given die. */
21558
21559 static void
21560 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21561 bool reverse, dw_die_ref context_die)
21562 {
21563 enum tree_code code = TREE_CODE (type);
21564 dw_die_ref type_die = NULL;
21565
21566 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21567 or fixed-point type, use the inner type. This is because we have no
21568 support for unnamed types in base_type_die. This can happen if this is
21569      an Ada subrange type.  The correct solution is to emit a subrange type DIE.  */
21570 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21571 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21572 type = TREE_TYPE (type), code = TREE_CODE (type);
21573
21574 if (code == ERROR_MARK
21575 /* Handle a special case. For functions whose return type is void, we
21576 generate *no* type attribute. (Note that no object may have type
21577 `void', so this only applies to function return types). */
21578 || code == VOID_TYPE)
21579 return;
21580
21581 type_die = modified_type_die (type,
21582 cv_quals | TYPE_QUALS (type),
21583 reverse,
21584 context_die);
21585
21586 if (type_die != NULL)
21587 add_AT_die_ref (object_die, DW_AT_type, type_die);
21588 }
21589
21590 /* Given an object die, add the calling convention attribute for the
21591 function call type. */
21592 static void
21593 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21594 {
21595 enum dwarf_calling_convention value = DW_CC_normal;
21596
21597 value = ((enum dwarf_calling_convention)
21598 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21599
21600 if (is_fortran ()
21601 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21602 {
21603 /* DWARF 2 doesn't provide a way to identify a program's source-level
21604 entry point. DW_AT_calling_convention attributes are only meant
21605 to describe functions' calling conventions. However, lacking a
21606 better way to signal the Fortran main program, we used this for
21607 a long time, following existing custom. Now, DWARF 4 has
21608 DW_AT_main_subprogram, which we add below, but some tools still
21609 rely on the old way, which we thus keep. */
21610 value = DW_CC_program;
21611
21612 if (dwarf_version >= 4 || !dwarf_strict)
21613 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21614 }
21615
21616   /* Only add the attribute if the backend requests it, and the value
21617      is not DW_CC_normal.  */
21618 if (value && (value != DW_CC_normal))
21619 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21620 }
21621
21622 /* Given a tree pointer to a struct, class, union, or enum type node, return
21623 a pointer to the (string) tag name for the given type, or zero if the type
21624 was declared without a tag. */
21625
21626 static const char *
21627 type_tag (const_tree type)
21628 {
21629 const char *name = 0;
21630
21631 if (TYPE_NAME (type) != 0)
21632 {
21633 tree t = 0;
21634
21635 /* Find the IDENTIFIER_NODE for the type name. */
21636 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21637 && !TYPE_NAMELESS (type))
21638 t = TYPE_NAME (type);
21639
21640 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21641 a TYPE_DECL node, regardless of whether or not a `typedef' was
21642 involved. */
21643 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21644 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21645 {
21646 /* We want to be extra verbose. Don't call dwarf_name if
21647 DECL_NAME isn't set. The default hook for decl_printable_name
21648 doesn't like that, and in this context it's correct to return
21649 0, instead of "<anonymous>" or the like. */
21650 if (DECL_NAME (TYPE_NAME (type))
21651 && !DECL_NAMELESS (TYPE_NAME (type)))
21652 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21653 }
21654
21655 /* Now get the name as a string, or invent one. */
21656 if (!name && t != 0)
21657 name = IDENTIFIER_POINTER (t);
21658 }
21659
21660 return (name == 0 || *name == '\0') ? 0 : name;
21661 }
21662
21663 /* Return the type associated with a data member, make a special check
21664 for bit field types. */
21665
21666 static inline tree
21667 member_declared_type (const_tree member)
21668 {
21669 return (DECL_BIT_FIELD_TYPE (member)
21670 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21671 }
21672
21673 /* Get the decl's label, as described by its RTL. This may be different
21674 from the DECL_NAME name used in the source file. */
21675
21676 #if 0
21677 static const char *
21678 decl_start_label (tree decl)
21679 {
21680 rtx x;
21681 const char *fnname;
21682
21683 x = DECL_RTL (decl);
21684 gcc_assert (MEM_P (x));
21685
21686 x = XEXP (x, 0);
21687 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21688
21689 fnname = XSTR (x, 0);
21690 return fnname;
21691 }
21692 #endif
21693 \f
21694 /* For variable-length arrays that have been previously generated, but
21695 may be incomplete due to missing subscript info, fill the subscript
21696 info. Return TRUE if this is one of those cases. */
21697 static bool
21698 fill_variable_array_bounds (tree type)
21699 {
21700 if (TREE_ASM_WRITTEN (type)
21701 && TREE_CODE (type) == ARRAY_TYPE
21702 && variably_modified_type_p (type, NULL))
21703 {
21704 dw_die_ref array_die = lookup_type_die (type);
21705 if (!array_die)
21706 return false;
21707 add_subscript_info (array_die, type, !is_ada ());
21708 return true;
21709 }
21710 return false;
21711 }
21712
21713 /* These routines generate the internal representation of the DIE's for
21714 the compilation unit. Debugging information is collected by walking
21715 the declaration trees passed in from dwarf2out_decl(). */
21716
21717 static void
21718 gen_array_type_die (tree type, dw_die_ref context_die)
21719 {
21720 dw_die_ref array_die;
21721
21722 /* GNU compilers represent multidimensional array types as sequences of one
21723 dimensional array types whose element types are themselves array types.
21724 We sometimes squish that down to a single array_type DIE with multiple
21725 subscripts in the Dwarf debugging info. The draft Dwarf specification
21726      says that we are allowed to do this kind of compression in C, because
21727      there is no difference between an array of arrays and a multidimensional
21728      array.  We don't do this for Ada, to remain as close as possible to the
21729      actual representation, which is especially important given the language's
21730      flexibility with respect to arrays of variable size.  */
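  /* Editorial sketch of the two shapes: in C, `int m[2][3]' is emitted as
     a single DW_TAG_array_type with two DW_TAG_subrange_type children
     (upper bounds 1 and 2), whereas with collapsing disabled (as for Ada)
     it would be an array with upper bound 1 whose element type is another
     DW_TAG_array_type with upper bound 2.  */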
21731
21732 bool collapse_nested_arrays = !is_ada ();
21733
21734 if (fill_variable_array_bounds (type))
21735 return;
21736
21737 dw_die_ref scope_die = scope_die_for (type, context_die);
21738 tree element_type;
21739
21740 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21741 DW_TAG_string_type doesn't have DW_AT_type attribute). */
21742 if (TREE_CODE (type) == ARRAY_TYPE
21743 && TYPE_STRING_FLAG (type)
21744 && is_fortran ()
21745 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21746 {
21747 HOST_WIDE_INT size;
21748
21749 array_die = new_die (DW_TAG_string_type, scope_die, type);
21750 add_name_attribute (array_die, type_tag (type));
21751 equate_type_number_to_die (type, array_die);
21752 size = int_size_in_bytes (type);
21753 if (size >= 0)
21754 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21755 /* ??? We can't annotate types late, but for LTO we may not
21756 generate a location early either (gfortran.dg/save_6.f90). */
21757 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21758 && TYPE_DOMAIN (type) != NULL_TREE
21759 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21760 {
21761 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21762 tree rszdecl = szdecl;
21763
21764 size = int_size_in_bytes (TREE_TYPE (szdecl));
21765 if (!DECL_P (szdecl))
21766 {
21767 if (TREE_CODE (szdecl) == INDIRECT_REF
21768 && DECL_P (TREE_OPERAND (szdecl, 0)))
21769 {
21770 rszdecl = TREE_OPERAND (szdecl, 0);
21771 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21772 != DWARF2_ADDR_SIZE)
21773 size = 0;
21774 }
21775 else
21776 size = 0;
21777 }
21778 if (size > 0)
21779 {
21780 dw_loc_list_ref loc
21781 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21782 NULL);
21783 if (loc)
21784 {
21785 add_AT_location_description (array_die, DW_AT_string_length,
21786 loc);
21787 if (size != DWARF2_ADDR_SIZE)
21788 add_AT_unsigned (array_die, dwarf_version >= 5
21789 ? DW_AT_string_length_byte_size
21790 : DW_AT_byte_size, size);
21791 }
21792 }
21793 }
21794 return;
21795 }
21796
21797 array_die = new_die (DW_TAG_array_type, scope_die, type);
21798 add_name_attribute (array_die, type_tag (type));
21799 equate_type_number_to_die (type, array_die);
21800
21801 if (TREE_CODE (type) == VECTOR_TYPE)
21802 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21803
21804 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21805 if (is_fortran ()
21806 && TREE_CODE (type) == ARRAY_TYPE
21807 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21808 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21809 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21810
21811 #if 0
21812 /* We default the array ordering. Debuggers will probably do the right
21813 things even if DW_AT_ordering is not present. It's not even an issue
21814 until we start to get into multidimensional arrays anyway. If a debugger
21815 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21816 then we'll have to put the DW_AT_ordering attribute back in. (But if
21817 and when we find out that we need to put these in, we will only do so
21818      for multidimensional arrays.)  */
21819 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21820 #endif
21821
21822 if (TREE_CODE (type) == VECTOR_TYPE)
21823 {
21824 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21825 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21826 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21827 add_bound_info (subrange_die, DW_AT_upper_bound,
21828 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21829 }
21830 else
21831 add_subscript_info (array_die, type, collapse_nested_arrays);
21832
21833 /* Add representation of the type of the elements of this array type and
21834 emit the corresponding DIE if we haven't done it already. */
21835 element_type = TREE_TYPE (type);
21836 if (collapse_nested_arrays)
21837 while (TREE_CODE (element_type) == ARRAY_TYPE)
21838 {
21839 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21840 break;
21841 element_type = TREE_TYPE (element_type);
21842 }
21843
21844 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21845 TREE_CODE (type) == ARRAY_TYPE
21846 && TYPE_REVERSE_STORAGE_ORDER (type),
21847 context_die);
21848
21849 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21850 if (TYPE_ARTIFICIAL (type))
21851 add_AT_flag (array_die, DW_AT_artificial, 1);
21852
21853 if (get_AT (array_die, DW_AT_name))
21854 add_pubtype (type, array_die);
21855
21856 add_alignment_attribute (array_die, type);
21857 }
21858
21859 /* This routine generates the DIE for an array with a hidden descriptor;
21860    the details are filled into *info by a langhook.  */
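/* Editorial sketch (Fortran assumed as the typical client): for a variable
   declared `real, allocatable :: a(:,:)' the Fortran front end fills *info
   from the array descriptor, so the DIE built below can carry
   DW_AT_data_location, DW_AT_allocated and per-dimension bounds and
   strides expressed as DWARF expressions that read the descriptor at run
   time.  */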
21861
21862 static void
21863 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21864 dw_die_ref context_die)
21865 {
21866 const dw_die_ref scope_die = scope_die_for (type, context_die);
21867 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
21868 struct loc_descr_context context = { type, info->base_decl, NULL,
21869 false, false };
21870 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
21871 int dim;
21872
21873 add_name_attribute (array_die, type_tag (type));
21874 equate_type_number_to_die (type, array_die);
21875
21876 if (info->ndimensions > 1)
21877 switch (info->ordering)
21878 {
21879 case array_descr_ordering_row_major:
21880 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21881 break;
21882 case array_descr_ordering_column_major:
21883 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21884 break;
21885 default:
21886 break;
21887 }
21888
21889 if (dwarf_version >= 3 || !dwarf_strict)
21890 {
21891 if (info->data_location)
21892 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
21893 dw_scalar_form_exprloc, &context);
21894 if (info->associated)
21895 add_scalar_info (array_die, DW_AT_associated, info->associated,
21896 dw_scalar_form_constant
21897 | dw_scalar_form_exprloc
21898 | dw_scalar_form_reference, &context);
21899 if (info->allocated)
21900 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
21901 dw_scalar_form_constant
21902 | dw_scalar_form_exprloc
21903 | dw_scalar_form_reference, &context);
21904 if (info->stride)
21905 {
21906 const enum dwarf_attribute attr
21907 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
21908 const int forms
21909 = (info->stride_in_bits)
21910 ? dw_scalar_form_constant
21911 : (dw_scalar_form_constant
21912 | dw_scalar_form_exprloc
21913 | dw_scalar_form_reference);
21914
21915 add_scalar_info (array_die, attr, info->stride, forms, &context);
21916 }
21917 }
21918 if (dwarf_version >= 5)
21919 {
21920 if (info->rank)
21921 {
21922 add_scalar_info (array_die, DW_AT_rank, info->rank,
21923 dw_scalar_form_constant
21924 | dw_scalar_form_exprloc, &context);
21925 subrange_tag = DW_TAG_generic_subrange;
21926 context.placeholder_arg = true;
21927 }
21928 }
21929
21930 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21931
21932 for (dim = 0; dim < info->ndimensions; dim++)
21933 {
21934 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
21935
21936 if (info->dimen[dim].bounds_type)
21937 add_type_attribute (subrange_die,
21938 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
21939 false, context_die);
21940 if (info->dimen[dim].lower_bound)
21941 add_bound_info (subrange_die, DW_AT_lower_bound,
21942 info->dimen[dim].lower_bound, &context);
21943 if (info->dimen[dim].upper_bound)
21944 add_bound_info (subrange_die, DW_AT_upper_bound,
21945 info->dimen[dim].upper_bound, &context);
21946 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
21947 add_scalar_info (subrange_die, DW_AT_byte_stride,
21948 info->dimen[dim].stride,
21949 dw_scalar_form_constant
21950 | dw_scalar_form_exprloc
21951 | dw_scalar_form_reference,
21952 &context);
21953 }
21954
21955 gen_type_die (info->element_type, context_die);
21956 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
21957 TREE_CODE (type) == ARRAY_TYPE
21958 && TYPE_REVERSE_STORAGE_ORDER (type),
21959 context_die);
21960
21961 if (get_AT (array_die, DW_AT_name))
21962 add_pubtype (type, array_die);
21963
21964 add_alignment_attribute (array_die, type);
21965 }
21966
21967 #if 0
21968 static void
21969 gen_entry_point_die (tree decl, dw_die_ref context_die)
21970 {
21971 tree origin = decl_ultimate_origin (decl);
21972 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
21973
21974 if (origin != NULL)
21975 add_abstract_origin_attribute (decl_die, origin);
21976 else
21977 {
21978 add_name_and_src_coords_attributes (decl_die, decl);
21979 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
21980 TYPE_UNQUALIFIED, false, context_die);
21981 }
21982
21983 if (DECL_ABSTRACT_P (decl))
21984 equate_decl_number_to_die (decl, decl_die);
21985 else
21986 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
21987 }
21988 #endif
21989
21990 /* Walk through the list of incomplete types again, trying once more to
21991 emit full debugging info for them. */
21992
21993 static void
21994 retry_incomplete_types (void)
21995 {
21996 set_early_dwarf s;
21997 int i;
21998
21999 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
22000 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
22001 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
22002 vec_safe_truncate (incomplete_types, 0);
22003 }
22004
22005 /* Determine what tag to use for a record type. */
22006
22007 static enum dwarf_tag
22008 record_type_tag (tree type)
22009 {
22010 if (! lang_hooks.types.classify_record)
22011 return DW_TAG_structure_type;
22012
22013 switch (lang_hooks.types.classify_record (type))
22014 {
22015 case RECORD_IS_STRUCT:
22016 return DW_TAG_structure_type;
22017
22018 case RECORD_IS_CLASS:
22019 return DW_TAG_class_type;
22020
22021 case RECORD_IS_INTERFACE:
22022 if (dwarf_version >= 3 || !dwarf_strict)
22023 return DW_TAG_interface_type;
22024 return DW_TAG_structure_type;
22025
22026 default:
22027 gcc_unreachable ();
22028 }
22029 }
22030
22031 /* Generate a DIE to represent an enumeration type. Note that these DIEs
22032 include all of the information about the enumeration values also. Each
22033 enumerated type name/value is listed as a child of the enumerated type
22034 DIE. */
22035
22036 static dw_die_ref
22037 gen_enumeration_type_die (tree type, dw_die_ref context_die)
22038 {
22039 dw_die_ref type_die = lookup_type_die (type);
22040 dw_die_ref orig_type_die = type_die;
22041
22042 if (type_die == NULL)
22043 {
22044 type_die = new_die (DW_TAG_enumeration_type,
22045 scope_die_for (type, context_die), type);
22046 equate_type_number_to_die (type, type_die);
22047 add_name_attribute (type_die, type_tag (type));
22048 if ((dwarf_version >= 4 || !dwarf_strict)
22049 && ENUM_IS_SCOPED (type))
22050 add_AT_flag (type_die, DW_AT_enum_class, 1);
22051 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
22052 add_AT_flag (type_die, DW_AT_declaration, 1);
22053 if (!dwarf_strict)
22054 add_AT_unsigned (type_die, DW_AT_encoding,
22055 TYPE_UNSIGNED (type)
22056 ? DW_ATE_unsigned
22057 : DW_ATE_signed);
22058 }
22059 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
22060 return type_die;
22061 else
22062 remove_AT (type_die, DW_AT_declaration);
22063
22064 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
22065 given enum type is incomplete, do not generate the DW_AT_byte_size
22066 attribute or the DW_AT_element_list attribute. */
22067 if (TYPE_SIZE (type))
22068 {
22069 tree link;
22070
22071 if (!ENUM_IS_OPAQUE (type))
22072 TREE_ASM_WRITTEN (type) = 1;
22073 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
22074 add_byte_size_attribute (type_die, type);
22075 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
22076 add_alignment_attribute (type_die, type);
22077 if ((dwarf_version >= 3 || !dwarf_strict)
22078 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
22079 {
22080 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
22081 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
22082 context_die);
22083 }
22084 if (TYPE_STUB_DECL (type) != NULL_TREE)
22085 {
22086 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22087 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22088 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22089 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22090 }
22091
22092 /* If the first reference to this type was as the return type of an
22093 inline function, then it may not have a parent. Fix this now. */
22094 if (type_die->die_parent == NULL)
22095 add_child_die (scope_die_for (type, context_die), type_die);
22096
22097 for (link = TYPE_VALUES (type);
22098 link != NULL; link = TREE_CHAIN (link))
22099 {
22100 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22101 tree value = TREE_VALUE (link);
22102
22103 gcc_assert (!ENUM_IS_OPAQUE (type));
22104 add_name_attribute (enum_die,
22105 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22106
22107 if (TREE_CODE (value) == CONST_DECL)
22108 value = DECL_INITIAL (value);
22109
22110 if (simple_type_size_in_bits (TREE_TYPE (value))
22111 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22112 {
22113           /* For constant forms created by add_AT_unsigned, DWARF
22114              consumers (GDB, elfutils, etc.) always zero-extend
22115              the value.  Only when the actual value is negative
22116 do we need to use add_AT_int to generate a constant
22117 form that can represent negative values. */
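	  /* Editorial example: for `enum e { NEG = -1, POS = 1 }' with a
	     signed underlying type, NEG takes the add_AT_int path below so
	     consumers read -1 rather than a large zero-extended unsigned
	     number, while POS uses the unsigned form.  */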
22118 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22119 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22120 add_AT_unsigned (enum_die, DW_AT_const_value,
22121 (unsigned HOST_WIDE_INT) val);
22122 else
22123 add_AT_int (enum_die, DW_AT_const_value, val);
22124 }
22125 else
22126 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22127 that here. TODO: This should be re-worked to use correct
22128 signed/unsigned double tags for all cases. */
22129 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22130 }
22131
22132 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22133 if (TYPE_ARTIFICIAL (type)
22134 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22135 add_AT_flag (type_die, DW_AT_artificial, 1);
22136 }
22137 else
22138 add_AT_flag (type_die, DW_AT_declaration, 1);
22139
22140 add_pubtype (type, type_die);
22141
22142 return type_die;
22143 }
22144
22145 /* Generate a DIE to represent either a real live formal parameter decl or to
22146 represent just the type of some formal parameter position in some function
22147 type.
22148
22149 Note that this routine is a bit unusual because its argument may be a
22150 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22151 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22152 node. If it's the former then this function is being called to output a
22153 DIE to represent a formal parameter object (or some inlining thereof). If
22154 it's the latter, then this function is only being called to output a
22155 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22156 argument type of some subprogram type.
22157 If EMIT_NAME_P is true, name and source coordinate attributes
22158 are emitted. */
22159
22160 static dw_die_ref
22161 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22162 dw_die_ref context_die)
22163 {
22164 tree node_or_origin = node ? node : origin;
22165 tree ultimate_origin;
22166 dw_die_ref parm_die = NULL;
22167
22168 if (DECL_P (node_or_origin))
22169 {
22170 parm_die = lookup_decl_die (node);
22171
22172 /* If the contexts differ, we may not be talking about the same
22173 thing.
22174 ??? When in LTO the DIE parent is the "abstract" copy and the
22175 context_die is the specification "copy". */
22176 if (parm_die
22177 && parm_die->die_parent != context_die
22178 && (parm_die->die_parent->die_tag != DW_TAG_GNU_formal_parameter_pack
22179 || parm_die->die_parent->die_parent != context_die)
22180 && !in_lto_p)
22181 {
22182 gcc_assert (!DECL_ABSTRACT_P (node));
22183 /* This can happen when creating a concrete instance, in
22184 which case we need to create a new DIE that will get
22185 annotated with DW_AT_abstract_origin. */
22186 parm_die = NULL;
22187 }
22188
22189 if (parm_die && parm_die->die_parent == NULL)
22190 {
22191 /* Check that parm_die already has the right attributes that
22192 we would have added below. If any attributes are
22193 missing, fall through to add them. */
22194 if (! DECL_ABSTRACT_P (node_or_origin)
22195 && !get_AT (parm_die, DW_AT_location)
22196 && !get_AT (parm_die, DW_AT_const_value))
22197 /* We are missing location info, and are about to add it. */
22198 ;
22199 else
22200 {
22201 add_child_die (context_die, parm_die);
22202 return parm_die;
22203 }
22204 }
22205 }
22206
22207   /* If we have a previously generated DIE, use it, unless this is a
22208      concrete instance (origin != NULL), in which case we need a new
22209 DIE with a corresponding DW_AT_abstract_origin. */
22210 bool reusing_die;
22211 if (parm_die && origin == NULL)
22212 reusing_die = true;
22213 else
22214 {
22215 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22216 reusing_die = false;
22217 }
22218
22219 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22220 {
22221 case tcc_declaration:
22222 ultimate_origin = decl_ultimate_origin (node_or_origin);
22223 if (node || ultimate_origin)
22224 origin = ultimate_origin;
22225
22226 if (reusing_die)
22227 goto add_location;
22228
22229 if (origin != NULL)
22230 add_abstract_origin_attribute (parm_die, origin);
22231 else if (emit_name_p)
22232 add_name_and_src_coords_attributes (parm_die, node);
22233 if (origin == NULL
22234 || (! DECL_ABSTRACT_P (node_or_origin)
22235 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22236 decl_function_context
22237 (node_or_origin))))
22238 {
22239 tree type = TREE_TYPE (node_or_origin);
22240 if (decl_by_reference_p (node_or_origin))
22241 add_type_attribute (parm_die, TREE_TYPE (type),
22242 TYPE_UNQUALIFIED,
22243 false, context_die);
22244 else
22245 add_type_attribute (parm_die, type,
22246 decl_quals (node_or_origin),
22247 false, context_die);
22248 }
22249 if (origin == NULL && DECL_ARTIFICIAL (node))
22250 add_AT_flag (parm_die, DW_AT_artificial, 1);
22251 add_location:
22252 if (node && node != origin)
22253 equate_decl_number_to_die (node, parm_die);
22254 if (! DECL_ABSTRACT_P (node_or_origin))
22255 add_location_or_const_value_attribute (parm_die, node_or_origin,
22256 node == NULL);
22257
22258 break;
22259
22260 case tcc_type:
22261 /* We were called with some kind of a ..._TYPE node. */
22262 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22263 context_die);
22264 break;
22265
22266 default:
22267 gcc_unreachable ();
22268 }
22269
22270 return parm_die;
22271 }
22272
22273 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22274 children DW_TAG_formal_parameter DIEs representing the arguments of the
22275 parameter pack.
22276
22277 PARM_PACK must be a function parameter pack.
22278 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22279 must point to the subsequent arguments of the function PACK_ARG belongs to.
22280 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22281    If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
22282 following the last one for which a DIE was generated. */
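/* Editorial sketch (C++ assumed): for
       template <typename... Ts> void f (int n, Ts... rest);
   instantiated with two pack arguments, the subprogram DIE gets a normal
   DW_TAG_formal_parameter for N followed by one
   DW_TAG_GNU_formal_parameter_pack for REST, whose children are the two
   expanded DW_TAG_formal_parameter DIEs (emitted without name
   attributes).  */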
22283
22284 static dw_die_ref
22285 gen_formal_parameter_pack_die (tree parm_pack,
22286 tree pack_arg,
22287 dw_die_ref subr_die,
22288 tree *next_arg)
22289 {
22290 tree arg;
22291 dw_die_ref parm_pack_die;
22292
22293 gcc_assert (parm_pack
22294 && lang_hooks.function_parameter_pack_p (parm_pack)
22295 && subr_die);
22296
22297 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22298 add_src_coords_attributes (parm_pack_die, parm_pack);
22299
22300 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22301 {
22302 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22303 parm_pack))
22304 break;
22305 gen_formal_parameter_die (arg, NULL,
22306 false /* Don't emit name attribute. */,
22307 parm_pack_die);
22308 }
22309 if (next_arg)
22310 *next_arg = arg;
22311 return parm_pack_die;
22312 }
22313
22314 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22315    at the end of an (ANSI prototyped) formal parameter list.  */
22316
22317 static void
22318 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22319 {
22320 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22321 }
22322
22323 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22324 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22325 parameters as specified in some function type specification (except for
22326 those which appear as part of a function *definition*). */
22327
22328 static void
22329 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22330 {
22331 tree link;
22332 tree formal_type = NULL;
22333 tree first_parm_type;
22334 tree arg;
22335
22336 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22337 {
22338 arg = DECL_ARGUMENTS (function_or_method_type);
22339 function_or_method_type = TREE_TYPE (function_or_method_type);
22340 }
22341 else
22342 arg = NULL_TREE;
22343
22344 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22345
22346 /* Make our first pass over the list of formal parameter types and output a
22347 DW_TAG_formal_parameter DIE for each one. */
22348 for (link = first_parm_type; link; )
22349 {
22350 dw_die_ref parm_die;
22351
22352 formal_type = TREE_VALUE (link);
22353 if (formal_type == void_type_node)
22354 break;
22355
22356 /* Output a (nameless) DIE to represent the formal parameter itself. */
22357 parm_die = gen_formal_parameter_die (formal_type, NULL,
22358 true /* Emit name attribute. */,
22359 context_die);
22360 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22361 && link == first_parm_type)
22362 {
22363 add_AT_flag (parm_die, DW_AT_artificial, 1);
22364 if (dwarf_version >= 3 || !dwarf_strict)
22365 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22366 }
22367 else if (arg && DECL_ARTIFICIAL (arg))
22368 add_AT_flag (parm_die, DW_AT_artificial, 1);
22369
22370 link = TREE_CHAIN (link);
22371 if (arg)
22372 arg = DECL_CHAIN (arg);
22373 }
22374
22375 /* If this function type has an ellipsis, add a
22376 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22377 if (formal_type != void_type_node)
22378 gen_unspecified_parameters_die (function_or_method_type, context_die);
22379
22380 /* Make our second (and final) pass over the list of formal parameter types
22381 and output DIEs to represent those types (as necessary). */
22382 for (link = TYPE_ARG_TYPES (function_or_method_type);
22383 link && TREE_VALUE (link);
22384 link = TREE_CHAIN (link))
22385 gen_type_die (TREE_VALUE (link), context_die);
22386 }
22387
22388 /* We want to generate the DIE for TYPE so that we can generate the
22389 die for MEMBER, which has been defined; we will need to refer back
22390 to the member declaration nested within TYPE. If we're trying to
22391 generate minimal debug info for TYPE, processing TYPE won't do the
22392 trick; we need to attach the member declaration by hand. */
22393
22394 static void
22395 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22396 {
22397 gen_type_die (type, context_die);
22398
22399 /* If we're trying to avoid duplicate debug info, we may not have
22400 emitted the member decl for this function. Emit it now. */
22401 if (TYPE_STUB_DECL (type)
22402 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22403 && ! lookup_decl_die (member))
22404 {
22405 dw_die_ref type_die;
22406 gcc_assert (!decl_ultimate_origin (member));
22407
22408 type_die = lookup_type_die_strip_naming_typedef (type);
22409 if (TREE_CODE (member) == FUNCTION_DECL)
22410 gen_subprogram_die (member, type_die);
22411 else if (TREE_CODE (member) == FIELD_DECL)
22412 {
22413 /* Ignore the nameless fields that are used to skip bits but handle
22414 C++ anonymous unions and structs. */
22415 if (DECL_NAME (member) != NULL_TREE
22416 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22417 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22418 {
22419 struct vlr_context vlr_ctx = {
22420 DECL_CONTEXT (member), /* struct_type */
22421 NULL_TREE /* variant_part_offset */
22422 };
22423 gen_type_die (member_declared_type (member), type_die);
22424 gen_field_die (member, &vlr_ctx, type_die);
22425 }
22426 }
22427 else
22428 gen_variable_die (member, NULL_TREE, type_die);
22429 }
22430 }
22431 \f
22432 /* Forward declare these functions, because they are mutually recursive
22433 with their set_block_* pairing functions. */
22434 static void set_decl_origin_self (tree);
22435
22436 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22437 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22438 that it points to the node itself, thus indicating that the node is its
22439 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22440 the given node is NULL, recursively descend the decl/block tree which
22441 it is the root of, and for each other ..._DECL or BLOCK node contained
22442 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22443 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22444 values to point to themselves. */
22445
22446 static void
22447 set_block_origin_self (tree stmt)
22448 {
22449 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22450 {
22451 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22452
22453 {
22454 tree local_decl;
22455
22456 for (local_decl = BLOCK_VARS (stmt);
22457 local_decl != NULL_TREE;
22458 local_decl = DECL_CHAIN (local_decl))
22459 /* Do not recurse on nested functions since the inlining status
22460 of parent and child can be different as per the DWARF spec. */
22461 if (TREE_CODE (local_decl) != FUNCTION_DECL
22462 && !DECL_EXTERNAL (local_decl))
22463 set_decl_origin_self (local_decl);
22464 }
22465
22466 {
22467 tree subblock;
22468
22469 for (subblock = BLOCK_SUBBLOCKS (stmt);
22470 subblock != NULL_TREE;
22471 subblock = BLOCK_CHAIN (subblock))
22472 set_block_origin_self (subblock); /* Recurse. */
22473 }
22474 }
22475 }
22476
22477 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22478 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22479 node so that it points to the node itself, thus indicating that the
22480 node represents its own (abstract) origin. Additionally, if the
22481 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22482 the decl/block tree of which the given node is the root, and for
22483 each other ..._DECL or BLOCK node contained therein whose
22484 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22485 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22486 point to themselves. */
22487
22488 static void
22489 set_decl_origin_self (tree decl)
22490 {
22491 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22492 {
22493 DECL_ABSTRACT_ORIGIN (decl) = decl;
22494 if (TREE_CODE (decl) == FUNCTION_DECL)
22495 {
22496 tree arg;
22497
22498 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22499 DECL_ABSTRACT_ORIGIN (arg) = arg;
22500 if (DECL_INITIAL (decl) != NULL_TREE
22501 && DECL_INITIAL (decl) != error_mark_node)
22502 set_block_origin_self (DECL_INITIAL (decl));
22503 }
22504 }
22505 }
22506 \f
22507 /* Mark the early DIE for DECL as the abstract instance. */
22508
22509 static void
22510 dwarf2out_abstract_function (tree decl)
22511 {
22512 dw_die_ref old_die;
22513
22514 /* Make sure we have the actual abstract inline, not a clone. */
22515 decl = DECL_ORIGIN (decl);
22516
22517 if (DECL_IGNORED_P (decl))
22518 return;
22519
22520 /* In LTO we're all set. We already created abstract instances
22521 early and we want to avoid creating a concrete instance of that
22522 if we don't output it. */
22523 if (in_lto_p)
22524 return;
22525
22526 old_die = lookup_decl_die (decl);
22527 gcc_assert (old_die != NULL);
22528 if (get_AT (old_die, DW_AT_inline))
22529 /* We've already generated the abstract instance. */
22530 return;
22531
22532 /* Go ahead and put DW_AT_inline on the DIE. */
22533 if (DECL_DECLARED_INLINE_P (decl))
22534 {
22535 if (cgraph_function_possibly_inlined_p (decl))
22536 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22537 else
22538 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22539 }
22540 else
22541 {
22542 if (cgraph_function_possibly_inlined_p (decl))
22543 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22544 else
22545 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22546 }
22547
22548 if (DECL_DECLARED_INLINE_P (decl)
22549 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22550 add_AT_flag (old_die, DW_AT_artificial, 1);
22551
22552 set_decl_origin_self (decl);
22553 }
22554
22555 /* Helper function of premark_used_types() which gets called through
22556 htab_traverse.
22557
22558 Marks the DIE of the given TYPE as perennial, so it never gets
22559 marked as unused by prune_unused_types. */
22560
22561 bool
22562 premark_used_types_helper (tree const &type, void *)
22563 {
22564 dw_die_ref die;
22565
22566 die = lookup_type_die (type);
22567 if (die != NULL)
22568 die->die_perennial_p = 1;
22569 return true;
22570 }
22571
22572 /* Helper function of premark_types_used_by_global_vars which gets called
22573 through htab_traverse.
22574
22575 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22576 marked as unused by prune_unused_types. The DIE of the type is marked
22577 only if the global variable using the type will actually be emitted. */
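/* Illustrative example: for a file-scope definition such as

       struct pt { int x, y; } origin_pt;

   the DIE for 'struct pt' (and its parent DIEs) is kept perennial here,
   because the variable 'origin_pt' will really be emitted.  */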
22578
22579 int
22580 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22581 void *)
22582 {
22583 struct types_used_by_vars_entry *entry;
22584 dw_die_ref die;
22585
22586 entry = (struct types_used_by_vars_entry *) *slot;
22587 gcc_assert (entry->type != NULL
22588 && entry->var_decl != NULL);
22589 die = lookup_type_die (entry->type);
22590 if (die)
22591 {
22592 /* Ask cgraph if the global variable really is to be emitted.
22593 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22594 varpool_node *node = varpool_node::get (entry->var_decl);
22595 if (node && node->definition)
22596 {
22597 die->die_perennial_p = 1;
22598 /* Keep the parent DIEs as well. */
22599 while ((die = die->die_parent) && die->die_perennial_p == 0)
22600 die->die_perennial_p = 1;
22601 }
22602 }
22603 return 1;
22604 }
22605
22606 /* Mark all members of used_types_hash as perennial. */
22607
22608 static void
22609 premark_used_types (struct function *fun)
22610 {
22611 if (fun && fun->used_types_hash)
22612 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22613 }
22614
22615 /* Mark all members of types_used_by_vars_hash as perennial. */
22616
22617 static void
22618 premark_types_used_by_global_vars (void)
22619 {
22620 if (types_used_by_vars_hash)
22621 types_used_by_vars_hash
22622 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22623 }
22624
22625 /* Mark all variables used by the symtab as perennial. */
22626
22627 static void
22628 premark_used_variables (void)
22629 {
22630 /* Mark DIEs in the symtab as used. */
22631 varpool_node *var;
22632 FOR_EACH_VARIABLE (var)
22633 {
22634 dw_die_ref die = lookup_decl_die (var->decl);
22635 if (die)
22636 die->die_perennial_p = 1;
22637 }
22638 }
22639
22640 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22641 for the call argument location node CA_LOC. */
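/* Illustrative sketch of the output: for a direct call such as

       foo (x);

   this produces (in DWARF 5 terms) a DW_TAG_call_site DIE whose
   DW_AT_call_return_pc labels the return address and whose
   DW_AT_call_origin refers to foo's DIE, or to foo's symbol address when
   no DIE for the callee is available.  */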
22642
22643 static dw_die_ref
22644 gen_call_site_die (tree decl, dw_die_ref subr_die,
22645 struct call_arg_loc_node *ca_loc)
22646 {
22647 dw_die_ref stmt_die = NULL, die;
22648 tree block = ca_loc->block;
22649
22650 while (block
22651 && block != DECL_INITIAL (decl)
22652 && TREE_CODE (block) == BLOCK)
22653 {
22654 stmt_die = lookup_block_die (block);
22655 if (stmt_die)
22656 break;
22657 block = BLOCK_SUPERCONTEXT (block);
22658 }
22659 if (stmt_die == NULL)
22660 stmt_die = subr_die;
22661 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22662 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22663 if (ca_loc->tail_call_p)
22664 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22665 if (ca_loc->symbol_ref)
22666 {
22667 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22668 if (tdie)
22669 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22670 else
22671 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22672 false);
22673 }
22674 return die;
22675 }
22676
22677 /* Generate a DIE to represent a declared function (either file-scope or
22678 block-local). */
22679
22680 static void
22681 gen_subprogram_die (tree decl, dw_die_ref context_die)
22682 {
22683 tree origin = decl_ultimate_origin (decl);
22684 dw_die_ref subr_die;
22685 dw_die_ref old_die = lookup_decl_die (decl);
22686
22687 /* This function gets called multiple times for different stages of
22688 the debug process. For example, for func() in this code:
22689
22690 namespace S
22691 {
22692 void func() { ... }
22693 }
22694
22695 ...we get called 4 times. Twice in early debug and twice in
22696 late debug:
22697
22698 Early debug
22699 -----------
22700
22701 1. Once while generating func() within the namespace. This is
22702 the declaration. The declaration bit below is set, as the
22703 context is the namespace.
22704
22705 A new DIE will be generated with DW_AT_declaration set.
22706
22707 2. Once for func() itself. This is the specification. The
22708 declaration bit below is clear as the context is the CU.
22709
22710 We will use the cached DIE from (1) to create a new DIE with
22711 DW_AT_specification pointing to the declaration in (1).
22712
22713 Late debug via rest_of_handle_final()
22714 -------------------------------------
22715
22716 3. Once generating func() within the namespace. This is also the
22717 declaration, as in (1), but this time we will early exit below
22718 as we have a cached DIE and a declaration needs no additional
22719 annotations (no locations), as the source declaration line
22720 info is enough.
22721
22722 4. Once for func() itself. As in (2), this is the specification,
22723 but this time we will re-use the cached DIE, and just annotate
22724 it with the location information that should now be available.
22725
22726 For something without namespaces, but with abstract instances, we
22727 are also called multiple times:
22728
22729 class Base
22730 {
22731 public:
22732 Base (); // constructor declaration (1)
22733 };
22734
22735 Base::Base () { } // constructor specification (2)
22736
22737 Early debug
22738 -----------
22739
22740 1. Once for the Base() constructor by virtue of it being a
22741 member of the Base class. This is done via
22742 rest_of_type_compilation.
22743
22744 This is a declaration, so a new DIE will be created with
22745 DW_AT_declaration.
22746
22747 2. Once for the Base() constructor definition, but this time
22748 while generating the abstract instance of the base
22749 constructor (__base_ctor) which is being generated via early
22750 debug of reachable functions.
22751
22752 Even though we have a cached version of the declaration (1),
22753 we will create a DW_AT_specification of the declaration DIE
22754 in (1).
22755
22756 3. Once for the __base_ctor itself, but this time, we generate
22757 a DW_AT_abstract_origin version of the DW_AT_specification in
22758 (2).
22759
22760 Late debug via rest_of_handle_final
22761 -----------------------------------
22762
22763 4. One final time for the __base_ctor (which will have a cached
22764 DIE with DW_AT_abstract_origin created in (3)). This time,
22765 we will just annotate the location information now
22766 available.
22767 */
22768 int declaration = (current_function_decl != decl
22769 || class_or_namespace_scope_p (context_die));
22770
22771 /* A declaration that has been previously dumped needs no
22772 additional information. */
22773 if (old_die && declaration)
22774 return;
22775
22776 /* Now that the C++ front end lazily declares artificial member fns, we
22777 might need to retrofit the declaration into its class. */
22778 if (!declaration && !origin && !old_die
22779 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22780 && !class_or_namespace_scope_p (context_die)
22781 && debug_info_level > DINFO_LEVEL_TERSE)
22782 old_die = force_decl_die (decl);
22783
22784 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22785 if (origin != NULL)
22786 {
22787 gcc_assert (!declaration || local_scope_p (context_die));
22788
22789 /* Fixup die_parent for the abstract instance of a nested
22790 inline function. */
22791 if (old_die && old_die->die_parent == NULL)
22792 add_child_die (context_die, old_die);
22793
22794 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22795 {
22796 /* If we have a DW_AT_abstract_origin we have a working
22797 cached version. */
22798 subr_die = old_die;
22799 }
22800 else
22801 {
22802 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22803 add_abstract_origin_attribute (subr_die, origin);
22804 /* This is where the actual code for a cloned function is.
22805 Let's emit linkage name attribute for it. This helps
22806 debuggers to e.g, set breakpoints into
22807 constructors/destructors when the user asks "break
22808 K::K". */
22809 add_linkage_name (subr_die, decl);
22810 }
22811 }
22812 /* A cached copy, possibly from early dwarf generation. Reuse as
22813 much as possible. */
22814 else if (old_die)
22815 {
22816 if (!get_AT_flag (old_die, DW_AT_declaration)
22817 /* We can have a normal definition following an inline one in the
22818 case of redefinition of GNU C extern inlines.
22819 It seems reasonable to use AT_specification in this case. */
22820 && !get_AT (old_die, DW_AT_inline))
22821 {
22822 /* Detect and ignore this case, where we are trying to output
22823 something we have already output. */
22824 if (get_AT (old_die, DW_AT_low_pc)
22825 || get_AT (old_die, DW_AT_ranges))
22826 return;
22827
22828 /* If we have no location information, this must be a
22829 partially generated DIE from early dwarf generation.
22830 Fall through and generate it. */
22831 }
22832
22833 /* If the definition comes from the same place as the declaration,
22834 maybe use the old DIE. We always want the DIE for this function
22835 that has the *_pc attributes to be under comp_unit_die so the
22836 debugger can find it. We also need to do this for abstract
22837 instances of inlines, since the spec requires the out-of-line copy
22838 to have the same parent. For local class methods, this doesn't
22839 apply; we just use the old DIE. */
22840 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22841 struct dwarf_file_data * file_index = lookup_filename (s.file);
22842 if (((is_unit_die (old_die->die_parent)
22843 /* This condition fixes the inconsistency/ICE with the
22844 following Fortran test (or some derivative thereof) while
22845 building libgfortran:
22846
22847 module some_m
22848 contains
22849 logical function funky (FLAG)
22850 funky = .true.
22851 end function
22852 end module
22853 */
22854 || (old_die->die_parent
22855 && old_die->die_parent->die_tag == DW_TAG_module)
22856 || local_scope_p (old_die->die_parent)
22857 || context_die == NULL)
22858 && (DECL_ARTIFICIAL (decl)
22859 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22860 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22861 == (unsigned) s.line)
22862 && (!debug_column_info
22863 || s.column == 0
22864 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22865 == (unsigned) s.column)))))
22866 /* With LTO if there's an abstract instance for
22867 the old DIE, this is a concrete instance and
22868 thus re-use the DIE. */
22869 || get_AT (old_die, DW_AT_abstract_origin))
22870 {
22871 subr_die = old_die;
22872
22873 /* Clear out the declaration attribute, but leave the
22874 parameters so they can be augmented with location
22875 information later. Unless this was a declaration, in
22876 which case, wipe out the nameless parameters and recreate
22877 them further down. */
22878 if (remove_AT (subr_die, DW_AT_declaration))
22879 {
22880
22881 remove_AT (subr_die, DW_AT_object_pointer);
22882 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
22883 }
22884 }
22885 /* Make a specification pointing to the previously built
22886 declaration. */
22887 else
22888 {
22889 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22890 add_AT_specification (subr_die, old_die);
22891 add_pubname (decl, subr_die);
22892 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
22893 add_AT_file (subr_die, DW_AT_decl_file, file_index);
22894 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
22895 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
22896 if (debug_column_info
22897 && s.column
22898 && (get_AT_unsigned (old_die, DW_AT_decl_column)
22899 != (unsigned) s.column))
22900 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
22901
22902 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
22903 emit the real type on the definition die. */
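/* Illustrative example (hypothetical source): for

       struct A { auto get () const; };
       auto A::get () const { return 42; }

   the declaration DIE refers to the placeholder auto_die, and the deduced
   return type (int) is attached to the definition DIE here.  */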
22904 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
22905 {
22906 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
22907 if (die == auto_die || die == decltype_auto_die)
22908 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22909 TYPE_UNQUALIFIED, false, context_die);
22910 }
22911
22912 /* When we process the method declaration, we haven't seen
22913 the out-of-class defaulted definition yet, so we have to
22914 recheck now. */
22915 if ((dwarf_version >= 5 || ! dwarf_strict)
22916 && !get_AT (subr_die, DW_AT_defaulted))
22917 {
22918 int defaulted
22919 = lang_hooks.decls.decl_dwarf_attribute (decl,
22920 DW_AT_defaulted);
22921 if (defaulted != -1)
22922 {
22923 /* Other values must have been handled before. */
22924 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
22925 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22926 }
22927 }
22928 }
22929 }
22930 /* Create a fresh DIE for anything else. */
22931 else
22932 {
22933 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22934
22935 if (TREE_PUBLIC (decl))
22936 add_AT_flag (subr_die, DW_AT_external, 1);
22937
22938 add_name_and_src_coords_attributes (subr_die, decl);
22939 add_pubname (decl, subr_die);
22940 if (debug_info_level > DINFO_LEVEL_TERSE)
22941 {
22942 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
22943 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
22944 TYPE_UNQUALIFIED, false, context_die);
22945 }
22946
22947 add_pure_or_virtual_attribute (subr_die, decl);
22948 if (DECL_ARTIFICIAL (decl))
22949 add_AT_flag (subr_die, DW_AT_artificial, 1);
22950
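/* On a FUNCTION_DECL, TREE_THIS_VOLATILE means the function never returns
   (e.g. it was declared with __attribute__ ((noreturn)) or C11 _Noreturn),
   which maps to DW_AT_noreturn below.  */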
22951 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
22952 add_AT_flag (subr_die, DW_AT_noreturn, 1);
22953
22954 add_alignment_attribute (subr_die, decl);
22955
22956 add_accessibility_attribute (subr_die, decl);
22957 }
22958
22959 /* Unless we have an existing non-declaration DIE, equate the new
22960 DIE. */
22961 if (!old_die || is_declaration_die (old_die))
22962 equate_decl_number_to_die (decl, subr_die);
22963
22964 if (declaration)
22965 {
22966 if (!old_die || !get_AT (old_die, DW_AT_inline))
22967 {
22968 add_AT_flag (subr_die, DW_AT_declaration, 1);
22969
22970 /* If this is an explicit function declaration then generate
22971 a DW_AT_explicit attribute. */
22972 if ((dwarf_version >= 3 || !dwarf_strict)
22973 && lang_hooks.decls.decl_dwarf_attribute (decl,
22974 DW_AT_explicit) == 1)
22975 add_AT_flag (subr_die, DW_AT_explicit, 1);
22976
22977 /* If this is a C++11 deleted special function member then generate
22978 a DW_AT_deleted attribute. */
22979 if ((dwarf_version >= 5 || !dwarf_strict)
22980 && lang_hooks.decls.decl_dwarf_attribute (decl,
22981 DW_AT_deleted) == 1)
22982 add_AT_flag (subr_die, DW_AT_deleted, 1);
22983
22984 /* If this is a C++11 defaulted special function member then
22985 generate a DW_AT_defaulted attribute. */
22986 if (dwarf_version >= 5 || !dwarf_strict)
22987 {
22988 int defaulted
22989 = lang_hooks.decls.decl_dwarf_attribute (decl,
22990 DW_AT_defaulted);
22991 if (defaulted != -1)
22992 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
22993 }
22994
22995 /* If this is a C++11 non-static member function with & ref-qualifier
22996 then generate a DW_AT_reference attribute. */
22997 if ((dwarf_version >= 5 || !dwarf_strict)
22998 && lang_hooks.decls.decl_dwarf_attribute (decl,
22999 DW_AT_reference) == 1)
23000 add_AT_flag (subr_die, DW_AT_reference, 1);
23001
23002 /* If this is a C++11 non-static member function with &&
23003 ref-qualifier then generate a DW_AT_reference attribute. */
23004 if ((dwarf_version >= 5 || !dwarf_strict)
23005 && lang_hooks.decls.decl_dwarf_attribute (decl,
23006 DW_AT_rvalue_reference)
23007 == 1)
23008 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
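/* Illustrative C++11 example of the declarations handled above:

       struct A {
         A () = default;              // DW_AT_defaulted
         A (const A &) = delete;      // DW_AT_deleted
         void f () &;                 // DW_AT_reference
         void g () &&;                // DW_AT_rvalue_reference
       };  */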
23009 }
23010 }
23011 /* For non-DECL_EXTERNAL decls, if range information is available, fill
23012 the DIE with it. */
23013 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
23014 {
23015 HOST_WIDE_INT cfa_fb_offset;
23016
23017 struct function *fun = DECL_STRUCT_FUNCTION (decl);
23018
23019 if (!crtl->has_bb_partition)
23020 {
23021 dw_fde_ref fde = fun->fde;
23022 if (fde->dw_fde_begin)
23023 {
23024 /* We have already generated the labels. */
23025 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23026 fde->dw_fde_end, false);
23027 }
23028 else
23029 {
23030 /* Create start/end labels and add the range. */
23031 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
23032 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
23033 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
23034 current_function_funcdef_no);
23035 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
23036 current_function_funcdef_no);
23037 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
23038 false);
23039 }
23040
23041 #if VMS_DEBUGGING_INFO
23042 /* HP OpenVMS Industry Standard 64: DWARF Extensions
23043 Section 2.3 Prologue and Epilogue Attributes:
23044 When a breakpoint is set on entry to a function, it is generally
23045 desirable for execution to be suspended, not on the very first
23046 instruction of the function, but rather at a point after the
23047 function's frame has been set up, after any language defined local
23048 declaration processing has been completed, and before execution of
23049 the first statement of the function begins. Debuggers generally
23050 cannot properly determine where this point is. Similarly for a
23051 breakpoint set on exit from a function. The prologue and epilogue
23052 attributes allow a compiler to communicate the location(s) to use. */
23053
23054 {
23055 if (fde->dw_fde_vms_end_prologue)
23056 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
23057 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
23058
23059 if (fde->dw_fde_vms_begin_epilogue)
23060 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
23061 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
23062 }
23063 #endif
23064
23065 }
23066 else
23067 {
23068 /* Generate pubnames entries for the split function code ranges. */
23069 dw_fde_ref fde = fun->fde;
23070
23071 if (fde->dw_fde_second_begin)
23072 {
23073 if (dwarf_version >= 3 || !dwarf_strict)
23074 {
23075 /* We should use ranges for non-contiguous code section
23076 addresses. Use the actual code range for the initial
23077 section, since the HOT/COLD labels might precede an
23078 alignment offset. */
23079 bool range_list_added = false;
23080 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
23081 fde->dw_fde_end, &range_list_added,
23082 false);
23083 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
23084 fde->dw_fde_second_end,
23085 &range_list_added, false);
23086 if (range_list_added)
23087 add_ranges (NULL);
23088 }
23089 else
23090 {
23091 /* There is no real support in DW2 for this, so we make
23092 a work-around. First, emit the pub name for the segment
23093 containing the function label. Then make and emit a
23094 simplified subprogram DIE for the second segment with the
23095 name prefixed by __second_sect_of_. We use the same
23096 linkage name for the second die so that gdb will find both
23097 sections when given "b foo". */
23098 const char *name = NULL;
23099 tree decl_name = DECL_NAME (decl);
23100 dw_die_ref seg_die;
23101
23102 /* Do the 'primary' section. */
23103 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23104 fde->dw_fde_end, false);
23105
23106 /* Build a minimal DIE for the secondary section. */
23107 seg_die = new_die (DW_TAG_subprogram,
23108 subr_die->die_parent, decl);
23109
23110 if (TREE_PUBLIC (decl))
23111 add_AT_flag (seg_die, DW_AT_external, 1);
23112
23113 if (decl_name != NULL
23114 && IDENTIFIER_POINTER (decl_name) != NULL)
23115 {
23116 name = dwarf2_name (decl, 1);
23117 if (! DECL_ARTIFICIAL (decl))
23118 add_src_coords_attributes (seg_die, decl);
23119
23120 add_linkage_name (seg_die, decl);
23121 }
23122 gcc_assert (name != NULL);
23123 add_pure_or_virtual_attribute (seg_die, decl);
23124 if (DECL_ARTIFICIAL (decl))
23125 add_AT_flag (seg_die, DW_AT_artificial, 1);
23126
23127 name = concat ("__second_sect_of_", name, NULL);
23128 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23129 fde->dw_fde_second_end, false);
23130 add_name_attribute (seg_die, name);
23131 if (want_pubnames ())
23132 add_pubname_string (name, seg_die);
23133 }
23134 }
23135 else
23136 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23137 false);
23138 }
23139
23140 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23141
23142 /* We define the "frame base" as the function's CFA. This is more
23143 convenient for several reasons: (1) It's stable across the prologue
23144 and epilogue, which makes it better than just a frame pointer,
23145 (2) With dwarf3, there exists a one-byte encoding that allows us
23146 to reference the .debug_frame data by proxy, but failing that,
23147 (3) We can at least reuse the code inspection and interpretation
23148 code that determines the CFA position at various points in the
23149 function. */
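/* Illustrative sketch of the two encodings chosen below: when DWARF 3 or
   later is in use and the target emits DWARF 2 unwind info, the frame base
   is simply

       DW_AT_frame_base: DW_OP_call_frame_cfa

   and local variables are then described as DW_OP_fbreg <offset>;
   otherwise a location list tracking the CFA register/offset across the
   function is emitted instead.  */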
23150 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23151 {
23152 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23153 add_AT_loc (subr_die, DW_AT_frame_base, op);
23154 }
23155 else
23156 {
23157 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23158 if (list->dw_loc_next)
23159 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23160 else
23161 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23162 }
23163
23164 /* Compute a displacement from the "steady-state frame pointer" to
23165 the CFA. The former is what all stack slots and argument slots
23166 will reference in the rtl; the latter is what we've told the
23167 debugger about. We'll need to adjust all frame_base references
23168 by this displacement. */
23169 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23170
23171 if (fun->static_chain_decl)
23172 {
23173 /* DWARF requires here a location expression that computes the
23174 address of the enclosing subprogram's frame base. The machinery
23175 in tree-nested.c is supposed to store this specific address in the
23176 last field of the FRAME record. */
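/* Illustrative example (GNU C nested function):

       void outer (void)
       {
         int i = 0;
         void inner (void) { i++; }
         inner ();
       }

   The FB_EXPR built below dereferences the static chain and reads the last
   field of the FRAME record, so DW_AT_static_link gives the debugger
   outer's frame base while it is stopped inside inner.  */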
23177 const tree frame_type
23178 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23179 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23180
23181 tree fb_expr
23182 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23183 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23184 fb_expr, fb_decl, NULL_TREE);
23185
23186 add_AT_location_description (subr_die, DW_AT_static_link,
23187 loc_list_from_tree (fb_expr, 0, NULL));
23188 }
23189
23190 resolve_variable_values ();
23191 }
23192
23193 /* Generate child dies for template parameters. */
23194 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23195 gen_generic_params_dies (decl);
23196
23197 /* Now output descriptions of the arguments for this function. This gets
23198 (unnecessarily?) complex because the DECL_ARGUMENTS list
23199 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23200 `...' at the end of the formal parameter list. To find out whether
23201 there was a trailing ellipsis, we must instead look at the type
23202 associated with the FUNCTION_DECL. This will be a node of type
23203 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23204 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23205 an ellipsis at the end. */
23206
23207 /* In the case where we are describing a mere function declaration, all we
23208 need to do here (and all we *can* do here) is to describe the *types* of
23209 its formal parameters. */
23210 if (debug_info_level <= DINFO_LEVEL_TERSE)
23211 ;
23212 else if (declaration)
23213 gen_formal_types_die (decl, subr_die);
23214 else
23215 {
23216 /* Generate DIEs to represent all known formal parameters. */
23217 tree parm = DECL_ARGUMENTS (decl);
23218 tree generic_decl = early_dwarf
23219 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23220 tree generic_decl_parm = generic_decl
23221 ? DECL_ARGUMENTS (generic_decl)
23222 : NULL;
23223
23224 /* Now we want to walk the list of parameters of the function and
23225 emit their relevant DIEs.
23226
23227 We consider the case of DECL being an instance of a generic function
23228 as well as it being a normal function.
23229
23230 If DECL is an instance of a generic function we walk the
23231 parameters of the generic function declaration _and_ the parameters of
23232 DECL itself. This is useful because we want to emit specific DIEs for
23233 function parameter packs and those are declared as part of the
23234 generic function declaration. In that particular case,
23235 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23236 That DIE has child DIEs representing the set of arguments
23237 of the pack. Note that the set of pack arguments can be empty.
23238 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23239 child DIEs.
23240
23241 Otherwise, we just consider the parameters of DECL. */
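/* Illustrative C++ example: for

       template <typename... T> void f (T... args);
       f (1, 2.0);

   the instance f<int, double> gets a DW_TAG_GNU_formal_parameter_pack DIE
   for 'args' whose children are the DW_TAG_formal_parameter DIEs of the
   two pack elements.  */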
23242 while (generic_decl_parm || parm)
23243 {
23244 if (generic_decl_parm
23245 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23246 gen_formal_parameter_pack_die (generic_decl_parm,
23247 parm, subr_die,
23248 &parm);
23249 else if (parm)
23250 {
23251 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23252
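/* For a non-static member function, the first parameter is the implicit
   'this' pointer; point DW_AT_object_pointer at its DIE.  */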
23253 if (early_dwarf
23254 && parm == DECL_ARGUMENTS (decl)
23255 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23256 && parm_die
23257 && (dwarf_version >= 3 || !dwarf_strict))
23258 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23259
23260 parm = DECL_CHAIN (parm);
23261 }
23262
23263 if (generic_decl_parm)
23264 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23265 }
23266
23267 /* Decide whether we need an unspecified_parameters DIE at the end.
23268 There are two more cases to do this for: 1) the ANSI `...' declaration,
23269 which is detectable when the end of the arg list is not a
23270 void_type_node, and 2) an unprototyped function declaration (not a
23271 definition), which just means that we have no info about the
23272 parameters at all. */
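/* Illustrative example of case 2): an old-style declaration such as

       extern int old_fn ();

   with no definition carries no parameter information at all, so a
   DW_TAG_unspecified_parameters child is emitted for it below.  */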
23273 if (early_dwarf)
23274 {
23275 if (prototype_p (TREE_TYPE (decl)))
23276 {
23277 /* This is the prototyped case; check for a trailing ellipsis. */
23278 if (stdarg_p (TREE_TYPE (decl)))
23279 gen_unspecified_parameters_die (decl, subr_die);
23280 }
23281 else if (DECL_INITIAL (decl) == NULL_TREE)
23282 gen_unspecified_parameters_die (decl, subr_die);
23283 }
23284 }
23285
23286 if (subr_die != old_die)
23287 /* Add the calling convention attribute if requested. */
23288 add_calling_convention_attribute (subr_die, decl);
23289
23290 /* Output Dwarf info for all of the stuff within the body of the function
23291 (if it has one - it may be just a declaration).
23292
23293 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23294 a function. This BLOCK actually represents the outermost binding contour
23295 for the function, i.e. the contour in which the function's formal
23296 parameters and labels get declared. Curiously, it appears that the front
23297 end doesn't actually put the PARM_DECL nodes for the current function onto
23298 the BLOCK_VARS list for this outer scope; they are strung off of the
23299 DECL_ARGUMENTS list for the function instead.
23300
23301 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23302 the LABEL_DECL nodes for the function however, and we output DWARF info
23303 for those in decls_for_scope. Just within the `outer_scope' there will be
23304 a BLOCK node representing the function's outermost pair of curly braces,
23305 and any blocks used for the base and member initializers of a C++
23306 constructor function. */
23307 tree outer_scope = DECL_INITIAL (decl);
23308 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23309 {
23310 int call_site_note_count = 0;
23311 int tail_call_site_note_count = 0;
23312
23313 /* Emit a DW_TAG_variable DIE for a named return value. */
23314 if (DECL_NAME (DECL_RESULT (decl)))
23315 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23316
23317 /* The first time through decls_for_scope we will generate the
23318 DIEs for the locals. The second time, we fill in the
23319 location info. */
23320 decls_for_scope (outer_scope, subr_die);
23321
23322 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23323 {
23324 struct call_arg_loc_node *ca_loc;
23325 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23326 {
23327 dw_die_ref die = NULL;
23328 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23329 rtx arg, next_arg;
23330 tree arg_decl = NULL_TREE;
23331
23332 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23333 ? XEXP (ca_loc->call_arg_loc_note, 0)
23334 : NULL_RTX);
23335 arg; arg = next_arg)
23336 {
23337 dw_loc_descr_ref reg, val;
23338 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23339 dw_die_ref cdie, tdie = NULL;
23340
23341 next_arg = XEXP (arg, 1);
23342 if (REG_P (XEXP (XEXP (arg, 0), 0))
23343 && next_arg
23344 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23345 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23346 && REGNO (XEXP (XEXP (arg, 0), 0))
23347 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23348 next_arg = XEXP (next_arg, 1);
23349 if (mode == VOIDmode)
23350 {
23351 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23352 if (mode == VOIDmode)
23353 mode = GET_MODE (XEXP (arg, 0));
23354 }
23355 if (mode == VOIDmode || mode == BLKmode)
23356 continue;
23357 /* Get dynamic information about call target only if we
23358 have no static information: we cannot generate both
23359 DW_AT_call_origin and DW_AT_call_target
23360 attributes. */
23361 if (ca_loc->symbol_ref == NULL_RTX)
23362 {
23363 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23364 {
23365 tloc = XEXP (XEXP (arg, 0), 1);
23366 continue;
23367 }
23368 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23369 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23370 {
23371 tlocc = XEXP (XEXP (arg, 0), 1);
23372 continue;
23373 }
23374 }
23375 reg = NULL;
23376 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23377 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23378 VAR_INIT_STATUS_INITIALIZED);
23379 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23380 {
23381 rtx mem = XEXP (XEXP (arg, 0), 0);
23382 reg = mem_loc_descriptor (XEXP (mem, 0),
23383 get_address_mode (mem),
23384 GET_MODE (mem),
23385 VAR_INIT_STATUS_INITIALIZED);
23386 }
23387 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23388 == DEBUG_PARAMETER_REF)
23389 {
23390 tree tdecl
23391 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23392 tdie = lookup_decl_die (tdecl);
23393 if (tdie == NULL)
23394 continue;
23395 arg_decl = tdecl;
23396 }
23397 else
23398 continue;
23399 if (reg == NULL
23400 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23401 != DEBUG_PARAMETER_REF)
23402 continue;
23403 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23404 VOIDmode,
23405 VAR_INIT_STATUS_INITIALIZED);
23406 if (val == NULL)
23407 continue;
23408 if (die == NULL)
23409 die = gen_call_site_die (decl, subr_die, ca_loc);
23410 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23411 NULL_TREE);
23412 add_desc_attribute (cdie, arg_decl);
23413 if (reg != NULL)
23414 add_AT_loc (cdie, DW_AT_location, reg);
23415 else if (tdie != NULL)
23416 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23417 tdie);
23418 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23419 if (next_arg != XEXP (arg, 1))
23420 {
23421 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23422 if (mode == VOIDmode)
23423 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23424 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23425 0), 1),
23426 mode, VOIDmode,
23427 VAR_INIT_STATUS_INITIALIZED);
23428 if (val != NULL)
23429 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23430 val);
23431 }
23432 }
23433 if (die == NULL
23434 && (ca_loc->symbol_ref || tloc))
23435 die = gen_call_site_die (decl, subr_die, ca_loc);
23436 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23437 {
23438 dw_loc_descr_ref tval = NULL;
23439
23440 if (tloc != NULL_RTX)
23441 tval = mem_loc_descriptor (tloc,
23442 GET_MODE (tloc) == VOIDmode
23443 ? Pmode : GET_MODE (tloc),
23444 VOIDmode,
23445 VAR_INIT_STATUS_INITIALIZED);
23446 if (tval)
23447 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23448 else if (tlocc != NULL_RTX)
23449 {
23450 tval = mem_loc_descriptor (tlocc,
23451 GET_MODE (tlocc) == VOIDmode
23452 ? Pmode : GET_MODE (tlocc),
23453 VOIDmode,
23454 VAR_INIT_STATUS_INITIALIZED);
23455 if (tval)
23456 add_AT_loc (die,
23457 dwarf_AT (DW_AT_call_target_clobbered),
23458 tval);
23459 }
23460 }
23461 if (die != NULL)
23462 {
23463 call_site_note_count++;
23464 if (ca_loc->tail_call_p)
23465 tail_call_site_note_count++;
23466 }
23467 }
23468 }
23469 call_arg_locations = NULL;
23470 call_arg_loc_last = NULL;
23471 if (tail_call_site_count >= 0
23472 && tail_call_site_count == tail_call_site_note_count
23473 && (!dwarf_strict || dwarf_version >= 5))
23474 {
23475 if (call_site_count >= 0
23476 && call_site_count == call_site_note_count)
23477 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23478 else
23479 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23480 }
23481 call_site_count = -1;
23482 tail_call_site_count = -1;
23483 }
23484
23485 /* Mark used types after we have created DIEs for the functions scopes. */
23486 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23487 }
23488
23489 /* Returns a hash value for X (which really is a die_struct). */
23490
23491 hashval_t
23492 block_die_hasher::hash (die_struct *d)
23493 {
23494 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23495 }
23496
23497 /* Return nonzero if the decl_id and die_parent of die_struct X are the same
23498 as the decl_id and die_parent of die_struct Y. */
23499
23500 bool
23501 block_die_hasher::equal (die_struct *x, die_struct *y)
23502 {
23503 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23504 }
23505
23506 /* Hold information about markers for inlined entry points. */
23507 struct GTY ((for_user)) inline_entry_data
23508 {
23509 /* The block that's the inlined_function_outer_scope for an inlined
23510 function. */
23511 tree block;
23512
23513 /* The label at the inlined entry point. */
23514 const char *label_pfx;
23515 unsigned int label_num;
23516
23517 /* The view number to be used as the inlined entry point. */
23518 var_loc_view view;
23519 };
23520
23521 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23522 {
23523 typedef tree compare_type;
23524 static inline hashval_t hash (const inline_entry_data *);
23525 static inline bool equal (const inline_entry_data *, const_tree);
23526 };
23527
23528 /* Hash table routines for inline_entry_data. */
23529
23530 inline hashval_t
23531 inline_entry_data_hasher::hash (const inline_entry_data *data)
23532 {
23533 return htab_hash_pointer (data->block);
23534 }
23535
23536 inline bool
23537 inline_entry_data_hasher::equal (const inline_entry_data *data,
23538 const_tree block)
23539 {
23540 return data->block == block;
23541 }
23542
23543 /* Inlined entry points pending DIE creation in this compilation unit. */
23544
23545 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23546
23547
23548 /* Return TRUE if DECL, which may have been previously generated as
23549 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23550 true if DECL (or its origin) is either an extern declaration or a
23551 class/namespace-scoped declaration.
23552
23553 The declare_in_namespace support causes us to get two DIEs for one
23554 variable, both of which are declarations. We want to avoid
23555 considering one to be a specification, so we must test for
23556 DECLARATION and DW_AT_declaration. */
23557 static inline bool
23558 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23559 {
23560 return (old_die && TREE_STATIC (decl) && !declaration
23561 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23562 }
23563
23564 /* Return true if DECL is a local static. */
23565
23566 static inline bool
23567 local_function_static (tree decl)
23568 {
23569 gcc_assert (VAR_P (decl));
23570 return TREE_STATIC (decl)
23571 && DECL_CONTEXT (decl)
23572 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23573 }
23574
23575 /* Return true iff DECL overrides (presumably completes) the type of
23576 OLD_DIE within CONTEXT_DIE. */
23577
23578 static bool
23579 override_type_for_decl_p (tree decl, dw_die_ref old_die,
23580 dw_die_ref context_die)
23581 {
23582 tree type = TREE_TYPE (decl);
23583 int cv_quals;
23584
23585 if (decl_by_reference_p (decl))
23586 {
23587 type = TREE_TYPE (type);
23588 cv_quals = TYPE_UNQUALIFIED;
23589 }
23590 else
23591 cv_quals = decl_quals (decl);
23592
23593 dw_die_ref type_die = modified_type_die (type,
23594 cv_quals | TYPE_QUALS (type),
23595 false,
23596 context_die);
23597
23598 dw_die_ref old_type_die = get_AT_ref (old_die, DW_AT_type);
23599
23600 return type_die != old_type_die;
23601 }
23602
23603 /* Generate a DIE to represent a declared data object.
23604 Either DECL or ORIGIN must be non-null. */
23605
23606 static void
23607 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23608 {
23609 HOST_WIDE_INT off = 0;
23610 tree com_decl;
23611 tree decl_or_origin = decl ? decl : origin;
23612 tree ultimate_origin;
23613 dw_die_ref var_die;
23614 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23615 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23616 || class_or_namespace_scope_p (context_die));
23617 bool specialization_p = false;
23618 bool no_linkage_name = false;
23619
23620 /* While C++ inline static data members have definitions inside the
23621 class, force the first DIE to be a declaration, then let gen_member_die
23622 reparent it to the class context and call gen_variable_die again
23623 to create the outside of the class DIE for the definition. */
23624 if (!declaration
23625 && old_die == NULL
23626 && decl
23627 && DECL_CONTEXT (decl)
23628 && TYPE_P (DECL_CONTEXT (decl))
23629 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23630 {
23631 declaration = true;
23632 if (dwarf_version < 5)
23633 no_linkage_name = true;
23634 }
23635
23636 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23637 if (decl || ultimate_origin)
23638 origin = ultimate_origin;
23639 com_decl = fortran_common (decl_or_origin, &off);
23640
23641 /* Symbol in common gets emitted as a child of the common block, in the form
23642 of a data member. */
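/* Illustrative Fortran example:

       COMMON /WORK/ A, B

   yields a DW_TAG_common_block DIE named after the common block, whose
   children are DW_TAG_variable DIEs for A and B, each located at its byte
   offset within the block.  */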
23643 if (com_decl)
23644 {
23645 dw_die_ref com_die;
23646 dw_loc_list_ref loc = NULL;
23647 die_node com_die_arg;
23648
23649 var_die = lookup_decl_die (decl_or_origin);
23650 if (var_die)
23651 {
23652 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23653 {
23654 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23655 if (loc)
23656 {
23657 if (off)
23658 {
23659 /* Optimize the common case. */
23660 if (single_element_loc_list_p (loc)
23661 && loc->expr->dw_loc_opc == DW_OP_addr
23662 && loc->expr->dw_loc_next == NULL
23663 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23664 == SYMBOL_REF)
23665 {
23666 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23667 loc->expr->dw_loc_oprnd1.v.val_addr
23668 = plus_constant (GET_MODE (x), x , off);
23669 }
23670 else
23671 loc_list_plus_const (loc, off);
23672 }
23673 add_AT_location_description (var_die, DW_AT_location, loc);
23674 remove_AT (var_die, DW_AT_declaration);
23675 }
23676 }
23677 return;
23678 }
23679
23680 if (common_block_die_table == NULL)
23681 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23682
23683 com_die_arg.decl_id = DECL_UID (com_decl);
23684 com_die_arg.die_parent = context_die;
23685 com_die = common_block_die_table->find (&com_die_arg);
23686 if (! early_dwarf)
23687 loc = loc_list_from_tree (com_decl, 2, NULL);
23688 if (com_die == NULL)
23689 {
23690 const char *cnam
23691 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23692 die_node **slot;
23693
23694 com_die = new_die (DW_TAG_common_block, context_die, decl);
23695 add_name_and_src_coords_attributes (com_die, com_decl);
23696 if (loc)
23697 {
23698 add_AT_location_description (com_die, DW_AT_location, loc);
23699 /* Avoid sharing the same loc descriptor between
23700 DW_TAG_common_block and DW_TAG_variable. */
23701 loc = loc_list_from_tree (com_decl, 2, NULL);
23702 }
23703 else if (DECL_EXTERNAL (decl_or_origin))
23704 add_AT_flag (com_die, DW_AT_declaration, 1);
23705 if (want_pubnames ())
23706 add_pubname_string (cnam, com_die); /* ??? needed? */
23707 com_die->decl_id = DECL_UID (com_decl);
23708 slot = common_block_die_table->find_slot (com_die, INSERT);
23709 *slot = com_die;
23710 }
23711 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23712 {
23713 add_AT_location_description (com_die, DW_AT_location, loc);
23714 loc = loc_list_from_tree (com_decl, 2, NULL);
23715 remove_AT (com_die, DW_AT_declaration);
23716 }
23717 var_die = new_die (DW_TAG_variable, com_die, decl);
23718 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23719 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23720 decl_quals (decl_or_origin), false,
23721 context_die);
23722 add_alignment_attribute (var_die, decl);
23723 add_AT_flag (var_die, DW_AT_external, 1);
23724 if (loc)
23725 {
23726 if (off)
23727 {
23728 /* Optimize the common case. */
23729 if (single_element_loc_list_p (loc)
23730 && loc->expr->dw_loc_opc == DW_OP_addr
23731 && loc->expr->dw_loc_next == NULL
23732 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23733 {
23734 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23735 loc->expr->dw_loc_oprnd1.v.val_addr
23736 = plus_constant (GET_MODE (x), x, off);
23737 }
23738 else
23739 loc_list_plus_const (loc, off);
23740 }
23741 add_AT_location_description (var_die, DW_AT_location, loc);
23742 }
23743 else if (DECL_EXTERNAL (decl_or_origin))
23744 add_AT_flag (var_die, DW_AT_declaration, 1);
23745 if (decl)
23746 equate_decl_number_to_die (decl, var_die);
23747 return;
23748 }
23749
23750 if (old_die)
23751 {
23752 if (declaration)
23753 {
23754 /* A declaration that has been previously dumped needs no
23755 further annotations, since it doesn't need location on
23756 the second pass. */
23757 return;
23758 }
23759 else if (decl_will_get_specification_p (old_die, decl, declaration)
23760 && !get_AT (old_die, DW_AT_specification))
23761 {
23762 /* Fall-thru so we can make a new variable die along with a
23763 DW_AT_specification. */
23764 }
23765 else if (origin && old_die->die_parent != context_die)
23766 {
23767 /* If we will be creating an inlined instance, we need a
23768 new DIE that will get annotated with
23769 DW_AT_abstract_origin. */
23770 gcc_assert (!DECL_ABSTRACT_P (decl));
23771 }
23772 else
23773 {
23774 /* If a DIE was dumped early, it still needs location info.
23775 Skip to where we fill the location bits. */
23776 var_die = old_die;
23777
23778 /* ??? In LTRANS we cannot annotate early created variably
23779 modified type DIEs without copying them and adjusting all
23780 references to them, so we dump them again. Also add a
23781 reference to them, but beware of a -g0 compile and -g link,
23782 in which case the reference will already be present. */
23783 tree type = TREE_TYPE (decl_or_origin);
23784 if (in_lto_p
23785 && ! get_AT (var_die, DW_AT_type)
23786 && variably_modified_type_p
23787 (type, decl_function_context (decl_or_origin)))
23788 {
23789 if (decl_by_reference_p (decl_or_origin))
23790 add_type_attribute (var_die, TREE_TYPE (type),
23791 TYPE_UNQUALIFIED, false, context_die);
23792 else
23793 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23794 false, context_die);
23795 }
23796
23797 goto gen_variable_die_location;
23798 }
23799 }
23800
23801 /* For static data members, the declaration in the class is supposed
23802 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
23803 also in DWARF2; the specification should still be DW_TAG_variable
23804 referencing the DW_TAG_member DIE. */
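/* Illustrative C++ example: for

       struct S { static int count; };
       int S::count;

   the in-class declaration becomes a DW_TAG_member child of S (pre-DWARF 5),
   and the namespace-scope definition below becomes a DW_TAG_variable whose
   DW_AT_specification refers back to it.  */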
23805 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23806 var_die = new_die (DW_TAG_member, context_die, decl);
23807 else
23808 var_die = new_die (DW_TAG_variable, context_die, decl);
23809
23810 if (origin != NULL)
23811 add_abstract_origin_attribute (var_die, origin);
23812
23813 /* Loop unrolling can create multiple blocks that refer to the same
23814 static variable, so we must test for the DW_AT_declaration flag.
23815
23816 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23817 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23818 sharing them.
23819
23820 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23821 else if (decl_will_get_specification_p (old_die, decl, declaration))
23822 {
23823 /* This is a definition of a C++ class level static. */
23824 add_AT_specification (var_die, old_die);
23825 specialization_p = true;
23826 if (DECL_NAME (decl))
23827 {
23828 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23829 struct dwarf_file_data * file_index = lookup_filename (s.file);
23830
23831 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23832 add_AT_file (var_die, DW_AT_decl_file, file_index);
23833
23834 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23835 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23836
23837 if (debug_column_info
23838 && s.column
23839 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23840 != (unsigned) s.column))
23841 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23842
23843 if (old_die->die_tag == DW_TAG_member)
23844 add_linkage_name (var_die, decl);
23845 }
23846 }
23847 else
23848 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23849
23850 if ((origin == NULL && !specialization_p)
23851 || (origin != NULL
23852 && !DECL_ABSTRACT_P (decl_or_origin)
23853 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23854 decl_function_context
23855 (decl_or_origin)))
23856 || (old_die && specialization_p
23857 && override_type_for_decl_p (decl_or_origin, old_die, context_die)))
23858 {
23859 tree type = TREE_TYPE (decl_or_origin);
23860
23861 if (decl_by_reference_p (decl_or_origin))
23862 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23863 context_die);
23864 else
23865 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23866 context_die);
23867 }
23868
23869 if (origin == NULL && !specialization_p)
23870 {
23871 if (TREE_PUBLIC (decl))
23872 add_AT_flag (var_die, DW_AT_external, 1);
23873
23874 if (DECL_ARTIFICIAL (decl))
23875 add_AT_flag (var_die, DW_AT_artificial, 1);
23876
23877 add_alignment_attribute (var_die, decl);
23878
23879 add_accessibility_attribute (var_die, decl);
23880 }
23881
23882 if (declaration)
23883 add_AT_flag (var_die, DW_AT_declaration, 1);
23884
23885 if (decl && (DECL_ABSTRACT_P (decl)
23886 || !old_die || is_declaration_die (old_die)))
23887 equate_decl_number_to_die (decl, var_die);
23888
23889 gen_variable_die_location:
23890 if (! declaration
23891 && (! DECL_ABSTRACT_P (decl_or_origin)
23892 /* Local static vars are shared between all clones/inlines,
23893 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23894 already set. */
23895 || (VAR_P (decl_or_origin)
23896 && TREE_STATIC (decl_or_origin)
23897 && DECL_RTL_SET_P (decl_or_origin))))
23898 {
23899 if (early_dwarf)
23900 add_pubname (decl_or_origin, var_die);
23901 else
23902 add_location_or_const_value_attribute (var_die, decl_or_origin,
23903 decl == NULL);
23904 }
23905 else
23906 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
23907
23908 if ((dwarf_version >= 4 || !dwarf_strict)
23909 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23910 DW_AT_const_expr) == 1
23911 && !get_AT (var_die, DW_AT_const_expr)
23912 && !specialization_p)
23913 add_AT_flag (var_die, DW_AT_const_expr, 1);
23914
23915 if (!dwarf_strict)
23916 {
23917 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
23918 DW_AT_inline);
23919 if (inl != -1
23920 && !get_AT (var_die, DW_AT_inline)
23921 && !specialization_p)
23922 add_AT_unsigned (var_die, DW_AT_inline, inl);
23923 }
23924 }
23925
23926 /* Generate a DIE to represent a named constant. */
23927
23928 static void
23929 gen_const_die (tree decl, dw_die_ref context_die)
23930 {
23931 dw_die_ref const_die;
23932 tree type = TREE_TYPE (decl);
23933
23934 const_die = lookup_decl_die (decl);
23935 if (const_die)
23936 return;
23937
23938 const_die = new_die (DW_TAG_constant, context_die, decl);
23939 equate_decl_number_to_die (decl, const_die);
23940 add_name_and_src_coords_attributes (const_die, decl);
23941 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
23942 if (TREE_PUBLIC (decl))
23943 add_AT_flag (const_die, DW_AT_external, 1);
23944 if (DECL_ARTIFICIAL (decl))
23945 add_AT_flag (const_die, DW_AT_artificial, 1);
23946 tree_add_const_value_attribute_for_decl (const_die, decl);
23947 }
23948
23949 /* Generate a DIE to represent a label identifier. */
23950
23951 static void
23952 gen_label_die (tree decl, dw_die_ref context_die)
23953 {
23954 tree origin = decl_ultimate_origin (decl);
23955 dw_die_ref lbl_die = lookup_decl_die (decl);
23956 rtx insn;
23957 char label[MAX_ARTIFICIAL_LABEL_BYTES];
23958
23959 if (!lbl_die)
23960 {
23961 lbl_die = new_die (DW_TAG_label, context_die, decl);
23962 equate_decl_number_to_die (decl, lbl_die);
23963
23964 if (origin != NULL)
23965 add_abstract_origin_attribute (lbl_die, origin);
23966 else
23967 add_name_and_src_coords_attributes (lbl_die, decl);
23968 }
23969
23970 if (DECL_ABSTRACT_P (decl))
23971 equate_decl_number_to_die (decl, lbl_die);
23972 else if (! early_dwarf)
23973 {
23974 insn = DECL_RTL_IF_SET (decl);
23975
23976 /* Deleted labels are programmer-specified labels which have been
23977 eliminated because of various optimizations. We still emit them
23978 here so that it is possible to put breakpoints on them. */
23979 if (insn
23980 && (LABEL_P (insn)
23981 || ((NOTE_P (insn)
23982 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
23983 {
23984 /* When optimization is enabled (via -O) some parts of the compiler
23985 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
23986 represent source-level labels which were explicitly declared by
23987 the user. This really shouldn't be happening though, so catch
23988 it if it ever does happen. */
23989 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
23990
23991 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
23992 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
23993 }
23994 else if (insn
23995 && NOTE_P (insn)
23996 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
23997 && CODE_LABEL_NUMBER (insn) != -1)
23998 {
23999 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
24000 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
24001 }
24002 }
24003 }
24004
24005 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
24006 attributes to the DIE for a block STMT, to describe where the inlined
24007 function was called from. This is similar to add_src_coords_attributes. */
24008
24009 static inline void
24010 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
24011 {
24012 /* We can end up with BUILTINS_LOCATION here. */
24013 if (RESERVED_LOCATION_P (BLOCK_SOURCE_LOCATION (stmt)))
24014 return;
24015
24016 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
24017
24018 if (dwarf_version >= 3 || !dwarf_strict)
24019 {
24020 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
24021 add_AT_unsigned (die, DW_AT_call_line, s.line);
24022 if (debug_column_info && s.column)
24023 add_AT_unsigned (die, DW_AT_call_column, s.column);
24024 }
24025 }
24026
24027
24028 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
24029 Add low_pc and high_pc attributes to the DIE for a block STMT. */
24030
24031 static inline void
24032 add_high_low_attributes (tree stmt, dw_die_ref die)
24033 {
24034 char label[MAX_ARTIFICIAL_LABEL_BYTES];
24035
24036 if (inline_entry_data **iedp
24037 = !inline_entry_data_table ? NULL
24038 : inline_entry_data_table->find_slot_with_hash (stmt,
24039 htab_hash_pointer (stmt),
24040 NO_INSERT))
24041 {
24042 inline_entry_data *ied = *iedp;
24043 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
24044 gcc_assert (debug_inline_points);
24045 gcc_assert (inlined_function_outer_scope_p (stmt));
24046
24047 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
24048 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24049
24050 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
24051 && !dwarf_strict)
24052 {
24053 if (!output_asm_line_debug_info ())
24054 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
24055 else
24056 {
24057 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
24058 /* FIXME: this will resolve to a small number. Could we
24059 possibly emit smaller data? Ideally we'd emit a
24060 uleb128, but that would make the size of DIEs
24061 impossible for the compiler to compute, since it's
24062 the assembler that computes the value of the view
24063 label in this case. Ideally, we'd have a single form
24064 encompassing both the address and the view, and
24065 indirecting them through a table might make things
24066 easier, but even that would be more wasteful,
24067 space-wise, than what we have now. */
24068 add_AT_symview (die, DW_AT_GNU_entry_view, label);
24069 }
24070 }
24071
24072 inline_entry_data_table->clear_slot (iedp);
24073 }
24074
24075 if (BLOCK_FRAGMENT_CHAIN (stmt)
24076 && (dwarf_version >= 3 || !dwarf_strict))
24077 {
24078 tree chain, superblock = NULL_TREE;
24079 dw_die_ref pdie;
24080 dw_attr_node *attr = NULL;
24081
24082 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
24083 {
24084 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24085 BLOCK_NUMBER (stmt));
24086 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24087 }
24088
24089 /* Optimize duplicate .debug_ranges lists or even tails of
24090 lists. If this BLOCK has the same ranges as its supercontext,
24091 look up the DW_AT_ranges attribute in the supercontext (and
24092 recursively so), verify that the ranges_table contains the
24093 right values and use it instead of adding a new .debug_ranges entry. */
24094 for (chain = stmt, pdie = die;
24095 BLOCK_SAME_RANGE (chain);
24096 chain = BLOCK_SUPERCONTEXT (chain))
24097 {
24098 dw_attr_node *new_attr;
24099
24100 pdie = pdie->die_parent;
24101 if (pdie == NULL)
24102 break;
24103 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
24104 break;
24105 new_attr = get_AT (pdie, DW_AT_ranges);
24106 if (new_attr == NULL
24107 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
24108 break;
24109 attr = new_attr;
24110 superblock = BLOCK_SUPERCONTEXT (chain);
24111 }
24112 if (attr != NULL
24113 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
24114 == (int)BLOCK_NUMBER (superblock))
24115 && BLOCK_FRAGMENT_CHAIN (superblock))
24116 {
24117 unsigned long off = attr->dw_attr_val.v.val_offset;
24118 unsigned long supercnt = 0, thiscnt = 0;
24119 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
24120 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24121 {
24122 ++supercnt;
24123 gcc_checking_assert ((*ranges_table)[off + supercnt].num
24124 == (int)BLOCK_NUMBER (chain));
24125 }
24126 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
24127 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
24128 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24129 ++thiscnt;
24130 gcc_assert (supercnt >= thiscnt);
24131 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24132 false);
24133 note_rnglist_head (off + supercnt - thiscnt);
24134 return;
24135 }
24136
24137 unsigned int offset = add_ranges (stmt, true);
24138 add_AT_range_list (die, DW_AT_ranges, offset, false);
24139 note_rnglist_head (offset);
24140
24141 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24142 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24143 do
24144 {
24145 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24146 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24147 chain = BLOCK_FRAGMENT_CHAIN (chain);
24148 }
24149 while (chain);
24150 add_ranges (NULL);
24151 }
24152 else
24153 {
24154 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24155 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24156 BLOCK_NUMBER (stmt));
24157 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24158 BLOCK_NUMBER (stmt));
24159 add_AT_low_high_pc (die, label, label_high, false);
24160 }
24161 }
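/* Illustration of the resulting attributes (label syntax depends on the
   target; .LBBn/.LBEn is typical for ELF): a contiguous lexical block
   number 3 gets roughly
     DW_AT_low_pc   .LBB3
     DW_AT_high_pc  .LBE3
   whereas a block split into hot/cold fragments gets a DW_AT_ranges
   entry covering each fragment instead, and the outermost block of an
   inlined function may additionally get DW_AT_entry_pc (plus
   DW_AT_GNU_entry_view when location views are in use and -gstrict-dwarf
   is not).  */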
24162
24163 /* Generate a DIE for a lexical block. */
24164
24165 static void
24166 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24167 {
24168 dw_die_ref old_die = lookup_block_die (stmt);
24169 dw_die_ref stmt_die = NULL;
24170 if (!old_die)
24171 {
24172 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24173 equate_block_to_die (stmt, stmt_die);
24174 }
24175
24176 if (BLOCK_ABSTRACT_ORIGIN (stmt))
24177 {
24178 /* If this is an inlined or concrete instance, create a new lexical
24179 die for anything below to attach DW_AT_abstract_origin to. */
24180 if (old_die)
24181 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24182
24183 tree origin = block_ultimate_origin (stmt);
24184 if (origin != NULL_TREE && (origin != stmt || old_die))
24185 add_abstract_origin_attribute (stmt_die, origin);
24186
24187 old_die = NULL;
24188 }
24189
24190 if (old_die)
24191 stmt_die = old_die;
24192
24193 /* A non-abstract block whose blocks have already been reordered
24194 should have the instruction range for this block. If so, set the
24195 high/low attributes. */
24196 if (!early_dwarf && TREE_ASM_WRITTEN (stmt))
24197 {
24198 gcc_assert (stmt_die);
24199 add_high_low_attributes (stmt, stmt_die);
24200 }
24201
24202 decls_for_scope (stmt, stmt_die);
24203 }
24204
24205 /* Generate a DIE for an inlined subprogram. */
24206
24207 static void
24208 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24209 {
24210 tree decl = block_ultimate_origin (stmt);
24211
24212 /* Make sure any inlined functions are known to be inlineable. */
24213 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24214 || cgraph_function_possibly_inlined_p (decl));
24215
24216 dw_die_ref subr_die = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24217
24218 if (call_arg_locations || debug_inline_points)
24219 equate_block_to_die (stmt, subr_die);
24220 add_abstract_origin_attribute (subr_die, decl);
24221 if (TREE_ASM_WRITTEN (stmt))
24222 add_high_low_attributes (stmt, subr_die);
24223 add_call_src_coords_attributes (stmt, subr_die);
24224
24225 /* The inliner creates an extra BLOCK for the parameter setup;
24226 we want to merge that with the actual outermost BLOCK of the
24227 inlined function to avoid duplicate locals in consumers.
24228 Do that by performing the recursion into subblocks on the single
24229 subblock of STMT rather than on STMT itself. */
24230 bool unwrap_one = false;
24231 if (BLOCK_SUBBLOCKS (stmt) && !BLOCK_CHAIN (BLOCK_SUBBLOCKS (stmt)))
24232 {
24233 tree origin = block_ultimate_origin (BLOCK_SUBBLOCKS (stmt));
24234 if (origin
24235 && TREE_CODE (origin) == BLOCK
24236 && BLOCK_SUPERCONTEXT (origin) == decl)
24237 unwrap_one = true;
24238 }
24239 decls_for_scope (stmt, subr_die, !unwrap_one);
24240 if (unwrap_one)
24241 decls_for_scope (BLOCK_SUBBLOCKS (stmt), subr_die);
24242 }
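/* Illustration of the BLOCK tree handled by the unwrap_one case above
   (names are descriptive only):

     STMT                      <- extra BLOCK added by the inliner for
       |                          the parameter setup
       +-- single subblock     <- ultimate origin is the callee's
             |                    outermost BLOCK
             +-- subblocks and locals of the inlined body

   Emitting STMT's own decls and then recursing through the single
   subblock puts everything directly under the DW_TAG_inlined_subroutine
   DIE, so consumers do not see the same locals in two nested scopes.  */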
24243
24244 /* Generate a DIE for a field in a record, or structure. CTX is required: see
24245 the comment for VLR_CONTEXT. */
24246
24247 static void
24248 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24249 {
24250 dw_die_ref decl_die;
24251
24252 if (TREE_TYPE (decl) == error_mark_node)
24253 return;
24254
24255 decl_die = new_die (DW_TAG_member, context_die, decl);
24256 add_name_and_src_coords_attributes (decl_die, decl);
24257 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24258 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24259 context_die);
24260
24261 if (DECL_BIT_FIELD_TYPE (decl))
24262 {
24263 add_byte_size_attribute (decl_die, decl);
24264 add_bit_size_attribute (decl_die, decl);
24265 add_bit_offset_attribute (decl_die, decl, ctx);
24266 }
24267
24268 add_alignment_attribute (decl_die, decl);
24269
24270 /* If we have a variant part offset, then we are supposed to process a member
24271 of a QUAL_UNION_TYPE, which is how we represent variant parts in
24272 trees. */
24273 gcc_assert (ctx->variant_part_offset == NULL_TREE
24274 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24275 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24276 add_data_member_location_attribute (decl_die, decl, ctx);
24277
24278 if (DECL_ARTIFICIAL (decl))
24279 add_AT_flag (decl_die, DW_AT_artificial, 1);
24280
24281 add_accessibility_attribute (decl_die, decl);
24282
24283 /* Equate decl number to die, so that we can look up this decl later on. */
24284 equate_decl_number_to_die (decl, decl_die);
24285 }
24286
24287 /* Generate a DIE for a pointer to a member type. TYPE can be an
24288 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24289 pointer to member function. */
24290
24291 static void
24292 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24293 {
24294 if (lookup_type_die (type))
24295 return;
24296
24297 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24298 scope_die_for (type, context_die), type);
24299
24300 equate_type_number_to_die (type, ptr_die);
24301 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24302 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24303 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24304 context_die);
24305 add_alignment_attribute (ptr_die, type);
24306
24307 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24308 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24309 {
24310 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24311 add_AT_loc (ptr_die, DW_AT_use_location, op);
24312 }
24313 }
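/* Illustration (hypothetical C++ declaration): for
     int S::*pm;
   the OFFSET_TYPE of PM yields a DW_TAG_ptr_to_member_type DIE whose
   DW_AT_containing_type refers to S, whose DW_AT_type refers to int and
   whose DW_AT_use_location is the single opcode DW_OP_plus, i.e. the
   member's address is the object address plus the pointer-to-member
   value.  Pointers to member functions get no DW_AT_use_location
   here.  */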
24314
24315 static char *producer_string;
24316
24317 /* Return a heap allocated producer string including command line options
24318 if -grecord-gcc-switches. */
24319
24320 static char *
24321 gen_producer_string (void)
24322 {
24323 size_t j;
24324 auto_vec<const char *> switches;
24325 const char *language_string = lang_hooks.name;
24326 char *producer, *tail;
24327 const char *p;
24328 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24329 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24330
24331 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24332 switch (save_decoded_options[j].opt_index)
24333 {
24334 case OPT_o:
24335 case OPT_d:
24336 case OPT_dumpbase:
24337 case OPT_dumpdir:
24338 case OPT_auxbase:
24339 case OPT_auxbase_strip:
24340 case OPT_quiet:
24341 case OPT_version:
24342 case OPT_v:
24343 case OPT_w:
24344 case OPT_L:
24345 case OPT_D:
24346 case OPT_I:
24347 case OPT_U:
24348 case OPT_SPECIAL_unknown:
24349 case OPT_SPECIAL_ignore:
24350 case OPT_SPECIAL_warn_removed:
24351 case OPT_SPECIAL_program_name:
24352 case OPT_SPECIAL_input_file:
24353 case OPT_grecord_gcc_switches:
24354 case OPT__output_pch_:
24355 case OPT_fdiagnostics_show_location_:
24356 case OPT_fdiagnostics_show_option:
24357 case OPT_fdiagnostics_show_caret:
24358 case OPT_fdiagnostics_show_labels:
24359 case OPT_fdiagnostics_show_line_numbers:
24360 case OPT_fdiagnostics_color_:
24361 case OPT_fdiagnostics_format_:
24362 case OPT_fverbose_asm:
24363 case OPT____:
24364 case OPT__sysroot_:
24365 case OPT_nostdinc:
24366 case OPT_nostdinc__:
24367 case OPT_fpreprocessed:
24368 case OPT_fltrans_output_list_:
24369 case OPT_fresolution_:
24370 case OPT_fdebug_prefix_map_:
24371 case OPT_fmacro_prefix_map_:
24372 case OPT_ffile_prefix_map_:
24373 case OPT_fcompare_debug:
24374 case OPT_fchecking:
24375 case OPT_fchecking_:
24376 /* Ignore these. */
24377 continue;
24378 case OPT_flto_:
24379 {
24380 const char *lto_canonical = "-flto";
24381 switches.safe_push (lto_canonical);
24382 len += strlen (lto_canonical) + 1;
24383 break;
24384 }
24385 default:
24386 if (cl_options[save_decoded_options[j].opt_index].flags
24387 & CL_NO_DWARF_RECORD)
24388 continue;
24389 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24390 == '-');
24391 switch (save_decoded_options[j].canonical_option[0][1])
24392 {
24393 case 'M':
24394 case 'i':
24395 case 'W':
24396 continue;
24397 case 'f':
24398 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24399 "dump", 4) == 0)
24400 continue;
24401 break;
24402 default:
24403 break;
24404 }
24405 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24406 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24407 break;
24408 }
24409
24410 producer = XNEWVEC (char, plen + 1 + len + 1);
24411 tail = producer;
24412 sprintf (tail, "%s %s", language_string, version_string);
24413 tail += plen;
24414
24415 FOR_EACH_VEC_ELT (switches, j, p)
24416 {
24417 len = strlen (p);
24418 *tail = ' ';
24419 memcpy (tail + 1, p, len);
24420 tail += len + 1;
24421 }
24422
24423 *tail = '\0';
24424 return producer;
24425 }
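/* Illustration (version number and flags are made up): when
   dwarf_record_gcc_switches is set, the result looks like
     "GNU C17 10.2.0 -march=x86-64 -O2 -g"
   i.e. "<language> <version>" followed by the recorded command-line
   switches, with the options filtered out above (-o, -W*, -D/-I/-U,
   diagnostics and dump flags, prefix maps, ...) omitted; without switch
   recording only "<language> <version>" is produced.  */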
24426
24427 /* Given a C and/or C++ language/version string return the "highest".
24428 C++ is assumed to be "higher" than C in this case. Used for merging
24429 LTO translation unit languages. */
24430 static const char *
24431 highest_c_language (const char *lang1, const char *lang2)
24432 {
24433 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24434 return "GNU C++17";
24435 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24436 return "GNU C++14";
24437 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24438 return "GNU C++11";
24439 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24440 return "GNU C++98";
24441
24442 if (strcmp ("GNU C2X", lang1) == 0 || strcmp ("GNU C2X", lang2) == 0)
24443 return "GNU C2X";
24444 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24445 return "GNU C17";
24446 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24447 return "GNU C11";
24448 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24449 return "GNU C99";
24450 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24451 return "GNU C89";
24452
24453 gcc_unreachable ();
24454 }
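/* E.g. highest_c_language ("GNU C11", "GNU C++14") returns "GNU C++14"
   and highest_c_language ("GNU C99", "GNU C17") returns "GNU C17"; any
   C++ dialect outranks any C dialect, and newer standards outrank older
   ones.  */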
24455
24456
24457 /* Generate the DIE for the compilation unit. */
24458
24459 static dw_die_ref
24460 gen_compile_unit_die (const char *filename)
24461 {
24462 dw_die_ref die;
24463 const char *language_string = lang_hooks.name;
24464 int language;
24465
24466 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24467
24468 if (filename)
24469 {
24470 add_name_attribute (die, filename);
24471 /* Don't add cwd for <built-in>. */
24472 if (filename[0] != '<')
24473 add_comp_dir_attribute (die);
24474 }
24475
24476 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24477
24478 /* If our producer is LTO try to figure out a common language to use
24479 from the global list of translation units. */
24480 if (strcmp (language_string, "GNU GIMPLE") == 0)
24481 {
24482 unsigned i;
24483 tree t;
24484 const char *common_lang = NULL;
24485
24486 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24487 {
24488 if (!TRANSLATION_UNIT_LANGUAGE (t))
24489 continue;
24490 if (!common_lang)
24491 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24492 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24493 ;
24494 else if (strncmp (common_lang, "GNU C", 5) == 0
24495 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24496 /* Mixing C and C++ is ok, use C++ in that case. */
24497 common_lang = highest_c_language (common_lang,
24498 TRANSLATION_UNIT_LANGUAGE (t));
24499 else
24500 {
24501 /* Fall back to C. */
24502 common_lang = NULL;
24503 break;
24504 }
24505 }
24506
24507 if (common_lang)
24508 language_string = common_lang;
24509 }
24510
24511 language = DW_LANG_C;
24512 if (strncmp (language_string, "GNU C", 5) == 0
24513 && ISDIGIT (language_string[5]))
24514 {
24515 language = DW_LANG_C89;
24516 if (dwarf_version >= 3 || !dwarf_strict)
24517 {
24518 if (strcmp (language_string, "GNU C89") != 0)
24519 language = DW_LANG_C99;
24520
24521 if (dwarf_version >= 5 /* || !dwarf_strict */)
24522 if (strcmp (language_string, "GNU C11") == 0
24523 || strcmp (language_string, "GNU C17") == 0
24524 || strcmp (language_string, "GNU C2X") == 0)
24525 language = DW_LANG_C11;
24526 }
24527 }
24528 else if (strncmp (language_string, "GNU C++", 7) == 0)
24529 {
24530 language = DW_LANG_C_plus_plus;
24531 if (dwarf_version >= 5 /* || !dwarf_strict */)
24532 {
24533 if (strcmp (language_string, "GNU C++11") == 0)
24534 language = DW_LANG_C_plus_plus_11;
24535 else if (strcmp (language_string, "GNU C++14") == 0)
24536 language = DW_LANG_C_plus_plus_14;
24537 else if (strcmp (language_string, "GNU C++17") == 0)
24538 /* For now. */
24539 language = DW_LANG_C_plus_plus_14;
24540 }
24541 }
24542 else if (strcmp (language_string, "GNU F77") == 0)
24543 language = DW_LANG_Fortran77;
24544 else if (dwarf_version >= 3 || !dwarf_strict)
24545 {
24546 if (strcmp (language_string, "GNU Ada") == 0)
24547 language = DW_LANG_Ada95;
24548 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24549 {
24550 language = DW_LANG_Fortran95;
24551 if (dwarf_version >= 5 /* || !dwarf_strict */)
24552 {
24553 if (strcmp (language_string, "GNU Fortran2003") == 0)
24554 language = DW_LANG_Fortran03;
24555 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24556 language = DW_LANG_Fortran08;
24557 }
24558 }
24559 else if (strcmp (language_string, "GNU Objective-C") == 0)
24560 language = DW_LANG_ObjC;
24561 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24562 language = DW_LANG_ObjC_plus_plus;
24563 else if (strcmp (language_string, "GNU D") == 0)
24564 language = DW_LANG_D;
24565 else if (dwarf_version >= 5 || !dwarf_strict)
24566 {
24567 if (strcmp (language_string, "GNU Go") == 0)
24568 language = DW_LANG_Go;
24569 }
24570 }
24571 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24572 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24573 language = DW_LANG_Fortran90;
24574 /* Likewise for Ada. */
24575 else if (strcmp (language_string, "GNU Ada") == 0)
24576 language = DW_LANG_Ada83;
24577
24578 add_AT_unsigned (die, DW_AT_language, language);
24579
24580 switch (language)
24581 {
24582 case DW_LANG_Fortran77:
24583 case DW_LANG_Fortran90:
24584 case DW_LANG_Fortran95:
24585 case DW_LANG_Fortran03:
24586 case DW_LANG_Fortran08:
24587 /* Fortran has case insensitive identifiers and the front-end
24588 lowercases everything. */
24589 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24590 break;
24591 default:
24592 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24593 break;
24594 }
24595 return die;
24596 }
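/* Illustration of the language mapping above: with -gdwarf-5 a
   "GNU C++14" unit gets DW_AT_language DW_LANG_C_plus_plus_14, while
   with -gdwarf-2 -gstrict-dwarf it is downgraded to plain
   DW_LANG_C_plus_plus; likewise "GNU Fortran2008" yields
   DW_LANG_Fortran08 under DWARF 5 but only DW_LANG_Fortran90 in strict
   DWARF 2, and the Fortran cases also get DW_AT_identifier_case
   DW_ID_down_case.  */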
24597
24598 /* Generate the DIE for a base class. */
24599
24600 static void
24601 gen_inheritance_die (tree binfo, tree access, tree type,
24602 dw_die_ref context_die)
24603 {
24604 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24605 struct vlr_context ctx = { type, NULL };
24606
24607 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24608 context_die);
24609 add_data_member_location_attribute (die, binfo, &ctx);
24610
24611 if (BINFO_VIRTUAL_P (binfo))
24612 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24613
24614 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24615 children, otherwise the default is DW_ACCESS_public. In DWARF2
24616 the default has always been DW_ACCESS_private. */
24617 if (access == access_public_node)
24618 {
24619 if (dwarf_version == 2
24620 || context_die->die_tag == DW_TAG_class_type)
24621 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24622 }
24623 else if (access == access_protected_node)
24624 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24625 else if (dwarf_version > 2
24626 && context_die->die_tag != DW_TAG_class_type)
24627 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24628 }
24629
24630 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24631 structure. */
24632
24633 static bool
24634 is_variant_part (tree decl)
24635 {
24636 return (TREE_CODE (decl) == FIELD_DECL
24637 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24638 }
24639
24640 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24641 return the FIELD_DECL. Return NULL_TREE otherwise. */
24642
24643 static tree
24644 analyze_discr_in_predicate (tree operand, tree struct_type)
24645 {
24646 while (CONVERT_EXPR_P (operand))
24647 operand = TREE_OPERAND (operand, 0);
24648
24649 /* Match field access to members of struct_type only. */
24650 if (TREE_CODE (operand) == COMPONENT_REF
24651 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24652 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24653 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24654 return TREE_OPERAND (operand, 1);
24655 else
24656 return NULL_TREE;
24657 }
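/* E.g. for a qualifier of the form
     (PLACEHOLDER_EXPR of STRUCT_TYPE).k == 3
   the first EQ_EXPR operand is a COMPONENT_REF whose base is the
   PLACEHOLDER_EXPR and whose second operand is the FIELD_DECL for the
   (hypothetical) discriminant "k"; that FIELD_DECL is what gets
   returned.  */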
24658
24659 /* Check that SRC is a constant integer that can be represented as a native
24660 integer constant (either signed or unsigned). If so, store it into DEST and
24661 return true. Return false otherwise. */
24662
24663 static bool
24664 get_discr_value (tree src, dw_discr_value *dest)
24665 {
24666 tree discr_type = TREE_TYPE (src);
24667
24668 if (lang_hooks.types.get_debug_type)
24669 {
24670 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24671 if (debug_type != NULL)
24672 discr_type = debug_type;
24673 }
24674
24675 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24676 return false;
24677
24678 /* Signedness can vary between the original type and the debug type. This
24679 can happen for character types in Ada for instance: the character type
24680 used for code generation can be signed, to be compatible with the C one,
24681 but from a debugger point of view, it must be unsigned. */
24682 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24683 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24684
24685 if (is_orig_unsigned != is_debug_unsigned)
24686 src = fold_convert (discr_type, src);
24687
24688 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24689 return false;
24690
24691 dest->pos = is_debug_unsigned;
24692 if (is_debug_unsigned)
24693 dest->v.uval = tree_to_uhwi (src);
24694 else
24695 dest->v.sval = tree_to_shwi (src);
24696
24697 return true;
24698 }
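/* Illustration (made-up value): for an 8-bit Ada Character whose
   code-generation type is signed but whose debug type is unsigned, an
   INTEGER_CST of -56 is folded to 200 (same bit pattern, 0xc8), so
   DEST->pos is set to 1 and DEST->v.uval to 200.  */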
24699
24700 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24701 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24702 store NULL_TREE in DISCR_DECL. Otherwise:
24703
24704 - store the discriminant field in STRUCT_TYPE that controls the variant
24705 part to *DISCR_DECL
24706
24707 - put in *DISCR_LISTS_P an array where for each variant, the item
24708 represents the corresponding matching list of discriminant values.
24709
24710 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24711 the above array.
24712
24713 Note that when the array is allocated (i.e. when the analysis is
24714 successful), it is up to the caller to free the array. */
24715
24716 static void
24717 analyze_variants_discr (tree variant_part_decl,
24718 tree struct_type,
24719 tree *discr_decl,
24720 dw_discr_list_ref **discr_lists_p,
24721 unsigned *discr_lists_length)
24722 {
24723 tree variant_part_type = TREE_TYPE (variant_part_decl);
24724 tree variant;
24725 dw_discr_list_ref *discr_lists;
24726 unsigned i;
24727
24728 /* Compute how many variants there are in this variant part. */
24729 *discr_lists_length = 0;
24730 for (variant = TYPE_FIELDS (variant_part_type);
24731 variant != NULL_TREE;
24732 variant = DECL_CHAIN (variant))
24733 ++*discr_lists_length;
24734
24735 *discr_decl = NULL_TREE;
24736 *discr_lists_p
24737 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24738 sizeof (**discr_lists_p));
24739 discr_lists = *discr_lists_p;
24740
24741 /* And then analyze all variants to extract discriminant information for all
24742 of them. This analysis is conservative: as soon as we detect something we
24743 do not support, abort everything and pretend we found nothing. */
24744 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24745 variant != NULL_TREE;
24746 variant = DECL_CHAIN (variant), ++i)
24747 {
24748 tree match_expr = DECL_QUALIFIER (variant);
24749
24750 /* Now, try to analyze the predicate and deduce a discriminant for
24751 it. */
24752 if (match_expr == boolean_true_node)
24753 /* Typically happens for the default variant: it matches all cases that
24754 previous variants rejected. Don't output any matching value for
24755 this one. */
24756 continue;
24757
24758 /* The following loop tries to iterate over each discriminant
24759 possibility: single values or ranges. */
24760 while (match_expr != NULL_TREE)
24761 {
24762 tree next_round_match_expr;
24763 tree candidate_discr = NULL_TREE;
24764 dw_discr_list_ref new_node = NULL;
24765
24766 /* Possibilities are matched one after the other by nested
24767 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24768 continue with the rest at next iteration. */
24769 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24770 {
24771 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24772 match_expr = TREE_OPERAND (match_expr, 1);
24773 }
24774 else
24775 next_round_match_expr = NULL_TREE;
24776
24777 if (match_expr == boolean_false_node)
24778 /* This sub-expression matches nothing: just wait for the next
24779 one. */
24780 ;
24781
24782 else if (TREE_CODE (match_expr) == EQ_EXPR)
24783 {
24784 /* We are matching: <discr_field> == <integer_cst>
24785 This sub-expression matches a single value. */
24786 tree integer_cst = TREE_OPERAND (match_expr, 1);
24787
24788 candidate_discr
24789 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24790 struct_type);
24791
24792 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24793 if (!get_discr_value (integer_cst,
24794 &new_node->dw_discr_lower_bound))
24795 goto abort;
24796 new_node->dw_discr_range = false;
24797 }
24798
24799 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24800 {
24801 /* We are matching:
24802 <discr_field> > <integer_cst>
24803 && <discr_field> < <integer_cst>.
24804 This sub-expression matches the range of values between the
24805 two matched integer constants. Note that comparisons can be
24806 inclusive or exclusive. */
24807 tree candidate_discr_1, candidate_discr_2;
24808 tree lower_cst, upper_cst;
24809 bool lower_cst_included, upper_cst_included;
24810 tree lower_op = TREE_OPERAND (match_expr, 0);
24811 tree upper_op = TREE_OPERAND (match_expr, 1);
24812
24813 /* When the comparison is exclusive, the integer constant is not
24814 the discriminant range bound we are looking for: we will have
24815 to increment or decrement it. */
24816 if (TREE_CODE (lower_op) == GE_EXPR)
24817 lower_cst_included = true;
24818 else if (TREE_CODE (lower_op) == GT_EXPR)
24819 lower_cst_included = false;
24820 else
24821 goto abort;
24822
24823 if (TREE_CODE (upper_op) == LE_EXPR)
24824 upper_cst_included = true;
24825 else if (TREE_CODE (upper_op) == LT_EXPR)
24826 upper_cst_included = false;
24827 else
24828 goto abort;
24829
24830 /* Extract the discriminant from the first operand and check it
24831 is consistent with the same analysis in the second
24832 operand. */
24833 candidate_discr_1
24834 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24835 struct_type);
24836 candidate_discr_2
24837 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24838 struct_type);
24839 if (candidate_discr_1 == candidate_discr_2)
24840 candidate_discr = candidate_discr_1;
24841 else
24842 goto abort;
24843
24844 /* Extract bounds from both. */
24845 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24846 lower_cst = TREE_OPERAND (lower_op, 1);
24847 upper_cst = TREE_OPERAND (upper_op, 1);
24848
24849 if (!lower_cst_included)
24850 lower_cst
24851 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24852 build_int_cst (TREE_TYPE (lower_cst), 1));
24853 if (!upper_cst_included)
24854 upper_cst
24855 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24856 build_int_cst (TREE_TYPE (upper_cst), 1));
24857
24858 if (!get_discr_value (lower_cst,
24859 &new_node->dw_discr_lower_bound)
24860 || !get_discr_value (upper_cst,
24861 &new_node->dw_discr_upper_bound))
24862 goto abort;
24863
24864 new_node->dw_discr_range = true;
24865 }
24866
24867 else if ((candidate_discr
24868 = analyze_discr_in_predicate (match_expr, struct_type))
24869 && TREE_TYPE (candidate_discr) == boolean_type_node)
24870 {
24871 /* We are matching: <discr_field> for a boolean discriminant.
24872 This sub-expression matches boolean_true_node. */
24873 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24874 if (!get_discr_value (boolean_true_node,
24875 &new_node->dw_discr_lower_bound))
24876 goto abort;
24877 new_node->dw_discr_range = false;
24878 }
24879
24880 else
24881 /* Unsupported sub-expression: we cannot determine the set of
24882 matching discriminant values. Abort everything. */
24883 goto abort;
24884
24885 /* If the discriminant info is not consistent with what we saw so
24886 far, consider the analysis failed and abort everything. */
24887 if (candidate_discr == NULL_TREE
24888 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24889 goto abort;
24890 else
24891 *discr_decl = candidate_discr;
24892
24893 if (new_node != NULL)
24894 {
24895 new_node->dw_discr_next = discr_lists[i];
24896 discr_lists[i] = new_node;
24897 }
24898 match_expr = next_round_match_expr;
24899 }
24900 }
24901
24902 /* If we reach this point, we could match everything we were interested
24903 in. */
24904 return;
24905
24906 abort:
24907 /* Clean all data structure and return no result. */
24908 free (*discr_lists_p);
24909 *discr_lists_p = NULL;
24910 *discr_decl = NULL_TREE;
24911 }
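/* Illustration (hypothetical Ada record):

     type Rec (K : Integer) is record
        case K is
           when 1 .. 4 => A : Integer;
           when 5      => B : Boolean;
           when others => null;
        end case;
     end record;

   The QUAL_UNION_TYPE for the variant part has three members whose
   DECL_QUALIFIERs are roughly
     variant 1:  K >= 1 && K <= 4     (TRUTH_ANDIF_EXPR range)
     variant 2:  K == 5               (EQ_EXPR single value)
     variant 3:  boolean_true_node    (default variant)
   so the analysis above reports K as *DISCR_DECL, the list { [1, 4] }
   for the first variant, { 5 } for the second and NULL for the
   default.  */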
24912
24913 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
24914 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
24915 under CONTEXT_DIE.
24916
24917 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
24918 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
24919 this type, which are record types, represent the available variants and each
24920 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
24921 values are inferred from these attributes.
24922
24923 In trees, the offsets for the fields inside these sub-records are relative
24924 to the variant part itself, whereas the corresponding DIEs should have
24925 offset attributes that are relative to the embedding record base address.
24926 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
24927 must be an expression that computes the offset of the variant part to
24928 describe in DWARF. */
24929
24930 static void
24931 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
24932 dw_die_ref context_die)
24933 {
24934 const tree variant_part_type = TREE_TYPE (variant_part_decl);
24935 tree variant_part_offset = vlr_ctx->variant_part_offset;
24936 struct loc_descr_context ctx = {
24937 vlr_ctx->struct_type, /* context_type */
24938 NULL_TREE, /* base_decl */
24939 NULL, /* dpi */
24940 false, /* placeholder_arg */
24941 false /* placeholder_seen */
24942 };
24943
24944 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
24945 NULL_TREE if there is no such field. */
24946 tree discr_decl = NULL_TREE;
24947 dw_discr_list_ref *discr_lists;
24948 unsigned discr_lists_length = 0;
24949 unsigned i;
24950
24951 dw_die_ref dwarf_proc_die = NULL;
24952 dw_die_ref variant_part_die
24953 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
24954
24955 equate_decl_number_to_die (variant_part_decl, variant_part_die);
24956
24957 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
24958 &discr_decl, &discr_lists, &discr_lists_length);
24959
24960 if (discr_decl != NULL_TREE)
24961 {
24962 dw_die_ref discr_die = lookup_decl_die (discr_decl);
24963
24964 if (discr_die)
24965 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
24966 else
24967 /* We have no DIE for the discriminant, so just discard all
24968 discriminant information in the output. */
24969 discr_decl = NULL_TREE;
24970 }
24971
24972 /* If the offset for this variant part is more complex than a constant,
24973 create a DWARF procedure for it so that we will not have to generate DWARF
24974 expressions for it for each member. */
24975 if (TREE_CODE (variant_part_offset) != INTEGER_CST
24976 && (dwarf_version >= 3 || !dwarf_strict))
24977 {
24978 const tree dwarf_proc_fndecl
24979 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
24980 build_function_type (TREE_TYPE (variant_part_offset),
24981 NULL_TREE));
24982 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
24983 const dw_loc_descr_ref dwarf_proc_body
24984 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
24985
24986 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
24987 dwarf_proc_fndecl, context_die);
24988 if (dwarf_proc_die != NULL)
24989 variant_part_offset = dwarf_proc_call;
24990 }
24991
24992 /* Output DIEs for all variants. */
24993 i = 0;
24994 for (tree variant = TYPE_FIELDS (variant_part_type);
24995 variant != NULL_TREE;
24996 variant = DECL_CHAIN (variant), ++i)
24997 {
24998 tree variant_type = TREE_TYPE (variant);
24999 dw_die_ref variant_die;
25000
25001 /* All variants (i.e. members of a variant part) are supposed to be
25002 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
25003 under these records. */
25004 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
25005
25006 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
25007 equate_decl_number_to_die (variant, variant_die);
25008
25009 /* Output discriminant values this variant matches, if any. */
25010 if (discr_decl == NULL || discr_lists[i] == NULL)
25011 /* If we have no discriminant information at all, or no matching
25012 list for this variant, this is probably the default variant: as the
25013 standard says, don't output any discriminant value/list attribute. */
25014 ;
25015 else if (discr_lists[i]->dw_discr_next == NULL
25016 && !discr_lists[i]->dw_discr_range)
25017 /* If there is only one accepted value, don't bother outputting a
25018 list. */
25019 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
25020 else
25021 add_discr_list (variant_die, discr_lists[i]);
25022
25023 for (tree member = TYPE_FIELDS (variant_type);
25024 member != NULL_TREE;
25025 member = DECL_CHAIN (member))
25026 {
25027 struct vlr_context vlr_sub_ctx = {
25028 vlr_ctx->struct_type, /* struct_type */
25029 NULL /* variant_part_offset */
25030 };
25031 if (is_variant_part (member))
25032 {
25033 /* All offsets for fields inside variant parts are relative to
25034 the top-level embedding RECORD_TYPE's base address. On the
25035 other hand, offsets in GCC's types are relative to the
25036 nested-most variant part. So we have to sum offsets each time
25037 we recurse. */
25038
25039 vlr_sub_ctx.variant_part_offset
25040 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
25041 variant_part_offset, byte_position (member));
25042 gen_variant_part (member, &vlr_sub_ctx, variant_die);
25043 }
25044 else
25045 {
25046 vlr_sub_ctx.variant_part_offset = variant_part_offset;
25047 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
25048 }
25049 }
25050 }
25051
25052 free (discr_lists);
25053 }
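/* Illustration of the DWARF produced (attribute values elided), for a
   variant part discriminated by a member K with variants for 1 .. 4,
   5 and a default, roughly:

     DW_TAG_variant_part
       DW_AT_discr            -> DIE of the discriminant member K
       DW_TAG_variant
         DW_AT_discr_list     [range 1 .. 4]
         DW_TAG_member ...    (fields of that variant)
       DW_TAG_variant
         DW_AT_discr_value    5
         DW_TAG_member ...
       DW_TAG_variant         (default: no discr value/list)
         DW_TAG_member ...  */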
25054
25055 /* Generate a DIE for a class member. */
25056
25057 static void
25058 gen_member_die (tree type, dw_die_ref context_die)
25059 {
25060 tree member;
25061 tree binfo = TYPE_BINFO (type);
25062
25063 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
25064
25065 /* If this is not an incomplete type, output descriptions of each of its
25066 members. Note that as we output the DIEs necessary to represent the
25067 members of this record or union type, we will also be trying to output
25068 DIEs to represent the *types* of those members. However the `type'
25069 function (above) will specifically avoid generating type DIEs for member
25070 types *within* the list of member DIEs for this (containing) type except
25071 for those types (of members) which are explicitly marked as also being
25072 members of this (containing) type themselves. The g++ front end can
25073 force any given type to be treated as a member of some other (containing)
25074 type by setting the TYPE_CONTEXT of the given (member) type to point to
25075 the TREE node representing the appropriate (containing) type. */
25076
25077 /* First output info about the base classes. */
25078 if (binfo && early_dwarf)
25079 {
25080 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
25081 int i;
25082 tree base;
25083
25084 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
25085 gen_inheritance_die (base,
25086 (accesses ? (*accesses)[i] : access_public_node),
25087 type,
25088 context_die);
25089 }
25090
25091 /* Now output info about the members. */
25092 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
25093 {
25094 /* Ignore clones. */
25095 if (DECL_ABSTRACT_ORIGIN (member))
25096 continue;
25097
25098 struct vlr_context vlr_ctx = { type, NULL_TREE };
25099 bool static_inline_p
25100 = (VAR_P (member)
25101 && TREE_STATIC (member)
25102 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
25103 != -1));
25104
25105 /* If we thought we were generating minimal debug info for TYPE
25106 and then changed our minds, some of the member declarations
25107 may have already been defined. Don't define them again, but
25108 do put them in the right order. */
25109
25110 if (dw_die_ref child = lookup_decl_die (member))
25111 {
25112 /* Handle inline static data members, which only have in-class
25113 declarations. */
25114 bool splice = true;
25115
25116 dw_die_ref ref = NULL;
25117 if (child->die_tag == DW_TAG_variable
25118 && child->die_parent == comp_unit_die ())
25119 {
25120 ref = get_AT_ref (child, DW_AT_specification);
25121
25122 /* For C++17 inline static data members followed by redundant
25123 out of class redeclaration, we might get here with
25124 child being the DIE created for the out of class
25125 redeclaration and with its DW_AT_specification being
25126 the DIE created for in-class definition. We want to
25127 reparent the latter, and don't want to create another
25128 DIE with DW_AT_specification in that case, because
25129 we already have one. */
25130 if (ref
25131 && static_inline_p
25132 && ref->die_tag == DW_TAG_variable
25133 && ref->die_parent == comp_unit_die ()
25134 && get_AT (ref, DW_AT_specification) == NULL)
25135 {
25136 child = ref;
25137 ref = NULL;
25138 static_inline_p = false;
25139 }
25140
25141 if (!ref)
25142 {
25143 reparent_child (child, context_die);
25144 if (dwarf_version < 5)
25145 child->die_tag = DW_TAG_member;
25146 splice = false;
25147 }
25148 }
25149
25150 if (splice)
25151 splice_child_die (context_die, child);
25152 }
25153
25154 /* Do not generate standard DWARF for variant parts if we are generating
25155 the corresponding GNAT encodings: DIEs generated for both would
25156 conflict in our mappings. */
25157 else if (is_variant_part (member)
25158 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25159 {
25160 vlr_ctx.variant_part_offset = byte_position (member);
25161 gen_variant_part (member, &vlr_ctx, context_die);
25162 }
25163 else
25164 {
25165 vlr_ctx.variant_part_offset = NULL_TREE;
25166 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25167 }
25168
25169 /* For C++ inline static data members emit immediately a DW_TAG_variable
25170 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25171 DW_AT_specification. */
25172 if (static_inline_p)
25173 {
25174 int old_extern = DECL_EXTERNAL (member);
25175 DECL_EXTERNAL (member) = 0;
25176 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25177 DECL_EXTERNAL (member) = old_extern;
25178 }
25179 }
25180 }
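/* Illustration (hypothetical C++17 source): for
     struct S { static inline int x = 1; };
   the in-class declaration of S::x is emitted as a child of S's DIE,
   and because it is an inline static data member a second
   DW_TAG_variable is emitted immediately at the compile-unit level with
   a DW_AT_specification referring back to the in-class DIE, as
   described in the comment above.  */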
25181
25182 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25183 is set, we pretend that the type was never defined, so we only get the
25184 member DIEs needed by later specification DIEs. */
25185
25186 static void
25187 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25188 enum debug_info_usage usage)
25189 {
25190 if (TREE_ASM_WRITTEN (type))
25191 {
25192 /* Fill in the bound of variable-length fields in late dwarf if
25193 still incomplete. */
25194 if (!early_dwarf && variably_modified_type_p (type, NULL))
25195 for (tree member = TYPE_FIELDS (type);
25196 member;
25197 member = DECL_CHAIN (member))
25198 fill_variable_array_bounds (TREE_TYPE (member));
25199 return;
25200 }
25201
25202 dw_die_ref type_die = lookup_type_die (type);
25203 dw_die_ref scope_die = 0;
25204 int nested = 0;
25205 int complete = (TYPE_SIZE (type)
25206 && (! TYPE_STUB_DECL (type)
25207 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25208 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25209 complete = complete && should_emit_struct_debug (type, usage);
25210
25211 if (type_die && ! complete)
25212 return;
25213
25214 if (TYPE_CONTEXT (type) != NULL_TREE
25215 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25216 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25217 nested = 1;
25218
25219 scope_die = scope_die_for (type, context_die);
25220
25221 /* Generate child DIEs for template parameters. */
25222 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25223 schedule_generic_params_dies_gen (type);
25224
25225 if (! type_die || (nested && is_cu_die (scope_die)))
25226 /* First occurrence of type or toplevel definition of nested class. */
25227 {
25228 dw_die_ref old_die = type_die;
25229
25230 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25231 ? record_type_tag (type) : DW_TAG_union_type,
25232 scope_die, type);
25233 equate_type_number_to_die (type, type_die);
25234 if (old_die)
25235 add_AT_specification (type_die, old_die);
25236 else
25237 add_name_attribute (type_die, type_tag (type));
25238 }
25239 else
25240 remove_AT (type_die, DW_AT_declaration);
25241
25242 /* If this type has been completed, then give it a byte_size attribute and
25243 then give a list of members. */
25244 if (complete && !ns_decl)
25245 {
25246 /* Prevent infinite recursion in cases where the type of some member of
25247 this type is expressed in terms of this type itself. */
25248 TREE_ASM_WRITTEN (type) = 1;
25249 add_byte_size_attribute (type_die, type);
25250 add_alignment_attribute (type_die, type);
25251 if (TYPE_STUB_DECL (type) != NULL_TREE)
25252 {
25253 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25254 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25255 }
25256
25257 /* If the first reference to this type was as the return type of an
25258 inline function, then it may not have a parent. Fix this now. */
25259 if (type_die->die_parent == NULL)
25260 add_child_die (scope_die, type_die);
25261
25262 gen_member_die (type, type_die);
25263
25264 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25265 if (TYPE_ARTIFICIAL (type))
25266 add_AT_flag (type_die, DW_AT_artificial, 1);
25267
25268 /* GNU extension: Record what type our vtable lives in. */
25269 if (TYPE_VFIELD (type))
25270 {
25271 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25272
25273 gen_type_die (vtype, context_die);
25274 add_AT_die_ref (type_die, DW_AT_containing_type,
25275 lookup_type_die (vtype));
25276 }
25277 }
25278 else
25279 {
25280 add_AT_flag (type_die, DW_AT_declaration, 1);
25281
25282 /* We don't need to do this for function-local types. */
25283 if (TYPE_STUB_DECL (type)
25284 && ! decl_function_context (TYPE_STUB_DECL (type)))
25285 vec_safe_push (incomplete_types, type);
25286 }
25287
25288 if (get_AT (type_die, DW_AT_name))
25289 add_pubtype (type, type_die);
25290 }
25291
25292 /* Generate a DIE for a subroutine _type_. */
25293
25294 static void
25295 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25296 {
25297 tree return_type = TREE_TYPE (type);
25298 dw_die_ref subr_die
25299 = new_die (DW_TAG_subroutine_type,
25300 scope_die_for (type, context_die), type);
25301
25302 equate_type_number_to_die (type, subr_die);
25303 add_prototyped_attribute (subr_die, type);
25304 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25305 context_die);
25306 add_alignment_attribute (subr_die, type);
25307 gen_formal_types_die (type, subr_die);
25308
25309 if (get_AT (subr_die, DW_AT_name))
25310 add_pubtype (type, subr_die);
25311 if ((dwarf_version >= 5 || !dwarf_strict)
25312 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25313 add_AT_flag (subr_die, DW_AT_reference, 1);
25314 if ((dwarf_version >= 5 || !dwarf_strict)
25315 && lang_hooks.types.type_dwarf_attribute (type,
25316 DW_AT_rvalue_reference) != -1)
25317 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25318 }
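/* Illustration: for the prototyped C function type  int (char, long)
   this produces roughly
     DW_TAG_subroutine_type
       DW_AT_prototyped
       DW_AT_type -> int
       DW_TAG_formal_parameter  DW_AT_type -> char
       DW_TAG_formal_parameter  DW_AT_type -> long
   with DW_AT_reference or DW_AT_rvalue_reference added for C++
   ref-qualified METHOD_TYPEs when the front end asks for them.  */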
25319
25320 /* Generate a DIE for a type definition. */
25321
25322 static void
25323 gen_typedef_die (tree decl, dw_die_ref context_die)
25324 {
25325 dw_die_ref type_die;
25326 tree type;
25327
25328 if (TREE_ASM_WRITTEN (decl))
25329 {
25330 if (DECL_ORIGINAL_TYPE (decl))
25331 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25332 return;
25333 }
25334
25335 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25336 checks in process_scope_var and modified_type_die), this should be called
25337 only for original types. */
25338 gcc_assert (decl_ultimate_origin (decl) == NULL
25339 || decl_ultimate_origin (decl) == decl);
25340
25341 TREE_ASM_WRITTEN (decl) = 1;
25342 type_die = new_die (DW_TAG_typedef, context_die, decl);
25343
25344 add_name_and_src_coords_attributes (type_die, decl);
25345 if (DECL_ORIGINAL_TYPE (decl))
25346 {
25347 type = DECL_ORIGINAL_TYPE (decl);
25348 if (type == error_mark_node)
25349 return;
25350
25351 gcc_assert (type != TREE_TYPE (decl));
25352 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25353 }
25354 else
25355 {
25356 type = TREE_TYPE (decl);
25357 if (type == error_mark_node)
25358 return;
25359
25360 if (is_naming_typedef_decl (TYPE_NAME (type)))
25361 {
25362 /* Here, we are in the case of decl being a typedef naming
25363 an anonymous type, e.g:
25364 typedef struct {...} foo;
25365 In that case TREE_TYPE (decl) is not a typedef variant
25366 type and TYPE_NAME of the anonymous type is set to the
25367 TYPE_DECL of the typedef. This construct is emitted by
25368 the C++ FE.
25369
25370 TYPE is the anonymous struct named by the typedef
25371 DECL. As we need the DW_AT_type attribute of the
25372 DW_TAG_typedef to point to the DIE of TYPE, let's
25373 generate that DIE right away. add_type_attribute
25374 called below will then pick (via lookup_type_die) that
25375 anonymous struct DIE. */
25376 if (!TREE_ASM_WRITTEN (type))
25377 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25378
25379 /* This is a GNU Extension. We are adding a
25380 DW_AT_linkage_name attribute to the DIE of the
25381 anonymous struct TYPE. The value of that attribute
25382 is the name of the typedef decl naming the anonymous
25383 struct. This greatly eases the work of consumers of
25384 this debug info. */
25385 add_linkage_name_raw (lookup_type_die (type), decl);
25386 }
25387 }
25388
25389 add_type_attribute (type_die, type, decl_quals (decl), false,
25390 context_die);
25391
25392 if (is_naming_typedef_decl (decl))
25393 /* We want that all subsequent calls to lookup_type_die with
25394 TYPE in argument yield the DW_TAG_typedef we have just
25395 created. */
25396 equate_type_number_to_die (type, type_die);
25397
25398 add_alignment_attribute (type_die, TREE_TYPE (decl));
25399
25400 add_accessibility_attribute (type_die, decl);
25401
25402 if (DECL_ABSTRACT_P (decl))
25403 equate_decl_number_to_die (decl, type_die);
25404
25405 if (get_AT (type_die, DW_AT_name))
25406 add_pubtype (decl, type_die);
25407 }
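/* Illustration (the naming-typedef construct the C++ front end emits,
   per the comment above): for
     typedef struct { int i; } foo;
   the anonymous struct's TYPE_NAME is the TYPE_DECL "foo", so the
   struct DIE is generated first and receives a DW_AT_linkage_name of
   "foo"; the DW_TAG_typedef's DW_AT_type then refers to that struct
   DIE, and later lookup_type_die calls on the struct resolve to the
   typedef DIE.  */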
25408
25409 /* Generate a DIE for a struct, class, enum or union type. */
25410
25411 static void
25412 gen_tagged_type_die (tree type,
25413 dw_die_ref context_die,
25414 enum debug_info_usage usage)
25415 {
25416 if (type == NULL_TREE
25417 || !is_tagged_type (type))
25418 return;
25419
25420 if (TREE_ASM_WRITTEN (type))
25421 ;
25422 /* If this is a nested type whose containing class hasn't been written
25423 out yet, writing it out will cover this one, too. This does not apply
25424 to instantiations of member class templates; they need to be added to
25425 the containing class as they are generated. FIXME: This hurts the
25426 idea of combining type decls from multiple TUs, since we can't predict
25427 what set of template instantiations we'll get. */
25428 else if (TYPE_CONTEXT (type)
25429 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25430 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25431 {
25432 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25433
25434 if (TREE_ASM_WRITTEN (type))
25435 return;
25436
25437 /* If that failed, attach ourselves to the stub. */
25438 context_die = lookup_type_die (TYPE_CONTEXT (type));
25439 }
25440 else if (TYPE_CONTEXT (type) != NULL_TREE
25441 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25442 {
25443 /* If this type is local to a function that hasn't been written
25444 out yet, use a NULL context for now; it will be fixed up in
25445 decls_for_scope. */
25446 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25447 /* A declaration DIE doesn't count; nested types need to go in the
25448 specification. */
25449 if (context_die && is_declaration_die (context_die))
25450 context_die = NULL;
25451 }
25452 else
25453 context_die = declare_in_namespace (type, context_die);
25454
25455 if (TREE_CODE (type) == ENUMERAL_TYPE)
25456 {
25457 /* This might have been written out by the call to
25458 declare_in_namespace. */
25459 if (!TREE_ASM_WRITTEN (type))
25460 gen_enumeration_type_die (type, context_die);
25461 }
25462 else
25463 gen_struct_or_union_type_die (type, context_die, usage);
25464
25465 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25466 it up if it is ever completed. gen_*_type_die will set it for us
25467 when appropriate. */
25468 }
25469
25470 /* Generate a type description DIE. */
25471
25472 static void
25473 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25474 enum debug_info_usage usage)
25475 {
25476 struct array_descr_info info;
25477
25478 if (type == NULL_TREE || type == error_mark_node)
25479 return;
25480
25481 if (flag_checking && type)
25482 verify_type (type);
25483
25484 if (TYPE_NAME (type) != NULL_TREE
25485 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25486 && is_redundant_typedef (TYPE_NAME (type))
25487 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25488 /* The DECL of this type is a typedef we don't want to emit debug
25489 info for but we want debug info for its underlying typedef.
25490 This can happen for e.g, the injected-class-name of a C++
25491 type. */
25492 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25493
25494 /* If TYPE is a typedef type variant, let's generate debug info
25495 for the parent typedef which TYPE is a type of. */
25496 if (typedef_variant_p (type))
25497 {
25498 if (TREE_ASM_WRITTEN (type))
25499 return;
25500
25501 tree name = TYPE_NAME (type);
25502 tree origin = decl_ultimate_origin (name);
25503 if (origin != NULL && origin != name)
25504 {
25505 gen_decl_die (origin, NULL, NULL, context_die);
25506 return;
25507 }
25508
25509 /* Prevent broken recursion; we can't hand off to the same type. */
25510 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25511
25512 /* Give typedefs the right scope. */
25513 context_die = scope_die_for (type, context_die);
25514
25515 TREE_ASM_WRITTEN (type) = 1;
25516
25517 gen_decl_die (name, NULL, NULL, context_die);
25518 return;
25519 }
25520
25521 /* If type is an anonymous tagged type named by a typedef, let's
25522 generate debug info for the typedef. */
25523 if (is_naming_typedef_decl (TYPE_NAME (type)))
25524 {
25525 /* Give typedefs the right scope. */
25526 context_die = scope_die_for (type, context_die);
25527
25528 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25529 return;
25530 }
25531
25532 if (lang_hooks.types.get_debug_type)
25533 {
25534 tree debug_type = lang_hooks.types.get_debug_type (type);
25535
25536 if (debug_type != NULL_TREE && debug_type != type)
25537 {
25538 gen_type_die_with_usage (debug_type, context_die, usage);
25539 return;
25540 }
25541 }
25542
25543 /* We are going to output a DIE to represent the unqualified version
25544 of this type (i.e. without any const or volatile qualifiers) so
25545 get the main variant (i.e. the unqualified version) of this type
25546 now. (Vectors and arrays are special because the debugging info is in the
25547 cloned type itself. Similarly function/method types can contain extra
25548 ref-qualification). */
25549 if (TREE_CODE (type) == FUNCTION_TYPE
25550 || TREE_CODE (type) == METHOD_TYPE)
25551 {
25552 /* For function/method types, can't use type_main_variant here,
25553 because that can have different ref-qualifiers for C++,
25554 but try to canonicalize. */
25555 tree main = TYPE_MAIN_VARIANT (type);
25556 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25557 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25558 && check_base_type (t, main)
25559 && check_lang_type (t, type))
25560 {
25561 type = t;
25562 break;
25563 }
25564 }
25565 else if (TREE_CODE (type) != VECTOR_TYPE
25566 && TREE_CODE (type) != ARRAY_TYPE)
25567 type = type_main_variant (type);
25568
25569 /* If this is an array type with hidden descriptor, handle it first. */
25570 if (!TREE_ASM_WRITTEN (type)
25571 && lang_hooks.types.get_array_descr_info)
25572 {
25573 memset (&info, 0, sizeof (info));
25574 if (lang_hooks.types.get_array_descr_info (type, &info))
25575 {
25576 /* Fortran sometimes emits array types with no dimension. */
25577 gcc_assert (info.ndimensions >= 0
25578 && (info.ndimensions
25579 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25580 gen_descr_array_type_die (type, &info, context_die);
25581 TREE_ASM_WRITTEN (type) = 1;
25582 return;
25583 }
25584 }
25585
25586 if (TREE_ASM_WRITTEN (type))
25587 {
25588 /* Variable-length types may be incomplete even if
25589 TREE_ASM_WRITTEN. For such types, fall through to
25590 gen_array_type_die() and possibly fill in
25591 DW_AT_{upper,lower}_bound attributes. */
25592 if ((TREE_CODE (type) != ARRAY_TYPE
25593 && TREE_CODE (type) != RECORD_TYPE
25594 && TREE_CODE (type) != UNION_TYPE
25595 && TREE_CODE (type) != QUAL_UNION_TYPE)
25596 || !variably_modified_type_p (type, NULL))
25597 return;
25598 }
25599
25600 switch (TREE_CODE (type))
25601 {
25602 case ERROR_MARK:
25603 break;
25604
25605 case POINTER_TYPE:
25606 case REFERENCE_TYPE:
25607 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25608 ensures that the gen_type_die recursion will terminate even if the
25609 type is recursive. Recursive types are possible in Ada. */
25610 /* ??? We could perhaps do this for all types before the switch
25611 statement. */
25612 TREE_ASM_WRITTEN (type) = 1;
25613
25614 /* For these types, all that is required is that we output a DIE (or a
25615 set of DIEs) to represent the "basis" type. */
25616 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25617 DINFO_USAGE_IND_USE);
25618 break;
25619
25620 case OFFSET_TYPE:
25621 /* This code is used for C++ pointer-to-data-member types.
25622 Output a description of the relevant class type. */
25623 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25624 DINFO_USAGE_IND_USE);
25625
25626 /* Output a description of the type of the object pointed to. */
25627 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25628 DINFO_USAGE_IND_USE);
25629
25630 /* Now output a DIE to represent this pointer-to-data-member type
25631 itself. */
25632 gen_ptr_to_mbr_type_die (type, context_die);
25633 break;
25634
25635 case FUNCTION_TYPE:
25636 /* Force out return type (in case it wasn't forced out already). */
25637 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25638 DINFO_USAGE_DIR_USE);
25639 gen_subroutine_type_die (type, context_die);
25640 break;
25641
25642 case METHOD_TYPE:
25643 /* Force out return type (in case it wasn't forced out already). */
25644 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25645 DINFO_USAGE_DIR_USE);
25646 gen_subroutine_type_die (type, context_die);
25647 break;
25648
25649 case ARRAY_TYPE:
25650 case VECTOR_TYPE:
25651 gen_array_type_die (type, context_die);
25652 break;
25653
25654 case ENUMERAL_TYPE:
25655 case RECORD_TYPE:
25656 case UNION_TYPE:
25657 case QUAL_UNION_TYPE:
25658 gen_tagged_type_die (type, context_die, usage);
25659 return;
25660
25661 case VOID_TYPE:
25662 case INTEGER_TYPE:
25663 case REAL_TYPE:
25664 case FIXED_POINT_TYPE:
25665 case COMPLEX_TYPE:
25666 case BOOLEAN_TYPE:
25667 /* No DIEs needed for fundamental types. */
25668 break;
25669
25670 case NULLPTR_TYPE:
25671 case LANG_TYPE:
25672 /* Just use DW_TAG_unspecified_type. */
25673 {
25674 dw_die_ref type_die = lookup_type_die (type);
25675 if (type_die == NULL)
25676 {
25677 tree name = TYPE_IDENTIFIER (type);
25678 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25679 type);
25680 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25681 equate_type_number_to_die (type, type_die);
25682 }
25683 }
25684 break;
25685
25686 default:
25687 if (is_cxx_auto (type))
25688 {
25689 tree name = TYPE_IDENTIFIER (type);
25690 dw_die_ref *die = (name == get_identifier ("auto")
25691 ? &auto_die : &decltype_auto_die);
25692 if (!*die)
25693 {
25694 *die = new_die (DW_TAG_unspecified_type,
25695 comp_unit_die (), NULL_TREE);
25696 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25697 }
25698 equate_type_number_to_die (type, *die);
25699 break;
25700 }
25701 gcc_unreachable ();
25702 }
25703
25704 TREE_ASM_WRITTEN (type) = 1;
25705 }
25706
25707 static void
25708 gen_type_die (tree type, dw_die_ref context_die)
25709 {
25710 if (type != error_mark_node)
25711 {
25712 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25713 if (flag_checking)
25714 {
25715 dw_die_ref die = lookup_type_die (type);
25716 if (die)
25717 check_die (die);
25718 }
25719 }
25720 }
25721
25722 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25723 things which are local to the given block. */
25724
25725 static void
25726 gen_block_die (tree stmt, dw_die_ref context_die)
25727 {
25728 int must_output_die = 0;
25729 bool inlined_func;
25730
25731 /* Ignore blocks that are NULL. */
25732 if (stmt == NULL_TREE)
25733 return;
25734
25735 inlined_func = inlined_function_outer_scope_p (stmt);
25736
25737 /* If the block is one fragment of a non-contiguous block, do not
25738 process the variables, since they will have been done by the
25739 origin block. Do process subblocks. */
25740 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25741 {
25742 tree sub;
25743
25744 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25745 gen_block_die (sub, context_die);
25746
25747 return;
25748 }
25749
25750 /* Determine if we need to output any Dwarf DIEs at all to represent this
25751 block. */
25752 if (inlined_func)
25753 /* The outer scopes for inlinings *must* always be represented. We
25754 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25755 must_output_die = 1;
25756 else if (lookup_block_die (stmt))
25757 /* If we already have a DIE then it was filled early. Meanwhile
25758 we might have pruned all BLOCK_VARS as optimized out, but we
25759 still want to generate high/low PC attributes, so output it. */
25760 must_output_die = 1;
25761 else if (TREE_USED (stmt)
25762 || TREE_ASM_WRITTEN (stmt))
25763 {
25764 /* Determine if this block directly contains any "significant"
25765 local declarations which we will need to output DIEs for. */
25766 if (debug_info_level > DINFO_LEVEL_TERSE)
25767 {
25768 /* We are not in terse mode so any local declaration that
25769 is not ignored for debug purposes counts as being a
25770 "significant" one. */
25771 if (BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25772 must_output_die = 1;
25773 else
25774 for (tree var = BLOCK_VARS (stmt); var; var = DECL_CHAIN (var))
25775 if (!DECL_IGNORED_P (var))
25776 {
25777 must_output_die = 1;
25778 break;
25779 }
25780 }
25781 else if (!dwarf2out_ignore_block (stmt))
25782 must_output_die = 1;
25783 }
25784
25785 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25786 DIE for any block which contains no significant local declarations at
25787 all. Rather, in such cases we just call `decls_for_scope' so that any
25788 needed Dwarf info for any sub-blocks will get properly generated. Note
25789 that in terse mode, our definition of what constitutes a "significant"
25790 local declaration gets restricted to include only inlined function
25791 instances and local (nested) function definitions. */
25792 if (must_output_die)
25793 {
25794 if (inlined_func)
25795 gen_inlined_subroutine_die (stmt, context_die);
25796 else
25797 gen_lexical_block_die (stmt, context_die);
25798 }
25799 else
25800 decls_for_scope (stmt, context_die);
25801 }
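
/* A minimal sketch of the effect of gen_block_die (the example function is
   hypothetical, not taken from these sources): for roughly

     void f (void) { { int x = 0; g (&x); } }

   the inner braces form a BLOCK whose local `x' is significant, so a
   DW_TAG_lexical_block DIE is emitted containing the DIE for `x'; a block
   with no such locals gets no DIE of its own and only its sub-blocks are
   walked via decls_for_scope.  */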
25802
25803 /* Process variable DECL (or variable with origin ORIGIN) within
25804 block STMT and add it to CONTEXT_DIE. */
25805 static void
25806 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25807 {
25808 dw_die_ref die;
25809 tree decl_or_origin = decl ? decl : origin;
25810
25811 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25812 die = lookup_decl_die (decl_or_origin);
25813 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25814 {
25815 if (TYPE_DECL_IS_STUB (decl_or_origin))
25816 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25817 else
25818 die = lookup_decl_die (decl_or_origin);
25819 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25820 if (! die && ! early_dwarf)
25821 return;
25822 }
25823 else
25824 die = NULL;
25825
25826 /* Avoid creating DIEs for local typedefs and concrete static variables that
25827 will only be pruned later. */
25828 if ((origin || decl_ultimate_origin (decl))
25829 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25830 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25831 {
25832 origin = decl_ultimate_origin (decl_or_origin);
25833 if (decl && VAR_P (decl) && die != NULL)
25834 {
25835 die = lookup_decl_die (origin);
25836 if (die != NULL)
25837 equate_decl_number_to_die (decl, die);
25838 }
25839 return;
25840 }
25841
25842 if (die != NULL && die->die_parent == NULL)
25843 add_child_die (context_die, die);
25844 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25845 {
25846 if (early_dwarf)
25847 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25848 stmt, context_die);
25849 }
25850 else
25851 {
25852 if (decl && DECL_P (decl))
25853 {
25854 die = lookup_decl_die (decl);
25855
25856 /* Early created DIEs do not have a parent as the decls refer
25857 to the function as DECL_CONTEXT rather than the BLOCK. */
25858 if (die && die->die_parent == NULL)
25859 {
25860 gcc_assert (in_lto_p);
25861 add_child_die (context_die, die);
25862 }
25863 }
25864
25865 gen_decl_die (decl, origin, NULL, context_die);
25866 }
25867 }
25868
25869 /* Generate all of the decls declared within a given scope and (recursively)
25870 all of its sub-blocks. */
25871
25872 static void
25873 decls_for_scope (tree stmt, dw_die_ref context_die, bool recurse)
25874 {
25875 tree decl;
25876 unsigned int i;
25877 tree subblocks;
25878
25879 /* Ignore NULL blocks. */
25880 if (stmt == NULL_TREE)
25881 return;
25882
25883 /* Output the DIEs to represent all of the data objects and typedefs
25884 declared directly within this block but not within any nested
25885 sub-blocks. Also, nested function and tag DIEs have been
25886 generated with a parent of NULL; fix that up now. We don't
25887 have to do this if we're at -g1. */
25888 if (debug_info_level > DINFO_LEVEL_TERSE)
25889 {
25890 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25891 process_scope_var (stmt, decl, NULL_TREE, context_die);
25892 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25893 origin - avoid doing this twice as we have no good way to see
25894 if we've done it once already. */
25895 if (! early_dwarf)
25896 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
25897 {
25898 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
25899 if (decl == current_function_decl)
25900 /* Ignore declarations of the current function: although they
25901 are declarations, gen_subprogram_die would treat them as
25902 definitions again because they are equal to
25903 current_function_decl, and endlessly recurse. */;
25904 else if (TREE_CODE (decl) == FUNCTION_DECL)
25905 process_scope_var (stmt, decl, NULL_TREE, context_die);
25906 else
25907 process_scope_var (stmt, NULL_TREE, decl, context_die);
25908 }
25909 }
25910
25911 /* Even if we're at -g1, we need to process the subblocks in order to get
25912 inlined call information. */
25913
25914 /* Output the DIEs to represent all sub-blocks (and the items declared
25915 therein) of this block. */
25916 if (recurse)
25917 for (subblocks = BLOCK_SUBBLOCKS (stmt);
25918 subblocks != NULL;
25919 subblocks = BLOCK_CHAIN (subblocks))
25920 gen_block_die (subblocks, context_die);
25921 }
25922
25923 /* Is this a typedef we can avoid emitting? */
25924
25925 static bool
25926 is_redundant_typedef (const_tree decl)
25927 {
25928 if (TYPE_DECL_IS_STUB (decl))
25929 return true;
25930
25931 if (DECL_ARTIFICIAL (decl)
25932 && DECL_CONTEXT (decl)
25933 && is_tagged_type (DECL_CONTEXT (decl))
25934 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
25935 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
25936 /* Also ignore the artificial member typedef for the class name. */
25937 return true;
25938
25939 return false;
25940 }
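
/* A sketch of the second case above (the C++ snippet is hypothetical): for

     struct S { int i; };

   the C++ FE creates an artificial member typedef for the class name `S'
   whose DECL_NAME matches that of TYPE_NAME of its DECL_CONTEXT, so
   is_redundant_typedef returns true for it and no DW_TAG_typedef is
   emitted.  */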
25941
25942 /* Return TRUE if DECL is a typedef that names a type for linkage
25943    purposes. This kind of typedef is produced by the C++ FE for
25944 constructs like:
25945
25946 typedef struct {...} foo;
25947
25948 In that case, there is no typedef variant type produced for foo.
25949 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
25950 struct type. */
25951
25952 static bool
25953 is_naming_typedef_decl (const_tree decl)
25954 {
25955 if (decl == NULL_TREE
25956 || TREE_CODE (decl) != TYPE_DECL
25957 || DECL_NAMELESS (decl)
25958 || !is_tagged_type (TREE_TYPE (decl))
25959 || DECL_IS_BUILTIN (decl)
25960 || is_redundant_typedef (decl)
25961 /* It looks like Ada produces TYPE_DECLs that are very similar
25962 to C++ naming typedefs but that have different
25963 semantics. Let's be specific to C++ for now. */
25964 || !is_cxx (decl))
25965 return FALSE;
25966
25967 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
25968 && TYPE_NAME (TREE_TYPE (decl)) == decl
25969 && (TYPE_STUB_DECL (TREE_TYPE (decl))
25970 != TYPE_NAME (TREE_TYPE (decl))));
25971 }
25972
25973 /* Looks up the DIE for a context. */
25974
25975 static inline dw_die_ref
25976 lookup_context_die (tree context)
25977 {
25978 if (context)
25979 {
25980 /* Find die that represents this context. */
25981 if (TYPE_P (context))
25982 {
25983 context = TYPE_MAIN_VARIANT (context);
25984 dw_die_ref ctx = lookup_type_die (context);
25985 if (!ctx)
25986 return NULL;
25987 return strip_naming_typedef (context, ctx);
25988 }
25989 else
25990 return lookup_decl_die (context);
25991 }
25992 return comp_unit_die ();
25993 }
25994
25995 /* Returns the DIE for a context. */
25996
25997 static inline dw_die_ref
25998 get_context_die (tree context)
25999 {
26000 if (context)
26001 {
26002 /* Find die that represents this context. */
26003 if (TYPE_P (context))
26004 {
26005 context = TYPE_MAIN_VARIANT (context);
26006 return strip_naming_typedef (context, force_type_die (context));
26007 }
26008 else
26009 return force_decl_die (context);
26010 }
26011 return comp_unit_die ();
26012 }
26013
26014 /* Returns the DIE for decl. A DIE will always be returned. */
26015
26016 static dw_die_ref
26017 force_decl_die (tree decl)
26018 {
26019 dw_die_ref decl_die;
26020 unsigned saved_external_flag;
26021 tree save_fn = NULL_TREE;
26022 decl_die = lookup_decl_die (decl);
26023 if (!decl_die)
26024 {
26025 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
26026
26027 decl_die = lookup_decl_die (decl);
26028 if (decl_die)
26029 return decl_die;
26030
26031 switch (TREE_CODE (decl))
26032 {
26033 case FUNCTION_DECL:
26034 /* Clear current_function_decl, so that gen_subprogram_die thinks
26035 that this is a declaration. At this point, we just want to force
26036 a declaration DIE. */
26037 save_fn = current_function_decl;
26038 current_function_decl = NULL_TREE;
26039 gen_subprogram_die (decl, context_die);
26040 current_function_decl = save_fn;
26041 break;
26042
26043 case VAR_DECL:
26044 /* Set external flag to force declaration die. Restore it after
26045 gen_decl_die() call. */
26046 saved_external_flag = DECL_EXTERNAL (decl);
26047 DECL_EXTERNAL (decl) = 1;
26048 gen_decl_die (decl, NULL, NULL, context_die);
26049 DECL_EXTERNAL (decl) = saved_external_flag;
26050 break;
26051
26052 case NAMESPACE_DECL:
26053 if (dwarf_version >= 3 || !dwarf_strict)
26054 dwarf2out_decl (decl);
26055 else
26056 /* DWARF2 has neither DW_TAG_module, nor DW_TAG_namespace. */
26057 decl_die = comp_unit_die ();
26058 break;
26059
26060 case TRANSLATION_UNIT_DECL:
26061 decl_die = comp_unit_die ();
26062 break;
26063
26064 default:
26065 gcc_unreachable ();
26066 }
26067
26068 /* We should be able to find the DIE now. */
26069 if (!decl_die)
26070 decl_die = lookup_decl_die (decl);
26071 gcc_assert (decl_die);
26072 }
26073
26074 return decl_die;
26075 }
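
/* Sketch of a typical use (the C++ snippet is hypothetical): when emitting
   DW_TAG_imported_declaration for `using N::f;', the import needs a DIE
   for `f' even if nothing has referenced it yet, so
   dwarf2out_imported_module_or_decl_1 below falls back to
   force_decl_die (f).  */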
26076
26077 /* Returns the DIE for TYPE, which must not be a base type. A DIE is
26078 always returned. */
26079
26080 static dw_die_ref
26081 force_type_die (tree type)
26082 {
26083 dw_die_ref type_die;
26084
26085 type_die = lookup_type_die (type);
26086 if (!type_die)
26087 {
26088 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
26089
26090 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
26091 false, context_die);
26092 gcc_assert (type_die);
26093 }
26094 return type_die;
26095 }
26096
26097 /* Force out any required namespaces to be able to output DECL,
26098 and return the new context_die for it, if it's changed. */
26099
26100 static dw_die_ref
26101 setup_namespace_context (tree thing, dw_die_ref context_die)
26102 {
26103 tree context = (DECL_P (thing)
26104 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
26105 if (context && TREE_CODE (context) == NAMESPACE_DECL)
26106 /* Force out the namespace. */
26107 context_die = force_decl_die (context);
26108
26109 return context_die;
26110 }
26111
26112 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
26113 type) within its namespace, if appropriate.
26114
26115 For compatibility with older debuggers, namespace DIEs only contain
26116 declarations; all definitions are emitted at CU scope, with
26117 DW_AT_specification pointing to the declaration (like with class
26118 members). */
26119
26120 static dw_die_ref
26121 declare_in_namespace (tree thing, dw_die_ref context_die)
26122 {
26123 dw_die_ref ns_context;
26124
26125 if (debug_info_level <= DINFO_LEVEL_TERSE)
26126 return context_die;
26127
26128 /* External declarations in the local scope only need to be emitted
26129 once, not once in the namespace and once in the scope.
26130
26131 This avoids declaring the `extern' below in the
26132 namespace DIE as well as in the innermost scope:
26133
26134 namespace S
26135 {
26136 int i=5;
26137 int foo()
26138 {
26139 int i=8;
26140 extern int i;
26141 return i;
26142 }
26143 }
26144 */
26145 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26146 return context_die;
26147
26148 /* If this decl is from an inlined function, then don't try to emit it in its
26149 namespace, as we will get confused. It would have already been emitted
26150 when the abstract instance of the inline function was emitted anyway. */
26151 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26152 return context_die;
26153
26154 ns_context = setup_namespace_context (thing, context_die);
26155
26156 if (ns_context != context_die)
26157 {
26158 if (is_fortran () || is_dlang ())
26159 return ns_context;
26160 if (DECL_P (thing))
26161 gen_decl_die (thing, NULL, NULL, ns_context);
26162 else
26163 gen_type_die (thing, ns_context);
26164 }
26165 return context_die;
26166 }
26167
26168 /* Generate a DIE for a namespace or namespace alias. */
26169
26170 static void
26171 gen_namespace_die (tree decl, dw_die_ref context_die)
26172 {
26173 dw_die_ref namespace_die;
26174
26175 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26176 they are an alias of. */
26177 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26178 {
26179 /* Output a real namespace or module. */
26180 context_die = setup_namespace_context (decl, comp_unit_die ());
26181 namespace_die = new_die (is_fortran () || is_dlang ()
26182 ? DW_TAG_module : DW_TAG_namespace,
26183 context_die, decl);
26184 /* For Fortran modules defined in a different CU, don't add src coords. */
26185 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26186 {
26187 const char *name = dwarf2_name (decl, 0);
26188 if (name)
26189 add_name_attribute (namespace_die, name);
26190 }
26191 else
26192 add_name_and_src_coords_attributes (namespace_die, decl);
26193 if (DECL_EXTERNAL (decl))
26194 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26195 equate_decl_number_to_die (decl, namespace_die);
26196 }
26197 else
26198 {
26199 /* Output a namespace alias. */
26200
26201 /* Force out the namespace we are an alias of, if necessary. */
26202 dw_die_ref origin_die
26203 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26204
26205 if (DECL_FILE_SCOPE_P (decl)
26206 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26207 context_die = setup_namespace_context (decl, comp_unit_die ());
26208 /* Now create the namespace alias DIE. */
26209 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26210 add_name_and_src_coords_attributes (namespace_die, decl);
26211 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26212 equate_decl_number_to_die (decl, namespace_die);
26213 }
26214 if ((dwarf_version >= 5 || !dwarf_strict)
26215 && lang_hooks.decls.decl_dwarf_attribute (decl,
26216 DW_AT_export_symbols) == 1)
26217 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26218
26219 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26220 if (want_pubnames ())
26221 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26222 }
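
/* Rough mapping implemented above (the C++ snippets are hypothetical):

     namespace N { ... }  ->  DW_TAG_namespace (DW_TAG_module for
                              Fortran and D modules)
     namespace A = N;     ->  DW_TAG_imported_declaration whose
                              DW_AT_import refers to the DIE for N.  */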
26223
26224 /* Generate Dwarf debug information for a decl described by DECL.
26225 The return value is currently only meaningful for PARM_DECLs;
26226 for all other decls it returns NULL.
26227
26228 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26229 It can be NULL otherwise. */
26230
26231 static dw_die_ref
26232 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26233 dw_die_ref context_die)
26234 {
26235 tree decl_or_origin = decl ? decl : origin;
26236 tree class_origin = NULL, ultimate_origin;
26237
26238 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26239 return NULL;
26240
26241 switch (TREE_CODE (decl_or_origin))
26242 {
26243 case ERROR_MARK:
26244 break;
26245
26246 case CONST_DECL:
26247 if (!is_fortran () && !is_ada () && !is_dlang ())
26248 {
26249 /* The individual enumerators of an enum type get output when we output
26250 the Dwarf representation of the relevant enum type itself. */
26251 break;
26252 }
26253
26254 /* Emit its type. */
26255 gen_type_die (TREE_TYPE (decl), context_die);
26256
26257 /* And its containing namespace. */
26258 context_die = declare_in_namespace (decl, context_die);
26259
26260 gen_const_die (decl, context_die);
26261 break;
26262
26263 case FUNCTION_DECL:
26264 #if 0
26265 /* FIXME */
26266 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26267 on local redeclarations of global functions. That seems broken. */
26268 if (current_function_decl != decl)
26269 /* This is only a declaration. */;
26270 #endif
26271
26272 /* We should have abstract copies already and should not generate
26273 stray type DIEs in late LTO dumping. */
26274 if (! early_dwarf)
26275 ;
26276
26277 /* If we're emitting a clone, emit info for the abstract instance. */
26278 else if (origin || DECL_ORIGIN (decl) != decl)
26279 dwarf2out_abstract_function (origin
26280 ? DECL_ORIGIN (origin)
26281 : DECL_ABSTRACT_ORIGIN (decl));
26282
26283 /* If we're emitting a possibly inlined function, emit it as an
26284 abstract instance. */
26285 else if (cgraph_function_possibly_inlined_p (decl)
26286 && ! DECL_ABSTRACT_P (decl)
26287 && ! class_or_namespace_scope_p (context_die)
26288 /* dwarf2out_abstract_function won't emit a die if this is just
26289 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26290 that case, because that works only if we have a die. */
26291 && DECL_INITIAL (decl) != NULL_TREE)
26292 dwarf2out_abstract_function (decl);
26293
26294 /* Otherwise we're emitting the primary DIE for this decl. */
26295 else if (debug_info_level > DINFO_LEVEL_TERSE)
26296 {
26297 /* Before we describe the FUNCTION_DECL itself, make sure that we
26298 have its containing type. */
26299 if (!origin)
26300 origin = decl_class_context (decl);
26301 if (origin != NULL_TREE)
26302 gen_type_die (origin, context_die);
26303
26304 /* And its return type. */
26305 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26306
26307 /* And its virtual context. */
26308 if (DECL_VINDEX (decl) != NULL_TREE)
26309 gen_type_die (DECL_CONTEXT (decl), context_die);
26310
26311 /* Make sure we have a member DIE for decl. */
26312 if (origin != NULL_TREE)
26313 gen_type_die_for_member (origin, decl, context_die);
26314
26315 /* And its containing namespace. */
26316 context_die = declare_in_namespace (decl, context_die);
26317 }
26318
26319 /* Now output a DIE to represent the function itself. */
26320 if (decl)
26321 gen_subprogram_die (decl, context_die);
26322 break;
26323
26324 case TYPE_DECL:
26325 /* If we are in terse mode, don't generate any DIEs to represent any
26326 actual typedefs. */
26327 if (debug_info_level <= DINFO_LEVEL_TERSE)
26328 break;
26329
26330 /* In the special case of a TYPE_DECL node representing the declaration
26331 of some type tag, if the given TYPE_DECL is marked as having been
26332 instantiated from some other (original) TYPE_DECL node (e.g. one which
26333 was generated within the original definition of an inline function) we
26334 used to generate a special (abbreviated) DW_TAG_structure_type,
26335 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26336 should actually be referencing those DIEs, as variable DIEs with that
26337 type would already be emitted in the abstract origin, so it was always
26338 removed during unused type pruning. Don't add anything in this
26339 case. */
26340 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26341 break;
26342
26343 if (is_redundant_typedef (decl))
26344 gen_type_die (TREE_TYPE (decl), context_die);
26345 else
26346 /* Output a DIE to represent the typedef itself. */
26347 gen_typedef_die (decl, context_die);
26348 break;
26349
26350 case LABEL_DECL:
26351 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26352 gen_label_die (decl, context_die);
26353 break;
26354
26355 case VAR_DECL:
26356 case RESULT_DECL:
26357 /* If we are in terse mode, don't generate any DIEs to represent any
26358 variable declarations or definitions. */
26359 if (debug_info_level <= DINFO_LEVEL_TERSE)
26360 break;
26361
26362 /* Avoid generating stray type DIEs during late dwarf dumping.
26363 All types have been dumped early. */
26364 if (early_dwarf
26365 /* ??? But in LTRANS we cannot annotate early created variably
26366 modified type DIEs without copying them and adjusting all
26367 references to them. Dump them again as happens for inlining
26368 which copies both the decl and the types. */
26369 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26370 in VLA bound information for example. */
26371 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26372 current_function_decl)))
26373 {
26374 /* Output any DIEs that are needed to specify the type of this data
26375 object. */
26376 if (decl_by_reference_p (decl_or_origin))
26377 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26378 else
26379 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26380 }
26381
26382 if (early_dwarf)
26383 {
26384 /* And its containing type. */
26385 class_origin = decl_class_context (decl_or_origin);
26386 if (class_origin != NULL_TREE)
26387 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26388
26389 /* And its containing namespace. */
26390 context_die = declare_in_namespace (decl_or_origin, context_die);
26391 }
26392
26393 /* Now output the DIE to represent the data object itself. This gets
26394 complicated because of the possibility that the VAR_DECL really
26395 represents an inlined instance of a formal parameter for an inline
26396 function. */
26397 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26398 if (ultimate_origin != NULL_TREE
26399 && TREE_CODE (ultimate_origin) == PARM_DECL)
26400 gen_formal_parameter_die (decl, origin,
26401 true /* Emit name attribute. */,
26402 context_die);
26403 else
26404 gen_variable_die (decl, origin, context_die);
26405 break;
26406
26407 case FIELD_DECL:
26408 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26409 /* Ignore the nameless fields that are used to skip bits but handle C++
26410 anonymous unions and structs. */
26411 if (DECL_NAME (decl) != NULL_TREE
26412 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26413 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26414 {
26415 gen_type_die (member_declared_type (decl), context_die);
26416 gen_field_die (decl, ctx, context_die);
26417 }
26418 break;
26419
26420 case PARM_DECL:
26421 /* Avoid generating stray type DIEs during late dwarf dumping.
26422 All types have been dumped early. */
26423 if (early_dwarf
26424 /* ??? But in LTRANS we cannot annotate early created variably
26425 modified type DIEs without copying them and adjusting all
26426 references to them. Dump them again as happens for inlining
26427 which copies both the decl and the types. */
26428 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26429 in VLA bound information for example. */
26430 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26431 current_function_decl)))
26432 {
26433 if (DECL_BY_REFERENCE (decl_or_origin))
26434 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26435 else
26436 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26437 }
26438 return gen_formal_parameter_die (decl, origin,
26439 true /* Emit name attribute. */,
26440 context_die);
26441
26442 case NAMESPACE_DECL:
26443 if (dwarf_version >= 3 || !dwarf_strict)
26444 gen_namespace_die (decl, context_die);
26445 break;
26446
26447 case IMPORTED_DECL:
26448 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26449 DECL_CONTEXT (decl), context_die);
26450 break;
26451
26452 case NAMELIST_DECL:
26453 gen_namelist_decl (DECL_NAME (decl), context_die,
26454 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26455 break;
26456
26457 default:
26458 /* Probably some frontend-internal decl. Assume we don't care. */
26459 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26460 break;
26461 }
26462
26463 return NULL;
26464 }
26465 \f
26466 /* Output initial debug information for global DECL. Called at the
26467 end of the parsing process.
26468
26469 This is the initial debug generation process. As such, the DIEs
26470 generated may be incomplete. A later debug generation pass
26471 (dwarf2out_late_global_decl) will augment the information generated
26472 in this pass (e.g., with complete location info). */
26473
26474 static void
26475 dwarf2out_early_global_decl (tree decl)
26476 {
26477 set_early_dwarf s;
26478
26479 /* gen_decl_die() will set DECL_ABSTRACT because
26480 cgraph_function_possibly_inlined_p() returns true. This in
26481 turn will cause DW_AT_inline attributes to be set.
26482
26483 This happens because at early dwarf generation, there is no
26484 cgraph information, causing cgraph_function_possibly_inlined_p()
26485 to return true. Trick cgraph_function_possibly_inlined_p()
26486 while we generate dwarf early. */
26487 bool save = symtab->global_info_ready;
26488 symtab->global_info_ready = true;
26489
26490 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26491 other DECLs and they can point to template types or other things
26492 that dwarf2out can't handle when done via dwarf2out_decl. */
26493 if (TREE_CODE (decl) != TYPE_DECL
26494 && TREE_CODE (decl) != PARM_DECL)
26495 {
26496 if (TREE_CODE (decl) == FUNCTION_DECL)
26497 {
26498 tree save_fndecl = current_function_decl;
26499
26500 /* For nested functions, make sure we have DIEs for the parents first
26501 so that all nested DIEs are generated at the proper scope in the
26502 first shot. */
26503 tree context = decl_function_context (decl);
26504 if (context != NULL)
26505 {
26506 dw_die_ref context_die = lookup_decl_die (context);
26507 current_function_decl = context;
26508
26509 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26510 enough so that it lands in its own context. This avoids type
26511 pruning issues later on. */
26512 if (context_die == NULL || is_declaration_die (context_die))
26513 dwarf2out_early_global_decl (context);
26514 }
26515
26516 /* Emit an abstract origin of a function first. This happens
26517 with C++ constructor clones for example and makes
26518 dwarf2out_abstract_function happy which requires the early
26519 DIE of the abstract instance to be present. */
26520 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26521 dw_die_ref origin_die;
26522 if (origin != NULL
26523 /* Do not emit the DIE multiple times but make sure to
26524 process it fully here in case we just saw a declaration. */
26525 && ((origin_die = lookup_decl_die (origin)) == NULL
26526 || is_declaration_die (origin_die)))
26527 {
26528 current_function_decl = origin;
26529 dwarf2out_decl (origin);
26530 }
26531
26532 /* Emit the DIE for decl but avoid doing that multiple times. */
26533 dw_die_ref old_die;
26534 if ((old_die = lookup_decl_die (decl)) == NULL
26535 || is_declaration_die (old_die))
26536 {
26537 current_function_decl = decl;
26538 dwarf2out_decl (decl);
26539 }
26540
26541 current_function_decl = save_fndecl;
26542 }
26543 else
26544 dwarf2out_decl (decl);
26545 }
26546 symtab->global_info_ready = save;
26547 }
26548
26549 /* Return whether EXPR is an expression with the following pattern:
26550 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
26551
26552 static bool
26553 is_trivial_indirect_ref (tree expr)
26554 {
26555 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26556 return false;
26557
26558 tree nop = TREE_OPERAND (expr, 0);
26559 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26560 return false;
26561
26562 tree int_cst = TREE_OPERAND (nop, 0);
26563 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26564 }
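
/* The shape matched above corresponds to a DECL_VALUE_EXPR along the lines
   of the (hypothetical)

     *(int *) 0x1234

   i.e. an INDIRECT_REF of a NOP_EXPR of an INTEGER_CST; anything more
   complex is rejected so that no relocation against a text symbol can end
   up in LTO debug info.  */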
26565
26566 /* Output debug information for global decl DECL. Called from
26567 toplev.c after compilation proper has finished. */
26568
26569 static void
26570 dwarf2out_late_global_decl (tree decl)
26571 {
26572 /* Fill-in any location information we were unable to determine
26573 on the first pass. */
26574 if (VAR_P (decl))
26575 {
26576 dw_die_ref die = lookup_decl_die (decl);
26577
26578 /* We may have to generate full debug late for LTO in case debug
26579 was not enabled at compile-time or the target doesn't support
26580 the LTO early debug scheme. */
26581 if (! die && in_lto_p)
26582 dwarf2out_decl (decl);
26583 else if (die)
26584 {
26585 /* We get called via the symtab code invoking late_global_decl
26586 for symbols that are optimized out.
26587
26588 Do not add locations for those, except if they have a
26589 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26590 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26591 INDIRECT_REF expression, as this could generate relocations to
26592 text symbols in LTO object files, which is invalid. */
26593 varpool_node *node = varpool_node::get (decl);
26594 if ((! node || ! node->definition)
26595 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26596 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26597 tree_add_const_value_attribute_for_decl (die, decl);
26598 else
26599 add_location_or_const_value_attribute (die, decl, false);
26600 }
26601 }
26602 }
26603
26604 /* Output debug information for type decl DECL. Called from toplev.c
26605 and from language front ends (to record built-in types). */
26606 static void
26607 dwarf2out_type_decl (tree decl, int local)
26608 {
26609 if (!local)
26610 {
26611 set_early_dwarf s;
26612 dwarf2out_decl (decl);
26613 }
26614 }
26615
26616 /* Output debug information for imported module or decl DECL.
26617 NAME is non-NULL name in the lexical block if the decl has been renamed.
26618 LEXICAL_BLOCK is the lexical block (which TREE_CODE is a BLOCK)
26619 that DECL belongs to.
26620 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
26621 static void
26622 dwarf2out_imported_module_or_decl_1 (tree decl,
26623 tree name,
26624 tree lexical_block,
26625 dw_die_ref lexical_block_die)
26626 {
26627 expanded_location xloc;
26628 dw_die_ref imported_die = NULL;
26629 dw_die_ref at_import_die;
26630
26631 if (TREE_CODE (decl) == IMPORTED_DECL)
26632 {
26633 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26634 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26635 gcc_assert (decl);
26636 }
26637 else
26638 xloc = expand_location (input_location);
26639
26640 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26641 {
26642 at_import_die = force_type_die (TREE_TYPE (decl));
26643 /* For namespace N { typedef void T; } using N::T; base_type_die
26644 returns NULL, but DW_TAG_imported_declaration requires
26645 the DW_AT_import attribute. Force creation of DW_TAG_typedef. */
26646 if (!at_import_die)
26647 {
26648 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26649 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26650 at_import_die = lookup_type_die (TREE_TYPE (decl));
26651 gcc_assert (at_import_die);
26652 }
26653 }
26654 else
26655 {
26656 at_import_die = lookup_decl_die (decl);
26657 if (!at_import_die)
26658 {
26659 /* If we're trying to avoid duplicate debug info, we may not have
26660 emitted the member decl for this field. Emit it now. */
26661 if (TREE_CODE (decl) == FIELD_DECL)
26662 {
26663 tree type = DECL_CONTEXT (decl);
26664
26665 if (TYPE_CONTEXT (type)
26666 && TYPE_P (TYPE_CONTEXT (type))
26667 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26668 DINFO_USAGE_DIR_USE))
26669 return;
26670 gen_type_die_for_member (type, decl,
26671 get_context_die (TYPE_CONTEXT (type)));
26672 }
26673 if (TREE_CODE (decl) == NAMELIST_DECL)
26674 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26675 get_context_die (DECL_CONTEXT (decl)),
26676 NULL_TREE);
26677 else
26678 at_import_die = force_decl_die (decl);
26679 }
26680 }
26681
26682 if (TREE_CODE (decl) == NAMESPACE_DECL)
26683 {
26684 if (dwarf_version >= 3 || !dwarf_strict)
26685 imported_die = new_die (DW_TAG_imported_module,
26686 lexical_block_die,
26687 lexical_block);
26688 else
26689 return;
26690 }
26691 else
26692 imported_die = new_die (DW_TAG_imported_declaration,
26693 lexical_block_die,
26694 lexical_block);
26695
26696 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26697 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26698 if (debug_column_info && xloc.column)
26699 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26700 if (name)
26701 add_AT_string (imported_die, DW_AT_name,
26702 IDENTIFIER_POINTER (name));
26703 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26704 }
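
/* A short C++-level sketch of what the code above produces (the snippets
   are hypothetical):

     using namespace N;  ->  DW_TAG_imported_module
     using N::f;         ->  DW_TAG_imported_declaration

   each carrying DW_AT_decl_file/line (and column, if enabled) plus a
   DW_AT_import reference to the DIE of the imported namespace or decl.  */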
26705
26706 /* Output debug information for imported module or decl DECL.
26707 NAME is non-NULL name in context if the decl has been renamed.
26708 CHILD is true if decl is one of the renamed decls as part of
26709 importing whole module.
26710 IMPLICIT is set if this hook is called for an implicit import
26711 such as inline namespace. */
26712
26713 static void
26714 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26715 bool child, bool implicit)
26716 {
26717 /* dw_die_ref at_import_die; */
26718 dw_die_ref scope_die;
26719
26720 if (debug_info_level <= DINFO_LEVEL_TERSE)
26721 return;
26722
26723 gcc_assert (decl);
26724
26725 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26726 should be enough; for DWARF4 and older, even if we emit
26727 DW_AT_export_symbols as an extension, add the implicit DW_TAG_imported_module
26728 anyway for the benefit of consumers unaware of DW_AT_export_symbols.  */
26729 if (implicit
26730 && dwarf_version >= 5
26731 && lang_hooks.decls.decl_dwarf_attribute (decl,
26732 DW_AT_export_symbols) == 1)
26733 return;
26734
26735 set_early_dwarf s;
26736
26737 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need
26738 two DIEs: a DIE for the imported decl (referenced via DW_AT_import) and
26739 a scope DIE. First, get the DIE for the decl itself. */
26740
26741 /* Get the scope die for decl context. Use comp_unit_die for global module
26742 or decl. If a DIE is not found for a non-global, force a new DIE. */
26743 if (context
26744 && TYPE_P (context)
26745 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26746 return;
26747
26748 scope_die = get_context_die (context);
26749
26750 if (child)
26751 {
26752 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26753 there is nothing we can do here. */
26754 if (dwarf_version < 3 && dwarf_strict)
26755 return;
26756
26757 gcc_assert (scope_die->die_child);
26758 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26759 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26760 scope_die = scope_die->die_child;
26761 }
26762
26763 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26764 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26765 }
26766
26767 /* Output debug information for namelists. */
26768
26769 static dw_die_ref
26770 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26771 {
26772 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26773 tree value;
26774 unsigned i;
26775
26776 if (debug_info_level <= DINFO_LEVEL_TERSE)
26777 return NULL;
26778
26779 gcc_assert (scope_die != NULL);
26780 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26781 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26782
26783 /* If there are no item_decls, we have a nondefining namelist, e.g.
26784 with USE association; hence, set DW_AT_declaration. */
26785 if (item_decls == NULL_TREE)
26786 {
26787 add_AT_flag (nml_die, DW_AT_declaration, 1);
26788 return nml_die;
26789 }
26790
26791 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26792 {
26793 nml_item_ref_die = lookup_decl_die (value);
26794 if (!nml_item_ref_die)
26795 nml_item_ref_die = force_decl_die (value);
26796
26797 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26798 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26799 }
26800 return nml_die;
26801 }
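
/* A rough Fortran-level example of the input handled above (the snippet is
   hypothetical):

     integer :: a, b
     namelist /nml/ a, b

   yields a DW_TAG_namelist DIE named "nml" with one DW_TAG_namelist_item
   child per member, each referring to the member's DIE through
   DW_AT_namelist_items.  */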
26802
26803
26804 /* Write the debugging output for DECL. */
26805
26806 static void
26807 dwarf2out_decl (tree decl)
26808 {
26809 dw_die_ref context_die = comp_unit_die ();
26810
26811 switch (TREE_CODE (decl))
26812 {
26813 case ERROR_MARK:
26814 return;
26815
26816 case FUNCTION_DECL:
26817 /* If we're a nested function, initially use a parent of NULL; if we're
26818 a plain function, this will be fixed up in decls_for_scope. If
26819 we're a method, it will be ignored, since we already have a DIE.
26820 Avoid doing this late though since clones of class methods may
26821 otherwise end up in limbo and create type DIEs late. */
26822 if (early_dwarf
26823 && decl_function_context (decl)
26824 /* But if we're in terse mode, we don't care about scope. */
26825 && debug_info_level > DINFO_LEVEL_TERSE)
26826 context_die = NULL;
26827 break;
26828
26829 case VAR_DECL:
26830 /* For local statics lookup proper context die. */
26831 if (local_function_static (decl))
26832 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26833
26834 /* If we are in terse mode, don't generate any DIEs to represent any
26835 variable declarations or definitions. */
26836 if (debug_info_level <= DINFO_LEVEL_TERSE)
26837 return;
26838 break;
26839
26840 case CONST_DECL:
26841 if (debug_info_level <= DINFO_LEVEL_TERSE)
26842 return;
26843 if (!is_fortran () && !is_ada () && !is_dlang ())
26844 return;
26845 if (TREE_STATIC (decl) && decl_function_context (decl))
26846 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26847 break;
26848
26849 case NAMESPACE_DECL:
26850 case IMPORTED_DECL:
26851 if (debug_info_level <= DINFO_LEVEL_TERSE)
26852 return;
26853 if (lookup_decl_die (decl) != NULL)
26854 return;
26855 break;
26856
26857 case TYPE_DECL:
26858 /* Don't emit stubs for types unless they are needed by other DIEs. */
26859 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26860 return;
26861
26862 /* Don't bother trying to generate any DIEs to represent any of the
26863 normal built-in types for the language we are compiling. */
26864 if (DECL_IS_BUILTIN (decl))
26865 return;
26866
26867 /* If we are in terse mode, don't generate any DIEs for types. */
26868 if (debug_info_level <= DINFO_LEVEL_TERSE)
26869 return;
26870
26871 /* If we're a function-scope tag, initially use a parent of NULL;
26872 this will be fixed up in decls_for_scope. */
26873 if (decl_function_context (decl))
26874 context_die = NULL;
26875
26876 break;
26877
26878 case NAMELIST_DECL:
26879 break;
26880
26881 default:
26882 return;
26883 }
26884
26885 gen_decl_die (decl, NULL, NULL, context_die);
26886
26887 if (flag_checking)
26888 {
26889 dw_die_ref die = lookup_decl_die (decl);
26890 if (die)
26891 check_die (die);
26892 }
26893 }
26894
26895 /* Write the debugging output for DECL. */
26896
26897 static void
26898 dwarf2out_function_decl (tree decl)
26899 {
26900 dwarf2out_decl (decl);
26901 call_arg_locations = NULL;
26902 call_arg_loc_last = NULL;
26903 call_site_count = -1;
26904 tail_call_site_count = -1;
26905 decl_loc_table->empty ();
26906 cached_dw_loc_list_table->empty ();
26907 }
26908
26909 /* Output a marker (i.e. a label) for the beginning of the generated code for
26910 a lexical block. */
26911
26912 static void
26913 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
26914 unsigned int blocknum)
26915 {
26916 switch_to_section (current_function_section ());
26917 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
26918 }
26919
26920 /* Output a marker (i.e. a label) for the end of the generated code for a
26921 lexical block. */
26922
26923 static void
26924 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
26925 {
26926 switch_to_section (current_function_section ());
26927 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
26928 }
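
/* Sketch of the resulting assembly (the exact spelling depends on
   BLOCK_BEGIN_LABEL/BLOCK_END_LABEL and the target's local label prefix):

     .LBB4:
       ... code of lexical block 4 ...
     .LBE4:

   The DW_TAG_lexical_block DIE for block 4 later refers to these labels
   for its low/high PC attributes.  */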
26929
26930 /* Returns nonzero if it is appropriate not to emit any debugging
26931 information for BLOCK, because it doesn't contain any instructions.
26932
26933 Don't allow this for blocks with nested functions or local classes
26934 as we would end up with orphans, and in the presence of scheduling
26935 we may end up calling them anyway. */
26936
26937 static bool
26938 dwarf2out_ignore_block (const_tree block)
26939 {
26940 tree decl;
26941 unsigned int i;
26942
26943 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
26944 if (TREE_CODE (decl) == FUNCTION_DECL
26945 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26946 return 0;
26947 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
26948 {
26949 decl = BLOCK_NONLOCALIZED_VAR (block, i);
26950 if (TREE_CODE (decl) == FUNCTION_DECL
26951 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
26952 return 0;
26953 }
26954
26955 return 1;
26956 }
26957
26958 /* Hash table routines for file_hash. */
26959
26960 bool
26961 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
26962 {
26963 return filename_cmp (p1->filename, p2) == 0;
26964 }
26965
26966 hashval_t
26967 dwarf_file_hasher::hash (dwarf_file_data *p)
26968 {
26969 return htab_hash_string (p->filename);
26970 }
26971
26972 /* Lookup FILE_NAME (in the list of filenames that we know about here in
26973 dwarf2out.c) and return its "index". The index of each (known) filename is
26974 just a unique number which is associated with only that one filename. We
26975 need such numbers for the sake of generating labels (in the .debug_sfnames
26976 section) and references to those files numbers (in the .debug_srcinfo
26977 and .debug_macinfo sections). If the filename given as an argument is not
26978 found in our current list, add it to the list and assign it the next
26979 available unique index number. */
26980
26981 static struct dwarf_file_data *
26982 lookup_filename (const char *file_name)
26983 {
26984 struct dwarf_file_data * created;
26985
26986 if (!file_name)
26987 return NULL;
26988
26989 if (!file_name[0])
26990 file_name = "<stdin>";
26991
26992 dwarf_file_data **slot
26993 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
26994 INSERT);
26995 if (*slot)
26996 return *slot;
26997
26998 created = ggc_alloc<dwarf_file_data> ();
26999 created->filename = file_name;
27000 created->emitted_number = 0;
27001 *slot = created;
27002 return created;
27003 }
27004
27005 /* If the assembler will construct the file table, then translate the compiler
27006 internal file table number into the assembler file table number, and emit
27007 a .file directive if we haven't already emitted one yet. The file table
27008 numbers are different because we prune debug info for unused variables and
27009 types, which may include filenames. */
27010
27011 static int
27012 maybe_emit_file (struct dwarf_file_data * fd)
27013 {
27014 if (! fd->emitted_number)
27015 {
27016 if (last_emitted_file)
27017 fd->emitted_number = last_emitted_file->emitted_number + 1;
27018 else
27019 fd->emitted_number = 1;
27020 last_emitted_file = fd;
27021
27022 if (output_asm_line_debug_info ())
27023 {
27024 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
27025 output_quoted_string (asm_out_file,
27026 remap_debug_filename (fd->filename));
27027 fputc ('\n', asm_out_file);
27028 }
27029 }
27030
27031 return fd->emitted_number;
27032 }
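
/* Sketch of the output when the assembler constructs the file table (the
   filename is hypothetical): the first file referenced yields

     .file 1 "src/foo.c"

   and the returned number (here 1) is what later .loc directives and
   DW_AT_decl_file values refer to.  */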
27033
27034 /* Schedule generation of a DW_AT_const_value attribute to DIE.
27035 That generation should happen after function debug info has been
27036 generated. The value of the attribute is the constant value of ARG. */
27037
27038 static void
27039 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
27040 {
27041 die_arg_entry entry;
27042
27043 if (!die || !arg)
27044 return;
27045
27046 gcc_assert (early_dwarf);
27047
27048 if (!tmpl_value_parm_die_table)
27049 vec_alloc (tmpl_value_parm_die_table, 32);
27050
27051 entry.die = die;
27052 entry.arg = arg;
27053 vec_safe_push (tmpl_value_parm_die_table, entry);
27054 }
27055
27056 /* Return TRUE if T is an instance of a generic type, FALSE
27057 otherwise. */
27058
27059 static bool
27060 generic_type_p (tree t)
27061 {
27062 if (t == NULL_TREE || !TYPE_P (t))
27063 return false;
27064 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
27065 }
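
/* For instance (hypothetical C++ input), an instantiation such as S<int>
   of `template <class T> struct S' would count as generic here, assuming
   the C++ implementation of get_innermost_generic_parms returns its
   innermost template parameter list, while a plain struct yields
   NULL_TREE.  */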
27066
27067 /* Schedule the generation of the generic parameter DIEs for the
27068 instance of generic type T. The generation itself is done later
27069 by gen_scheduled_generic_parms_dies. */
27070
27071 static void
27072 schedule_generic_params_dies_gen (tree t)
27073 {
27074 if (!generic_type_p (t))
27075 return;
27076
27077 gcc_assert (early_dwarf);
27078
27079 if (!generic_type_instances)
27080 vec_alloc (generic_type_instances, 256);
27081
27082 vec_safe_push (generic_type_instances, t);
27083 }
27084
27085 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
27086 by append_entry_to_tmpl_value_parm_die_table. This function must
27087 be called after function DIEs have been generated. */
27088
27089 static void
27090 gen_remaining_tmpl_value_param_die_attribute (void)
27091 {
27092 if (tmpl_value_parm_die_table)
27093 {
27094 unsigned i, j;
27095 die_arg_entry *e;
27096
27097 /* We do this in two phases - first get the cases we can
27098 handle during early-finish, preserving those we cannot
27099 (containing symbolic constants where we don't yet know
27100 whether we are going to output the referenced symbols).
27101 For those we try again at late-finish. */
27102 j = 0;
27103 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
27104 {
27105 if (!e->die->removed
27106 && !tree_add_const_value_attribute (e->die, e->arg))
27107 {
27108 dw_loc_descr_ref loc = NULL;
27109 if (! early_dwarf
27110 && (dwarf_version >= 5 || !dwarf_strict))
27111 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
27112 if (loc)
27113 add_AT_loc (e->die, DW_AT_location, loc);
27114 else
27115 (*tmpl_value_parm_die_table)[j++] = *e;
27116 }
27117 }
27118 tmpl_value_parm_die_table->truncate (j);
27119 }
27120 }
27121
27122 /* Generate generic parameters DIEs for instances of generic types
27123 that have been previously scheduled by
27124 schedule_generic_params_dies_gen. This function must be called
27125 after all the types of the CU have been laid out. */
27126
27127 static void
27128 gen_scheduled_generic_parms_dies (void)
27129 {
27130 unsigned i;
27131 tree t;
27132
27133 if (!generic_type_instances)
27134 return;
27135
27136 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27137 if (COMPLETE_TYPE_P (t))
27138 gen_generic_params_dies (t);
27139
27140 generic_type_instances = NULL;
27141 }
27142
27143
27144 /* Replace DW_AT_name for the decl with name. */
27145
27146 static void
27147 dwarf2out_set_name (tree decl, tree name)
27148 {
27149 dw_die_ref die;
27150 dw_attr_node *attr;
27151 const char *dname;
27152
27153 die = TYPE_SYMTAB_DIE (decl);
27154 if (!die)
27155 return;
27156
27157 dname = dwarf2_name (name, 0);
27158 if (!dname)
27159 return;
27160
27161 attr = get_AT (die, DW_AT_name);
27162 if (attr)
27163 {
27164 struct indirect_string_node *node;
27165
27166 node = find_AT_string (dname);
27167 /* Replace the string. */
27168 attr->dw_attr_val.v.val_str = node;
27169 }
27170
27171 else
27172 add_name_attribute (die, dname);
27173 }
27174
27175 /* True if before or during processing of the first function being emitted. */
27176 static bool in_first_function_p = true;
27177 /* True if loc_note during dwarf2out_var_location call might still be
27178 before first real instruction at address equal to .Ltext0. */
27179 static bool maybe_at_text_label_p = true;
27180 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27181 static unsigned int first_loclabel_num_not_at_text_label;
27182
27183 /* Look ahead for a real insn, or for a begin stmt marker. */
27184
27185 static rtx_insn *
27186 dwarf2out_next_real_insn (rtx_insn *loc_note)
27187 {
27188 rtx_insn *next_real = NEXT_INSN (loc_note);
27189
27190 while (next_real)
27191 if (INSN_P (next_real))
27192 break;
27193 else
27194 next_real = NEXT_INSN (next_real);
27195
27196 return next_real;
27197 }
27198
27199 /* Called by the final INSN scan whenever we see a var location. We
27200 use it to drop labels in the right places, and throw the location in
27201 our lookup table. */
27202
27203 static void
27204 dwarf2out_var_location (rtx_insn *loc_note)
27205 {
27206 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27207 struct var_loc_node *newloc;
27208 rtx_insn *next_real, *next_note;
27209 rtx_insn *call_insn = NULL;
27210 static const char *last_label;
27211 static const char *last_postcall_label;
27212 static bool last_in_cold_section_p;
27213 static rtx_insn *expected_next_loc_note;
27214 tree decl;
27215 bool var_loc_p;
27216 var_loc_view view = 0;
27217
27218 if (!NOTE_P (loc_note))
27219 {
27220 if (CALL_P (loc_note))
27221 {
27222 maybe_reset_location_view (loc_note, cur_line_info_table);
27223 call_site_count++;
27224 if (SIBLING_CALL_P (loc_note))
27225 tail_call_site_count++;
27226 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27227 {
27228 call_insn = loc_note;
27229 loc_note = NULL;
27230 var_loc_p = false;
27231
27232 next_real = dwarf2out_next_real_insn (call_insn);
27233 next_note = NULL;
27234 cached_next_real_insn = NULL;
27235 goto create_label;
27236 }
27237 if (optimize == 0 && !flag_var_tracking)
27238 {
27239 /* When the var-tracking pass is not running, there is no note
27240 for indirect calls whose target is compile-time known. In this
27241 case, process such calls specifically so that we generate call
27242 sites for them anyway. */
27243 rtx x = PATTERN (loc_note);
27244 if (GET_CODE (x) == PARALLEL)
27245 x = XVECEXP (x, 0, 0);
27246 if (GET_CODE (x) == SET)
27247 x = SET_SRC (x);
27248 if (GET_CODE (x) == CALL)
27249 x = XEXP (x, 0);
27250 if (!MEM_P (x)
27251 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27252 || !SYMBOL_REF_DECL (XEXP (x, 0))
27253 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27254 != FUNCTION_DECL))
27255 {
27256 call_insn = loc_note;
27257 loc_note = NULL;
27258 var_loc_p = false;
27259
27260 next_real = dwarf2out_next_real_insn (call_insn);
27261 next_note = NULL;
27262 cached_next_real_insn = NULL;
27263 goto create_label;
27264 }
27265 }
27266 }
27267 else if (!debug_variable_location_views)
27268 gcc_unreachable ();
27269 else
27270 maybe_reset_location_view (loc_note, cur_line_info_table);
27271
27272 return;
27273 }
27274
27275 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27276 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27277 return;
27278
27279 /* Optimize processing a large consecutive sequence of location
27280 notes so we don't spend too much time in next_real_insn. If the
27281 next insn is another location note, remember the next_real_insn
27282 calculation for next time. */
27283 next_real = cached_next_real_insn;
27284 if (next_real)
27285 {
27286 if (expected_next_loc_note != loc_note)
27287 next_real = NULL;
27288 }
27289
27290 next_note = NEXT_INSN (loc_note);
27291 if (! next_note
27292 || next_note->deleted ()
27293 || ! NOTE_P (next_note)
27294 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27295 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27296 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27297 next_note = NULL;
27298
27299 if (! next_real)
27300 next_real = dwarf2out_next_real_insn (loc_note);
27301
27302 if (next_note)
27303 {
27304 expected_next_loc_note = next_note;
27305 cached_next_real_insn = next_real;
27306 }
27307 else
27308 cached_next_real_insn = NULL;
27309
27310 /* If there are no instructions which would be affected by this note,
27311 don't do anything. */
27312 if (var_loc_p
27313 && next_real == NULL_RTX
27314 && !NOTE_DURING_CALL_P (loc_note))
27315 return;
27316
27317 create_label:
27318
27319 if (next_real == NULL_RTX)
27320 next_real = get_last_insn ();
27321
27322 /* If there were any real insns between the note we processed last time
27323 and this note (or if it is the first note), clear
27324 last_{,postcall_}label so that they are not reused this time. */
27325 if (last_var_location_insn == NULL_RTX
27326 || last_var_location_insn != next_real
27327 || last_in_cold_section_p != in_cold_section_p)
27328 {
27329 last_label = NULL;
27330 last_postcall_label = NULL;
27331 }
27332
27333 if (var_loc_p)
27334 {
27335 const char *label
27336 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27337 view = cur_line_info_table->view;
27338 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27339 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27340 if (newloc == NULL)
27341 return;
27342 }
27343 else
27344 {
27345 decl = NULL_TREE;
27346 newloc = NULL;
27347 }
27348
27349 /* If there were no real insns between the note we processed last time
27350 and this note, use the label we emitted last time. Otherwise
27351 create a new label and emit it. */
27352 if (last_label == NULL)
27353 {
27354 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27355 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27356 loclabel_num++;
27357 last_label = ggc_strdup (loclabel);
27358 /* See if loclabel might be equal to .Ltext0. If yes,
27359 bump first_loclabel_num_not_at_text_label. */
27360 if (!have_multiple_function_sections
27361 && in_first_function_p
27362 && maybe_at_text_label_p)
27363 {
27364 static rtx_insn *last_start;
27365 rtx_insn *insn;
27366 for (insn = loc_note; insn; insn = previous_insn (insn))
27367 if (insn == last_start)
27368 break;
27369 else if (!NONDEBUG_INSN_P (insn))
27370 continue;
27371 else
27372 {
27373 rtx body = PATTERN (insn);
27374 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27375 continue;
27376 /* Inline asm could occupy zero bytes. */
27377 else if (GET_CODE (body) == ASM_INPUT
27378 || asm_noperands (body) >= 0)
27379 continue;
27380 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27381 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27382 continue;
27383 #endif
27384 else
27385 {
27386 /* Assume insn has non-zero length. */
27387 maybe_at_text_label_p = false;
27388 break;
27389 }
27390 }
27391 if (maybe_at_text_label_p)
27392 {
27393 last_start = loc_note;
27394 first_loclabel_num_not_at_text_label = loclabel_num;
27395 }
27396 }
27397 }
27398
27399 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27400 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27401
27402 if (!var_loc_p)
27403 {
27404 struct call_arg_loc_node *ca_loc
27405 = ggc_cleared_alloc<call_arg_loc_node> ();
27406 rtx_insn *prev = call_insn;
27407
27408 ca_loc->call_arg_loc_note
27409 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27410 ca_loc->next = NULL;
27411 ca_loc->label = last_label;
27412 gcc_assert (prev
27413 && (CALL_P (prev)
27414 || (NONJUMP_INSN_P (prev)
27415 && GET_CODE (PATTERN (prev)) == SEQUENCE
27416 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27417 if (!CALL_P (prev))
27418 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27419 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27420
27421 /* Look for a SYMBOL_REF in the "prev" instruction. */
27422 rtx x = get_call_rtx_from (prev);
27423 if (x)
27424 {
27425 /* Try to get the call symbol, if any. */
27426 if (MEM_P (XEXP (x, 0)))
27427 x = XEXP (x, 0);
27428 /* First, look for a memory access to a symbol_ref. */
27429 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27430 && SYMBOL_REF_DECL (XEXP (x, 0))
27431 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27432 ca_loc->symbol_ref = XEXP (x, 0);
27433 /* Otherwise, look at a compile-time known user-level function
27434 declaration. */
27435 else if (MEM_P (x)
27436 && MEM_EXPR (x)
27437 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27438 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27439 }
27440
27441 ca_loc->block = insn_scope (prev);
27442 if (call_arg_locations)
27443 call_arg_loc_last->next = ca_loc;
27444 else
27445 call_arg_locations = ca_loc;
27446 call_arg_loc_last = ca_loc;
27447 }
27448 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27449 {
27450 newloc->label = last_label;
27451 newloc->view = view;
27452 }
27453 else
27454 {
27455 if (!last_postcall_label)
27456 {
27457 sprintf (loclabel, "%s-1", last_label);
27458 last_postcall_label = ggc_strdup (loclabel);
27459 }
27460 newloc->label = last_postcall_label;
27461 /* ??? This view is at last_label, not last_label-1, but we
27462 could only assume view at last_label-1 is zero if we could
27463 assume calls always have length greater than one. This is
27464 probably true in general, though there might be a rare
27465 exception to this rule, e.g. if a call insn is optimized out
27466 by target magic. Then, even the -1 in the label will be
27467 wrong, which might invalidate the range. Anyway, using view,
27468 though technically possibly incorrect, will work as far as
27469 ranges go: since L-1 is in the middle of the call insn,
27470 (L-1).0 and (L-1).V shouldn't make any difference, and having
27471 the loclist entry refer to the .loc entry might be useful, so
27472 leave it like this. */
27473 newloc->view = view;
27474 }
27475
27476 if (var_loc_p && flag_debug_asm)
27477 {
27478 const char *name, *sep, *patstr;
27479 if (decl && DECL_NAME (decl))
27480 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27481 else
27482 name = "";
27483 if (NOTE_VAR_LOCATION_LOC (loc_note))
27484 {
27485 sep = " => ";
27486 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27487 }
27488 else
27489 {
27490 sep = " ";
27491 patstr = "RESET";
27492 }
27493 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27494 name, sep, patstr);
27495 }
27496
27497 last_var_location_insn = next_real;
27498 last_in_cold_section_p = in_cold_section_p;
27499 }
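/* For illustration: for a NOTE_INSN_VAR_LOCATION the code above emits an
   internal label built from "LVL" and loclabel_num (e.g. ".LVL5:" on
   typical ELF targets) and records it -- or the post-call variant
   "<label>-1" -- in the decl's location list via add_var_loc_to_decl,
   whereas for a call site it instead queues a call_arg_loc_node carrying
   the label, the callee SYMBOL_REF when known, and the enclosing scope
   BLOCK.  */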
27500
27501 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27502 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27503 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27504 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27505 BLOCK_FRAGMENT_ORIGIN links. */
27506 static bool
27507 block_within_block_p (tree block, tree outer, bool bothways)
27508 {
27509 if (block == outer)
27510 return true;
27511
27512 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27513 for (tree context = BLOCK_SUPERCONTEXT (block);
27514 context != outer;
27515 context = BLOCK_SUPERCONTEXT (context))
27516 if (!context || TREE_CODE (context) != BLOCK)
27517 return false;
27518
27519 if (!bothways)
27520 return true;
27521
27522 /* Now check that each block is actually referenced by its
27523 parent. */
27524 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27525 context = BLOCK_SUPERCONTEXT (context))
27526 {
27527 if (BLOCK_FRAGMENT_ORIGIN (context))
27528 {
27529 gcc_assert (!BLOCK_SUBBLOCKS (context));
27530 context = BLOCK_FRAGMENT_ORIGIN (context);
27531 }
27532 for (tree sub = BLOCK_SUBBLOCKS (context);
27533 sub != block;
27534 sub = BLOCK_CHAIN (sub))
27535 if (!sub)
27536 return false;
27537 if (context == outer)
27538 return true;
27539 else
27540 block = context;
27541 }
27542 }
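/* For example, a BLOCK that still points to OUTER through its
   BLOCK_SUPERCONTEXT chain but has been dropped from its parent's
   BLOCK_SUBBLOCKS list passes the one-way check above yet fails the
   BOTHWAYS check; that is exactly the situation the checking assert in
   dwarf2out_inline_entry below is meant to catch.  */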
27543
27544 /* Called during final while assembling the marker of the entry point
27545 for an inlined function. */
27546
27547 static void
27548 dwarf2out_inline_entry (tree block)
27549 {
27550 gcc_assert (debug_inline_points);
27551
27552 /* If we can't represent it, don't bother. */
27553 if (!(dwarf_version >= 3 || !dwarf_strict))
27554 return;
27555
27556 gcc_assert (DECL_P (block_ultimate_origin (block)));
27557
27558 /* Sanity check the block tree. This would catch a case in which
27559 BLOCK got removed from the tree reachable from the outermost
27560 lexical block, but got retained in markers. It would still link
27561 back to its parents, but some ancestor would be missing a link
27562 down the path to the sub BLOCK. If the block got removed, its
27563 BLOCK_NUMBER will not be a usable value. */
27564 if (flag_checking)
27565 gcc_assert (block_within_block_p (block,
27566 DECL_INITIAL (current_function_decl),
27567 true));
27568
27569 gcc_assert (inlined_function_outer_scope_p (block));
27570 gcc_assert (!lookup_block_die (block));
27571
27572 if (BLOCK_FRAGMENT_ORIGIN (block))
27573 block = BLOCK_FRAGMENT_ORIGIN (block);
27574 /* Can the entry point ever not be at the beginning of an
27575 unfragmented lexical block? */
27576 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27577 || (cur_line_info_table
27578 && !ZERO_VIEW_P (cur_line_info_table->view))))
27579 return;
27580
27581 if (!inline_entry_data_table)
27582 inline_entry_data_table
27583 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27584
27585
27586 inline_entry_data **iedp
27587 = inline_entry_data_table->find_slot_with_hash (block,
27588 htab_hash_pointer (block),
27589 INSERT);
27590 if (*iedp)
27591 /* ??? Ideally, we'd record all entry points for the same inlined
27592 function (some may have been duplicated by e.g. unrolling), but
27593 we have no way to represent that ATM. */
27594 return;
27595
27596 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27597 ied->block = block;
27598 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27599 ied->label_num = BLOCK_NUMBER (block);
27600 if (cur_line_info_table)
27601 ied->view = cur_line_info_table->view;
27602
27603 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_INLINE_ENTRY_LABEL,
27604 BLOCK_NUMBER (block));
27605 }
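/* The label emitted just above is built from BLOCK_INLINE_ENTRY_LABEL and
   BLOCK_NUMBER (block), so for block number 42 it would come out as
   something like ".LBI42:" assuming the default "LBI" prefix; the recorded
   inline_entry_data is later used to describe the entry point (entry PC
   and entry view) of the corresponding inlined-subroutine DIE.  */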
27606
27607 /* Called from finalize_size_functions for size functions so that their body
27608 can be encoded in the debug info to describe the layout of variable-length
27609 structures. */
27610
27611 static void
27612 dwarf2out_size_function (tree decl)
27613 {
27614 set_early_dwarf s;
27615 function_to_dwarf_procedure (decl);
27616 }
27617
27618 /* Note in one location list that text section has changed. */
27619
27620 int
27621 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27622 {
27623 var_loc_list *list = *slot;
27624 if (list->first)
27625 list->last_before_switch
27626 = list->last->next ? list->last->next : list->last;
27627 return 1;
27628 }
27629
27630 /* Note in all location lists that text section has changed. */
27631
27632 static void
27633 var_location_switch_text_section (void)
27634 {
27635 if (decl_loc_table == NULL)
27636 return;
27637
27638 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27639 }
27640
27641 /* Create a new line number table. */
27642
27643 static dw_line_info_table *
27644 new_line_info_table (void)
27645 {
27646 dw_line_info_table *table;
27647
27648 table = ggc_cleared_alloc<dw_line_info_table> ();
27649 table->file_num = 1;
27650 table->line_num = 1;
27651 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27652 FORCE_RESET_NEXT_VIEW (table->view);
27653 table->symviews_since_reset = 0;
27654
27655 return table;
27656 }
27657
27658 /* Look up the "current" table into which we emit line info, so
27659 that we don't have to do it for every source line. */
27660
27661 static void
27662 set_cur_line_info_table (section *sec)
27663 {
27664 dw_line_info_table *table;
27665
27666 if (sec == text_section)
27667 table = text_section_line_info;
27668 else if (sec == cold_text_section)
27669 {
27670 table = cold_text_section_line_info;
27671 if (!table)
27672 {
27673 cold_text_section_line_info = table = new_line_info_table ();
27674 table->end_label = cold_end_label;
27675 }
27676 }
27677 else
27678 {
27679 const char *end_label;
27680
27681 if (crtl->has_bb_partition)
27682 {
27683 if (in_cold_section_p)
27684 end_label = crtl->subsections.cold_section_end_label;
27685 else
27686 end_label = crtl->subsections.hot_section_end_label;
27687 }
27688 else
27689 {
27690 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27691 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27692 current_function_funcdef_no);
27693 end_label = ggc_strdup (label);
27694 }
27695
27696 table = new_line_info_table ();
27697 table->end_label = end_label;
27698
27699 vec_safe_push (separate_line_info, table);
27700 }
27701
27702 if (output_asm_line_debug_info ())
27703 table->is_stmt = (cur_line_info_table
27704 ? cur_line_info_table->is_stmt
27705 : DWARF_LINE_DEFAULT_IS_STMT_START);
27706 cur_line_info_table = table;
27707 }
27708
27709
27710 /* We need to reset the locations at the beginning of each
27711 function. We can't do this in the end_function hook, because the
27712 declarations that use the locations won't have been output when
27713 that hook is called. Also compute have_multiple_function_sections here. */
27714
27715 static void
27716 dwarf2out_begin_function (tree fun)
27717 {
27718 section *sec = function_section (fun);
27719
27720 if (sec != text_section)
27721 have_multiple_function_sections = true;
27722
27723 if (crtl->has_bb_partition && !cold_text_section)
27724 {
27725 gcc_assert (current_function_decl == fun);
27726 cold_text_section = unlikely_text_section ();
27727 switch_to_section (cold_text_section);
27728 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27729 switch_to_section (sec);
27730 }
27731
27732 dwarf2out_note_section_used ();
27733 call_site_count = 0;
27734 tail_call_site_count = 0;
27735
27736 set_cur_line_info_table (sec);
27737 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27738 }
27739
27740 /* Helper function of dwarf2out_end_function, called only after emitting
27741 the very first function into assembly. Check if some .debug_loc range
27742 might end with a .LVL* label that could be equal to .Ltext0.
27743 In that case we must force using absolute addresses in .debug_loc ranges,
27744 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27745 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27746 list terminator.
27747 Set have_multiple_function_sections to true in that case and
27748 terminate htab traversal. */
27749
27750 int
27751 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27752 {
27753 var_loc_list *entry = *slot;
27754 struct var_loc_node *node;
27755
27756 node = entry->first;
27757 if (node && node->next && node->next->label)
27758 {
27759 unsigned int i;
27760 const char *label = node->next->label;
27761 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27762
27763 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27764 {
27765 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27766 if (strcmp (label, loclabel) == 0)
27767 {
27768 have_multiple_function_sections = true;
27769 return 0;
27770 }
27771 }
27772 }
27773 return 1;
27774 }
27775
27776 /* Hook called after emitting a function into assembly.
27777 This does something only for the very first function emitted. */
27778
27779 static void
27780 dwarf2out_end_function (unsigned int)
27781 {
27782 if (in_first_function_p
27783 && !have_multiple_function_sections
27784 && first_loclabel_num_not_at_text_label
27785 && decl_loc_table)
27786 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27787 in_first_function_p = false;
27788 maybe_at_text_label_p = false;
27789 }
27790
27791 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27792 front-ends register a translation unit even before dwarf2out_init is
27793 called. */
27794 static tree main_translation_unit = NULL_TREE;
27795
27796 /* Hook called by front-ends after they built their main translation unit.
27797 Associate comp_unit_die to UNIT. */
27798
27799 static void
27800 dwarf2out_register_main_translation_unit (tree unit)
27801 {
27802 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27803 && main_translation_unit == NULL_TREE);
27804 main_translation_unit = unit;
27805 /* If dwarf2out_init has not been called yet, it will perform the association
27806 itself looking at main_translation_unit. */
27807 if (decl_die_table != NULL)
27808 equate_decl_number_to_die (unit, comp_unit_die ());
27809 }
27810
27811 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27812
27813 static void
27814 push_dw_line_info_entry (dw_line_info_table *table,
27815 enum dw_line_info_opcode opcode, unsigned int val)
27816 {
27817 dw_line_info_entry e;
27818 e.opcode = opcode;
27819 e.val = val;
27820 vec_safe_push (table->entries, e);
27821 }
27822
27823 /* Output a label to mark the beginning of a source code line entry
27824 and record information relating to this source line, in
27825 'line_info_table' for later output of the .debug_line section. */
27826 /* ??? The discriminator parameter ought to be unsigned. */
27827
27828 static void
27829 dwarf2out_source_line (unsigned int line, unsigned int column,
27830 const char *filename,
27831 int discriminator, bool is_stmt)
27832 {
27833 unsigned int file_num;
27834 dw_line_info_table *table;
27835 static var_loc_view lvugid;
27836
27837 if (debug_info_level < DINFO_LEVEL_TERSE)
27838 return;
27839
27840 table = cur_line_info_table;
27841
27842 if (line == 0)
27843 {
27844 if (debug_variable_location_views
27845 && output_asm_line_debug_info ()
27846 && table && !RESETTING_VIEW_P (table->view))
27847 {
27848 /* If we're using the assembler to compute view numbers, we
27849 can't issue a .loc directive for line zero, so we can't
27850 get a view number at this point. We might attempt to
27851 compute it from the previous view, or equate it to a
27852 subsequent view (though it might not be there!), but
27853 since we're omitting the line number entry, we might as
27854 well omit the view number as well. That means pretending
27855 it's a view number zero, which might very well turn out
27856 to be correct. ??? Extend the assembler so that the
27857 compiler could emit e.g. ".locview .LVU#", to output a
27858 view without changing line number information. We'd then
27859 have to count it in symviews_since_reset; when it's omitted,
27860 it doesn't count. */
27861 if (!zero_view_p)
27862 zero_view_p = BITMAP_GGC_ALLOC ();
27863 bitmap_set_bit (zero_view_p, table->view);
27864 if (flag_debug_asm)
27865 {
27866 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27867 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27868 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27869 ASM_COMMENT_START);
27870 assemble_name (asm_out_file, label);
27871 putc ('\n', asm_out_file);
27872 }
27873 table->view = ++lvugid;
27874 }
27875 return;
27876 }
27877
27878 /* The discriminator column was added in DWARF 4. Simplify the code
27879 below by just removing the discriminator if we're not supposed to output it. */
27880 if (dwarf_version < 4 && dwarf_strict)
27881 discriminator = 0;
27882
27883 if (!debug_column_info)
27884 column = 0;
27885
27886 file_num = maybe_emit_file (lookup_filename (filename));
27887
27888 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27889 the debugger has used the second (possibly duplicate) line number
27890 at the beginning of the function to mark the end of the prologue.
27891 We could eliminate any other duplicates within the function. For
27892 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27893 that second line number entry. */
27894 /* Recall that this end-of-prologue indication is *not* the same thing
27895 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27896 to which the hook corresponds, follows the last insn that was
27897 emitted by gen_prologue. What we need is to precede the first insn
27898 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27899 insn that corresponds to something the user wrote. These may be
27900 very different locations once scheduling is enabled. */
27901
27902 if (0 && file_num == table->file_num
27903 && line == table->line_num
27904 && column == table->column_num
27905 && discriminator == table->discrim_num
27906 && is_stmt == table->is_stmt)
27907 return;
27908
27909 switch_to_section (current_function_section ());
27910
27911 /* If requested, emit something human-readable. */
27912 if (flag_debug_asm)
27913 {
27914 if (debug_column_info)
27915 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
27916 filename, line, column);
27917 else
27918 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
27919 filename, line);
27920 }
27921
27922 if (output_asm_line_debug_info ())
27923 {
27924 /* Emit the .loc directive understood by GNU as. */
27925 /* "\t.loc %u %u 0 is_stmt %u discriminator %u",
27926 file_num, line, is_stmt, discriminator */
27927 fputs ("\t.loc ", asm_out_file);
27928 fprint_ul (asm_out_file, file_num);
27929 putc (' ', asm_out_file);
27930 fprint_ul (asm_out_file, line);
27931 putc (' ', asm_out_file);
27932 fprint_ul (asm_out_file, column);
27933
27934 if (is_stmt != table->is_stmt)
27935 {
27936 #if HAVE_GAS_LOC_STMT
27937 fputs (" is_stmt ", asm_out_file);
27938 putc (is_stmt ? '1' : '0', asm_out_file);
27939 #endif
27940 }
27941 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
27942 {
27943 gcc_assert (discriminator > 0);
27944 fputs (" discriminator ", asm_out_file);
27945 fprint_ul (asm_out_file, (unsigned long) discriminator);
27946 }
27947 if (debug_variable_location_views)
27948 {
27949 if (!RESETTING_VIEW_P (table->view))
27950 {
27951 table->symviews_since_reset++;
27952 if (table->symviews_since_reset > symview_upper_bound)
27953 symview_upper_bound = table->symviews_since_reset;
27954 /* When we're using the assembler to compute view
27955 numbers, we output symbolic labels after "view" in
27956 .loc directives, and the assembler will set them for
27957 us, so that we can refer to the view numbers in
27958 location lists. The only exceptions are when we know
27959 a view will be zero: "-0" is a forced reset, used
27960 e.g. in the beginning of functions, whereas "0" tells
27961 the assembler to check that there was a PC change
27962 since the previous view, in a way that implicitly
27963 resets the next view. */
27964 fputs (" view ", asm_out_file);
27965 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27966 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27967 assemble_name (asm_out_file, label);
27968 table->view = ++lvugid;
27969 }
27970 else
27971 {
27972 table->symviews_since_reset = 0;
27973 if (FORCE_RESETTING_VIEW_P (table->view))
27974 fputs (" view -0", asm_out_file);
27975 else
27976 fputs (" view 0", asm_out_file);
27977 /* Mark the present view as a zero view. Earlier debug
27978 binds may have already added its id to loclists to be
27979 emitted later, so we can't reuse the id for something
27980 else. However, it's good to know whether a view is
27981 known to be zero, because then we may be able to
27982 optimize out locviews that are all zeros, so take
27983 note of it in zero_view_p. */
27984 if (!zero_view_p)
27985 zero_view_p = BITMAP_GGC_ALLOC ();
27986 bitmap_set_bit (zero_view_p, lvugid);
27987 table->view = ++lvugid;
27988 }
27989 }
27990 putc ('\n', asm_out_file);
27991 }
27992 else
27993 {
27994 unsigned int label_num = ++line_info_label_num;
27995
27996 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
27997
27998 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
27999 push_dw_line_info_entry (table, LI_adv_address, label_num);
28000 else
28001 push_dw_line_info_entry (table, LI_set_address, label_num);
28002 if (debug_variable_location_views)
28003 {
28004 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
28005 if (resetting)
28006 table->view = 0;
28007
28008 if (flag_debug_asm)
28009 fprintf (asm_out_file, "\t%s view %s%d\n",
28010 ASM_COMMENT_START,
28011 resetting ? "-" : "",
28012 table->view);
28013
28014 table->view++;
28015 }
28016 if (file_num != table->file_num)
28017 push_dw_line_info_entry (table, LI_set_file, file_num);
28018 if (discriminator != table->discrim_num)
28019 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
28020 if (is_stmt != table->is_stmt)
28021 push_dw_line_info_entry (table, LI_negate_stmt, 0);
28022 push_dw_line_info_entry (table, LI_set_line, line);
28023 if (debug_column_info)
28024 push_dw_line_info_entry (table, LI_set_column, column);
28025 }
28026
28027 table->file_num = file_num;
28028 table->line_num = line;
28029 table->column_num = column;
28030 table->discrim_num = discriminator;
28031 table->is_stmt = is_stmt;
28032 table->in_use = true;
28033 }
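/* A minimal sketch of what the two paths above produce, with file number 1
   and hypothetical line/column values: with output_asm_line_debug_info and
   variable-location views enabled, a directive along the lines of
       .loc 1 42 7 view .LVU3
   is emitted and the assembler fills in the line table and the view number,
   whereas otherwise a label such as ".LM3:" (LINE_CODE_LABEL) is output and
   matching LI_set_address/LI_set_line entries are pushed onto
   cur_line_info_table for later encoding into .debug_line.  */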
28034
28035 /* Record the beginning of a new source file. */
28036
28037 static void
28038 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
28039 {
28040 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28041 {
28042 macinfo_entry e;
28043 e.code = DW_MACINFO_start_file;
28044 e.lineno = lineno;
28045 e.info = ggc_strdup (filename);
28046 vec_safe_push (macinfo_table, e);
28047 }
28048 }
28049
28050 /* Record the end of a source file. */
28051
28052 static void
28053 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
28054 {
28055 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28056 {
28057 macinfo_entry e;
28058 e.code = DW_MACINFO_end_file;
28059 e.lineno = lineno;
28060 e.info = NULL;
28061 vec_safe_push (macinfo_table, e);
28062 }
28063 }
28064
28065 /* Called from debug_define in toplev.c. The `buffer' parameter contains
28066 the tail part of the directive line, i.e. the part which is past the
28067 initial whitespace, #, whitespace, directive-name, whitespace part. */
28068
28069 static void
28070 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
28071 const char *buffer ATTRIBUTE_UNUSED)
28072 {
28073 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28074 {
28075 macinfo_entry e;
28076 /* Insert a dummy first entry to be able to optimize the whole
28077 predefined macro block using DW_MACRO_import. */
28078 if (macinfo_table->is_empty () && lineno <= 1)
28079 {
28080 e.code = 0;
28081 e.lineno = 0;
28082 e.info = NULL;
28083 vec_safe_push (macinfo_table, e);
28084 }
28085 e.code = DW_MACINFO_define;
28086 e.lineno = lineno;
28087 e.info = ggc_strdup (buffer);
28088 vec_safe_push (macinfo_table, e);
28089 }
28090 }
28091
28092 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
28093 the tail part of the directive line, i.e. the part which is past the
28094 initial whitespace, #, whitespace, directive-name, whitespace part. */
28095
28096 static void
28097 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
28098 const char *buffer ATTRIBUTE_UNUSED)
28099 {
28100 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28101 {
28102 macinfo_entry e;
28103 /* Insert a dummy first entry to be able to optimize the whole
28104 predefined macro block using DW_MACRO_import. */
28105 if (macinfo_table->is_empty () && lineno <= 1)
28106 {
28107 e.code = 0;
28108 e.lineno = 0;
28109 e.info = NULL;
28110 vec_safe_push (macinfo_table, e);
28111 }
28112 e.code = DW_MACINFO_undef;
28113 e.lineno = lineno;
28114 e.info = ggc_strdup (buffer);
28115 vec_safe_push (macinfo_table, e);
28116 }
28117 }
28118
28119 /* Helpers to manipulate the hash table of macinfo entries used to share define/undef runs between CUs via DW_MACRO_import. */
28120
28121 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28122 {
28123 static inline hashval_t hash (const macinfo_entry *);
28124 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28125 };
28126
28127 inline hashval_t
28128 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28129 {
28130 return htab_hash_string (entry->info);
28131 }
28132
28133 inline bool
28134 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28135 const macinfo_entry *entry2)
28136 {
28137 return !strcmp (entry1->info, entry2->info);
28138 }
28139
28140 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28141
28142 /* Output a single .debug_macinfo entry. */
28143
28144 static void
28145 output_macinfo_op (macinfo_entry *ref)
28146 {
28147 int file_num;
28148 size_t len;
28149 struct indirect_string_node *node;
28150 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28151 struct dwarf_file_data *fd;
28152
28153 switch (ref->code)
28154 {
28155 case DW_MACINFO_start_file:
28156 fd = lookup_filename (ref->info);
28157 file_num = maybe_emit_file (fd);
28158 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28159 dw2_asm_output_data_uleb128 (ref->lineno,
28160 "Included from line number %lu",
28161 (unsigned long) ref->lineno);
28162 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28163 break;
28164 case DW_MACINFO_end_file:
28165 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28166 break;
28167 case DW_MACINFO_define:
28168 case DW_MACINFO_undef:
28169 len = strlen (ref->info) + 1;
28170 if (!dwarf_strict
28171 && len > DWARF_OFFSET_SIZE
28172 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28173 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28174 {
28175 ref->code = ref->code == DW_MACINFO_define
28176 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28177 output_macinfo_op (ref);
28178 return;
28179 }
28180 dw2_asm_output_data (1, ref->code,
28181 ref->code == DW_MACINFO_define
28182 ? "Define macro" : "Undefine macro");
28183 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28184 (unsigned long) ref->lineno);
28185 dw2_asm_output_nstring (ref->info, -1, "The macro");
28186 break;
28187 case DW_MACRO_define_strp:
28188 case DW_MACRO_undef_strp:
28189 /* NB: dwarf2out_finish performs:
28190 1. save_macinfo_strings
28191 2. hash table traverse of index_string
28192 3. output_macinfo -> output_macinfo_op
28193 4. output_indirect_strings
28194 -> hash table traverse of output_index_string
28195
28196 When output_macinfo_op is called, all index strings have been
28197 added to hash table by save_macinfo_strings and we can't pass
28198 INSERT to find_slot_with_hash which may expand hash table, even
28199 if no insertion is needed, and change hash table traverse order
28200 between index_string and output_index_string. */
28201 node = find_AT_string (ref->info, NO_INSERT);
28202 gcc_assert (node
28203 && (node->form == DW_FORM_strp
28204 || node->form == dwarf_FORM (DW_FORM_strx)));
28205 dw2_asm_output_data (1, ref->code,
28206 ref->code == DW_MACRO_define_strp
28207 ? "Define macro strp"
28208 : "Undefine macro strp");
28209 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28210 (unsigned long) ref->lineno);
28211 if (node->form == DW_FORM_strp)
28212 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28213 debug_str_section, "The macro: \"%s\"",
28214 ref->info);
28215 else
28216 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28217 ref->info);
28218 break;
28219 case DW_MACRO_import:
28220 dw2_asm_output_data (1, ref->code, "Import");
28221 ASM_GENERATE_INTERNAL_LABEL (label,
28222 DEBUG_MACRO_SECTION_LABEL,
28223 ref->lineno + macinfo_label_base);
28224 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28225 break;
28226 default:
28227 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28228 ASM_COMMENT_START, (unsigned long) ref->code);
28229 break;
28230 }
28231 }
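/* By way of (hypothetical) example, a DW_MACINFO_define entry for
   "#define FOO 1" at line 3 is emitted by the code above as the opcode
   byte 0x01, the uleb128 line number 3 and the NUL-terminated macro text
   "FOO 1", while the _strp variants replace the inline string with a
   .debug_str offset (or a uleb128 string index when DW_FORM_strx is in
   use).  */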
28232
28233 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28234 other compilation unit .debug_macinfo sections. IDX is the index of the
28235 first define/undef op. Return the number of ops that should be
28236 emitted in a comdat .debug_macinfo section and emit
28237 a DW_MACRO_import entry referencing it.
28238 If the define/undef entry should be emitted normally, return 0. */
28239
28240 static unsigned
28241 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28242 macinfo_hash_type **macinfo_htab)
28243 {
28244 macinfo_entry *first, *second, *cur, *inc;
28245 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28246 unsigned char checksum[16];
28247 struct md5_ctx ctx;
28248 char *grp_name, *tail;
28249 const char *base;
28250 unsigned int i, count, encoded_filename_len, linebuf_len;
28251 macinfo_entry **slot;
28252
28253 first = &(*macinfo_table)[idx];
28254 second = &(*macinfo_table)[idx + 1];
28255
28256 /* Optimize only if there are at least two consecutive define/undef ops,
28257 and either all of them are before first DW_MACINFO_start_file
28258 with lineno {0,1} (i.e. predefined macro block), or all of them are
28259 in some included header file. */
28260 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28261 return 0;
28262 if (vec_safe_is_empty (files))
28263 {
28264 if (first->lineno > 1 || second->lineno > 1)
28265 return 0;
28266 }
28267 else if (first->lineno == 0)
28268 return 0;
28269
28270 /* Find the last define/undef entry that can be grouped together
28271 with first and at the same time compute md5 checksum of their
28272 codes, linenumbers and strings. */
28273 md5_init_ctx (&ctx);
28274 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28275 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28276 break;
28277 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28278 break;
28279 else
28280 {
28281 unsigned char code = cur->code;
28282 md5_process_bytes (&code, 1, &ctx);
28283 checksum_uleb128 (cur->lineno, &ctx);
28284 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28285 }
28286 md5_finish_ctx (&ctx, checksum);
28287 count = i - idx;
28288
28289 /* From the containing include filename (if any) pick up just
28290 usable characters from its basename. */
28291 if (vec_safe_is_empty (files))
28292 base = "";
28293 else
28294 base = lbasename (files->last ().info);
28295 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28296 if (ISIDNUM (base[i]) || base[i] == '.')
28297 encoded_filename_len++;
28298 /* Count . at the end. */
28299 if (encoded_filename_len)
28300 encoded_filename_len++;
28301
28302 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28303 linebuf_len = strlen (linebuf);
28304
28305 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
28306 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28307 + 16 * 2 + 1);
28308 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28309 tail = grp_name + 4;
28310 if (encoded_filename_len)
28311 {
28312 for (i = 0; base[i]; i++)
28313 if (ISIDNUM (base[i]) || base[i] == '.')
28314 *tail++ = base[i];
28315 *tail++ = '.';
28316 }
28317 memcpy (tail, linebuf, linebuf_len);
28318 tail += linebuf_len;
28319 *tail++ = '.';
28320 for (i = 0; i < 16; i++)
28321 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28322
28323 /* Construct a macinfo_entry for DW_MACRO_import
28324 in the empty vector entry before the first define/undef. */
28325 inc = &(*macinfo_table)[idx - 1];
28326 inc->code = DW_MACRO_import;
28327 inc->lineno = 0;
28328 inc->info = ggc_strdup (grp_name);
28329 if (!*macinfo_htab)
28330 *macinfo_htab = new macinfo_hash_type (10);
28331 /* Avoid emitting duplicates. */
28332 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28333 if (*slot != NULL)
28334 {
28335 inc->code = 0;
28336 inc->info = NULL;
28337 /* If such an entry has been used before, just emit
28338 a DW_MACRO_import op. */
28339 inc = *slot;
28340 output_macinfo_op (inc);
28341 /* And clear all macinfo_entry in the range to avoid emitting them
28342 in the second pass. */
28343 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28344 {
28345 cur->code = 0;
28346 cur->info = NULL;
28347 }
28348 }
28349 else
28350 {
28351 *slot = inc;
28352 inc->lineno = (*macinfo_htab)->elements ();
28353 output_macinfo_op (inc);
28354 }
28355 return count;
28356 }
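/* As a concrete (made-up) example of the group name built above: a run of
   defines/undefs contained in an included header "config.h" and starting
   at line 1 would, with 4-byte DWARF offsets, get a comdat key of the form
   "wm4.config.h.1.<32 hex digits of the md5 sum>"; identical runs in other
   translation units compute the same key and therefore share a single
   comdat .debug_macinfo section.  */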
28357
28358 /* Save any strings needed by the macinfo table in the debug str
28359 table. All strings must be collected into the table by the time
28360 index_string is called. */
28361
28362 static void
28363 save_macinfo_strings (void)
28364 {
28365 unsigned len;
28366 unsigned i;
28367 macinfo_entry *ref;
28368
28369 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28370 {
28371 switch (ref->code)
28372 {
28373 /* Match the logic in output_macinfo_op to decide on
28374 indirect strings. */
28375 case DW_MACINFO_define:
28376 case DW_MACINFO_undef:
28377 len = strlen (ref->info) + 1;
28378 if (!dwarf_strict
28379 && len > DWARF_OFFSET_SIZE
28380 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28381 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28382 set_indirect_string (find_AT_string (ref->info));
28383 break;
28384 case DW_MACINFO_start_file:
28385 /* -gsplit-dwarf -g3 will also output filename as indirect
28386 string. */
28387 if (!dwarf_split_debug_info)
28388 break;
28389 /* Fall through. */
28390 case DW_MACRO_define_strp:
28391 case DW_MACRO_undef_strp:
28392 set_indirect_string (find_AT_string (ref->info));
28393 break;
28394 default:
28395 break;
28396 }
28397 }
28398 }
28399
28400 /* Output macinfo section(s). */
28401
28402 static void
28403 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28404 {
28405 unsigned i;
28406 unsigned long length = vec_safe_length (macinfo_table);
28407 macinfo_entry *ref;
28408 vec<macinfo_entry, va_gc> *files = NULL;
28409 macinfo_hash_type *macinfo_htab = NULL;
28410 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28411
28412 if (! length)
28413 return;
28414
28415 /* output_macinfo* uses these interchangeably. */
28416 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28417 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28418 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28419 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28420
28421 /* AIX Assembler inserts the length, so adjust the reference to match the
28422 offset expected by debuggers. */
28423 strcpy (dl_section_ref, debug_line_label);
28424 if (XCOFF_DEBUGGING_INFO)
28425 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28426
28427 /* For .debug_macro emit the section header. */
28428 if (!dwarf_strict || dwarf_version >= 5)
28429 {
28430 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28431 "DWARF macro version number");
28432 if (DWARF_OFFSET_SIZE == 8)
28433 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28434 else
28435 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28436 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28437 debug_line_section, NULL);
28438 }
28439
28440 /* The first loop emits the primary .debug_macinfo section
28441 and clears each macinfo_entry after its op has been emitted.
28442 If a longer range of define/undef ops can be optimized using
28443 DW_MACRO_import, the DW_MACRO_import op is emitted and kept in
28444 the vector before the first define/undef in the range, and the
28445 whole range of define/undef ops is kept but not emitted here. */
28446 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28447 {
28448 switch (ref->code)
28449 {
28450 case DW_MACINFO_start_file:
28451 vec_safe_push (files, *ref);
28452 break;
28453 case DW_MACINFO_end_file:
28454 if (!vec_safe_is_empty (files))
28455 files->pop ();
28456 break;
28457 case DW_MACINFO_define:
28458 case DW_MACINFO_undef:
28459 if ((!dwarf_strict || dwarf_version >= 5)
28460 && HAVE_COMDAT_GROUP
28461 && vec_safe_length (files) != 1
28462 && i > 0
28463 && i + 1 < length
28464 && (*macinfo_table)[i - 1].code == 0)
28465 {
28466 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28467 if (count)
28468 {
28469 i += count - 1;
28470 continue;
28471 }
28472 }
28473 break;
28474 case 0:
28475 /* A dummy entry may be inserted at the beginning to be able
28476 to optimize the whole block of predefined macros. */
28477 if (i == 0)
28478 continue;
28479 default:
28480 break;
28481 }
28482 output_macinfo_op (ref);
28483 ref->info = NULL;
28484 ref->code = 0;
28485 }
28486
28487 if (!macinfo_htab)
28488 return;
28489
28490 /* Save the number of transparent includes so we can adjust the
28491 label number for the fat LTO object DWARF. */
28492 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28493
28494 delete macinfo_htab;
28495 macinfo_htab = NULL;
28496
28497 /* If any DW_MACRO_import entries were used, terminate the current chain
28498 at each of them, switch to a new comdat .debug_macinfo
28499 section and emit the define/undef entries within it. */
28500 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28501 switch (ref->code)
28502 {
28503 case 0:
28504 continue;
28505 case DW_MACRO_import:
28506 {
28507 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28508 tree comdat_key = get_identifier (ref->info);
28509 /* Terminate the previous .debug_macinfo section. */
28510 dw2_asm_output_data (1, 0, "End compilation unit");
28511 targetm.asm_out.named_section (debug_macinfo_section_name,
28512 SECTION_DEBUG
28513 | SECTION_LINKONCE
28514 | (early_lto_debug
28515 ? SECTION_EXCLUDE : 0),
28516 comdat_key);
28517 ASM_GENERATE_INTERNAL_LABEL (label,
28518 DEBUG_MACRO_SECTION_LABEL,
28519 ref->lineno + macinfo_label_base);
28520 ASM_OUTPUT_LABEL (asm_out_file, label);
28521 ref->code = 0;
28522 ref->info = NULL;
28523 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28524 "DWARF macro version number");
28525 if (DWARF_OFFSET_SIZE == 8)
28526 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28527 else
28528 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28529 }
28530 break;
28531 case DW_MACINFO_define:
28532 case DW_MACINFO_undef:
28533 output_macinfo_op (ref);
28534 ref->code = 0;
28535 ref->info = NULL;
28536 break;
28537 default:
28538 gcc_unreachable ();
28539 }
28540
28541 macinfo_label_base += macinfo_label_base_adj;
28542 }
28543
28544 /* Initialize the various sections and labels for dwarf output, either for
28545 the normal debug info or, if EARLY_LTO_DEBUG, for the early LTO debug
28546 sections. Returns the generation (zero-based count of calls to this function). */
28547
28548 static unsigned
28549 init_sections_and_labels (bool early_lto_debug)
28550 {
28551 /* As we may get called multiple times, keep a generation count for
28552 labels. */
28553 static unsigned generation = 0;
28554
28555 if (early_lto_debug)
28556 {
28557 if (!dwarf_split_debug_info)
28558 {
28559 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28560 SECTION_DEBUG | SECTION_EXCLUDE,
28561 NULL);
28562 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28563 SECTION_DEBUG | SECTION_EXCLUDE,
28564 NULL);
28565 debug_macinfo_section_name
28566 = ((dwarf_strict && dwarf_version < 5)
28567 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28568 debug_macinfo_section = get_section (debug_macinfo_section_name,
28569 SECTION_DEBUG
28570 | SECTION_EXCLUDE, NULL);
28571 }
28572 else
28573 {
28574 /* ??? Which of the following do we need early? */
28575 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28576 SECTION_DEBUG | SECTION_EXCLUDE,
28577 NULL);
28578 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28579 SECTION_DEBUG | SECTION_EXCLUDE,
28580 NULL);
28581 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28582 SECTION_DEBUG
28583 | SECTION_EXCLUDE, NULL);
28584 debug_skeleton_abbrev_section
28585 = get_section (DEBUG_LTO_ABBREV_SECTION,
28586 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28587 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28588 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28589 generation);
28590
28591 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28592 stay in the main .o, but the skeleton_line goes into the split
28593 off dwo. */
28594 debug_skeleton_line_section
28595 = get_section (DEBUG_LTO_LINE_SECTION,
28596 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28597 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28598 DEBUG_SKELETON_LINE_SECTION_LABEL,
28599 generation);
28600 debug_str_offsets_section
28601 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28602 SECTION_DEBUG | SECTION_EXCLUDE,
28603 NULL);
28604 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28605 DEBUG_SKELETON_INFO_SECTION_LABEL,
28606 generation);
28607 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28608 DEBUG_STR_DWO_SECTION_FLAGS,
28609 NULL);
28610 debug_macinfo_section_name
28611 = ((dwarf_strict && dwarf_version < 5)
28612 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28613 debug_macinfo_section = get_section (debug_macinfo_section_name,
28614 SECTION_DEBUG | SECTION_EXCLUDE,
28615 NULL);
28616 }
28617 /* For macro info and the file table we have to refer to a
28618 debug_line section. */
28619 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28620 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28621 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28622 DEBUG_LINE_SECTION_LABEL, generation);
28623
28624 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28625 DEBUG_STR_SECTION_FLAGS
28626 | SECTION_EXCLUDE, NULL);
28627 if (!dwarf_split_debug_info)
28628 debug_line_str_section
28629 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28630 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28631 }
28632 else
28633 {
28634 if (!dwarf_split_debug_info)
28635 {
28636 debug_info_section = get_section (DEBUG_INFO_SECTION,
28637 SECTION_DEBUG, NULL);
28638 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28639 SECTION_DEBUG, NULL);
28640 debug_loc_section = get_section (dwarf_version >= 5
28641 ? DEBUG_LOCLISTS_SECTION
28642 : DEBUG_LOC_SECTION,
28643 SECTION_DEBUG, NULL);
28644 debug_macinfo_section_name
28645 = ((dwarf_strict && dwarf_version < 5)
28646 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28647 debug_macinfo_section = get_section (debug_macinfo_section_name,
28648 SECTION_DEBUG, NULL);
28649 }
28650 else
28651 {
28652 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28653 SECTION_DEBUG | SECTION_EXCLUDE,
28654 NULL);
28655 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28656 SECTION_DEBUG | SECTION_EXCLUDE,
28657 NULL);
28658 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28659 SECTION_DEBUG, NULL);
28660 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28661 SECTION_DEBUG, NULL);
28662 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28663 SECTION_DEBUG, NULL);
28664 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28665 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28666 generation);
28667
28668 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28669 stay in the main .o, but the skeleton_line goes into the
28670 split off dwo. */
28671 debug_skeleton_line_section
28672 = get_section (DEBUG_DWO_LINE_SECTION,
28673 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28674 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28675 DEBUG_SKELETON_LINE_SECTION_LABEL,
28676 generation);
28677 debug_str_offsets_section
28678 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28679 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28680 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28681 DEBUG_SKELETON_INFO_SECTION_LABEL,
28682 generation);
28683 debug_loc_section = get_section (dwarf_version >= 5
28684 ? DEBUG_DWO_LOCLISTS_SECTION
28685 : DEBUG_DWO_LOC_SECTION,
28686 SECTION_DEBUG | SECTION_EXCLUDE,
28687 NULL);
28688 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28689 DEBUG_STR_DWO_SECTION_FLAGS,
28690 NULL);
28691 debug_macinfo_section_name
28692 = ((dwarf_strict && dwarf_version < 5)
28693 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28694 debug_macinfo_section = get_section (debug_macinfo_section_name,
28695 SECTION_DEBUG | SECTION_EXCLUDE,
28696 NULL);
28697 }
28698 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28699 SECTION_DEBUG, NULL);
28700 debug_line_section = get_section (DEBUG_LINE_SECTION,
28701 SECTION_DEBUG, NULL);
28702 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28703 SECTION_DEBUG, NULL);
28704 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28705 SECTION_DEBUG, NULL);
28706 debug_str_section = get_section (DEBUG_STR_SECTION,
28707 DEBUG_STR_SECTION_FLAGS, NULL);
28708 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28709 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28710 DEBUG_STR_SECTION_FLAGS, NULL);
28711
28712 debug_ranges_section = get_section (dwarf_version >= 5
28713 ? DEBUG_RNGLISTS_SECTION
28714 : DEBUG_RANGES_SECTION,
28715 SECTION_DEBUG, NULL);
28716 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28717 SECTION_DEBUG, NULL);
28718 }
28719
28720 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28721 DEBUG_ABBREV_SECTION_LABEL, generation);
28722 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28723 DEBUG_INFO_SECTION_LABEL, generation);
28724 info_section_emitted = false;
28725 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28726 DEBUG_LINE_SECTION_LABEL, generation);
28727 /* There are up to 4 unique ranges labels per generation.
28728 See also output_rnglists. */
28729 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28730 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28731 if (dwarf_version >= 5 && dwarf_split_debug_info)
28732 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28733 DEBUG_RANGES_SECTION_LABEL,
28734 1 + generation * 4);
28735 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28736 DEBUG_ADDR_SECTION_LABEL, generation);
28737 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28738 (dwarf_strict && dwarf_version < 5)
28739 ? DEBUG_MACINFO_SECTION_LABEL
28740 : DEBUG_MACRO_SECTION_LABEL, generation);
28741 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28742 generation);
28743
28744 ++generation;
28745 return generation - 1;
28746 }
28747
28748 /* Set up for Dwarf output at the start of compilation. */
28749
28750 static void
28751 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28752 {
28753 /* Allocate the file_table. */
28754 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28755
28756 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28757 /* Allocate the decl_die_table. */
28758 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28759
28760 /* Allocate the decl_loc_table. */
28761 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28762
28763 /* Allocate the cached_dw_loc_list_table. */
28764 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28765
28766 /* Allocate the initial hunk of the abbrev_die_table. */
28767 vec_alloc (abbrev_die_table, 256);
28768 /* Zero-th entry is allocated, but unused. */
28769 abbrev_die_table->quick_push (NULL);
28770
28771 /* Allocate the dwarf_proc_stack_usage_map. */
28772 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28773
28774 /* Allocate the pubtypes and pubnames vectors. */
28775 vec_alloc (pubname_table, 32);
28776 vec_alloc (pubtype_table, 32);
28777
28778 vec_alloc (incomplete_types, 64);
28779
28780 vec_alloc (used_rtx_array, 32);
28781
28782 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28783 vec_alloc (macinfo_table, 64);
28784 #endif
28785
28786 /* If front-ends already registered a main translation unit but we were not
28787 ready to perform the association, do this now. */
28788 if (main_translation_unit != NULL_TREE)
28789 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28790 }
28791
28792 /* Called before compile () starts outputting functions, variables
28793 and toplevel asms into assembly. */
28794
28795 static void
28796 dwarf2out_assembly_start (void)
28797 {
28798 if (text_section_line_info)
28799 return;
28800
28801 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28802 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28803 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28804 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28805 COLD_TEXT_SECTION_LABEL, 0);
28806 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28807
28808 switch_to_section (text_section);
28809 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28810 #endif
28811
28812 /* Make sure the line number table for .text always exists. */
28813 text_section_line_info = new_line_info_table ();
28814 text_section_line_info->end_label = text_end_label;
28815
28816 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28817 cur_line_info_table = text_section_line_info;
28818 #endif
28819
28820 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28821 && dwarf2out_do_cfi_asm ()
28822 && !dwarf2out_do_eh_frame ())
28823 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28824 }
28825
28826 /* A helper function for dwarf2out_finish called through
28827 htab_traverse. Assign a string its index. All strings must be
28828 collected into the table by the time index_string is called,
28829 because the indexing code relies on htab_traverse to traverse nodes
28830 in the same order for each run. */
28831
28832 int
28833 index_string (indirect_string_node **h, unsigned int *index)
28834 {
28835 indirect_string_node *node = *h;
28836
28837 find_string_form (node);
28838 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28839 {
28840 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28841 node->index = *index;
28842 *index += 1;
28843 }
28844 return 1;
28845 }
28846
28847 /* A helper function for output_indirect_strings called through
28848 htab_traverse. Output the offset to a string and update the
28849 current offset. */
28850
28851 int
28852 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28853 {
28854 indirect_string_node *node = *h;
28855
28856 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28857 {
28858 /* Assert that this node has been assigned an index. */
28859 gcc_assert (node->index != NO_INDEX_ASSIGNED
28860 && node->index != NOT_INDEXED);
28861 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28862 "indexed string 0x%x: %s", node->index, node->str);
28863 *offset += strlen (node->str) + 1;
28864 }
28865 return 1;
28866 }
28867
28868 /* A helper function for dwarf2out_finish called through
28869 htab_traverse. Output the indexed string. */
28870
28871 int
28872 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28873 {
28874 struct indirect_string_node *node = *h;
28875
28876 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28877 {
28878 /* Assert that the strings are output in the same order as their
28879 indexes were assigned. */
28880 gcc_assert (*cur_idx == node->index);
28881 assemble_string (node->str, strlen (node->str) + 1);
28882 *cur_idx += 1;
28883 }
28884 return 1;
28885 }
28886
28887 /* A helper function for output_indirect_strings. Counts the number
28888 of indexed string offsets. Must match the logic of the functions
28889 output_index_string[_offset] above. */
28890 int
28891 count_index_strings (indirect_string_node **h, unsigned int *last_idx)
28892 {
28893 struct indirect_string_node *node = *h;
28894
28895 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28896 *last_idx += 1;
28897 return 1;
28898 }
28899
28900 /* A helper function for dwarf2out_finish called through
28901 htab_traverse. Emit one queued .debug_str string. */
28902
28903 int
28904 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
28905 {
28906 struct indirect_string_node *node = *h;
28907
28908 node->form = find_string_form (node);
28909 if (node->form == form && node->refcount > 0)
28910 {
28911 ASM_OUTPUT_LABEL (asm_out_file, node->label);
28912 assemble_string (node->str, strlen (node->str) + 1);
28913 }
28914
28915 return 1;
28916 }
28917
28918 /* Output the indexed string table. */
28919
28920 static void
28921 output_indirect_strings (void)
28922 {
28923 switch_to_section (debug_str_section);
28924 if (!dwarf_split_debug_info)
28925 debug_str_hash->traverse<enum dwarf_form,
28926 output_indirect_string> (DW_FORM_strp);
28927 else
28928 {
28929 unsigned int offset = 0;
28930 unsigned int cur_idx = 0;
28931
28932 if (skeleton_debug_str_hash)
28933 skeleton_debug_str_hash->traverse<enum dwarf_form,
28934 output_indirect_string> (DW_FORM_strp);
28935
28936 switch_to_section (debug_str_offsets_section);
28937 /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
28938 header. Note that we don't need to generate a label to the
28939 actual index table following the header here, because this is
28940 for the split dwarf case only. In an .dwo file there is only
28941 one string offsets table (and one debug info section). But
28942 if we would start using string offset tables for the main (or
28943 skeleton) unit, then we have to add a DW_AT_str_offsets_base
28944 pointing to the actual index after the header. Split dwarf
28945 units will never have a string offsets base attribute. When
28946 a split unit is moved into a .dwp file the string offsets can
28947 be found through the .debug_cu_index section table. */
28948 if (dwarf_version >= 5)
28949 {
28950 unsigned int last_idx = 0;
28951 unsigned long str_offsets_length;
28952
28953 debug_str_hash->traverse_noresize
28954 <unsigned int *, count_index_strings> (&last_idx);
28955 str_offsets_length = last_idx * DWARF_OFFSET_SIZE + 4;
28956 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
28957 dw2_asm_output_data (4, 0xffffffff,
28958 "Escape value for 64-bit DWARF extension");
28959 dw2_asm_output_data (DWARF_OFFSET_SIZE, str_offsets_length,
28960 "Length of string offsets unit");
28961 dw2_asm_output_data (2, 5, "DWARF string offsets version");
28962 dw2_asm_output_data (2, 0, "Header zero padding");
28963 }
28964 debug_str_hash->traverse_noresize
28965 <unsigned int *, output_index_string_offset> (&offset);
28966 switch_to_section (debug_str_dwo_section);
28967 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
28968 (&cur_idx);
28969 }
28970 }
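/* Rough picture of what the split-DWARF branch above emits for DWARF 5
   with 4-byte offsets and N indexed strings: a unit header in
   debug_str_offsets_section consisting of a 4-byte length (N * 4 + 4), a
   2-byte version (5) and 2 bytes of padding, followed by one 4-byte offset
   per string in index order; the string bodies themselves go to
   debug_str_dwo_section.  */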
28971
28972 /* Callback for htab_traverse to assign an index to an entry in the
28973 table, and to write that entry to the .debug_addr section. */
28974
28975 int
28976 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
28977 {
28978 addr_table_entry *entry = *slot;
28979
28980 if (entry->refcount == 0)
28981 {
28982 gcc_assert (entry->index == NO_INDEX_ASSIGNED
28983 || entry->index == NOT_INDEXED);
28984 return 1;
28985 }
28986
28987 gcc_assert (entry->index == *cur_index);
28988 (*cur_index)++;
28989
28990 switch (entry->kind)
28991 {
28992 case ate_kind_rtx:
28993 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
28994 "0x%x", entry->index);
28995 break;
28996 case ate_kind_rtx_dtprel:
28997 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
28998 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
28999 DWARF2_ADDR_SIZE,
29000 entry->addr.rtl);
29001 fputc ('\n', asm_out_file);
29002 break;
29003 case ate_kind_label:
29004 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
29005 "0x%x", entry->index);
29006 break;
29007 default:
29008 gcc_unreachable ();
29009 }
29010 return 1;
29011 }
29012
29013 /* A helper function for dwarf2out_finish. Counts the number
29014 of indexed addresses. Must match the logic of the function
29015 output_addr_table_entry above. */
29016 int
29017 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
29018 {
29019 addr_table_entry *entry = *slot;
29020
29021 if (entry->refcount > 0)
29022 *last_idx += 1;
29023 return 1;
29024 }
29025
29026 /* Produce the .debug_addr section. */
29027
29028 static void
29029 output_addr_table (void)
29030 {
29031 unsigned int index = 0;
29032 if (addr_index_table == NULL || addr_index_table->size () == 0)
29033 return;
29034
29035 switch_to_section (debug_addr_section);
29036 /* GNU DebugFission (https://gcc.gnu.org/wiki/DebugFission),
29037 which GCC uses to implement -gsplit-dwarf as a GNU DWARF extension
29038 predating DWARF5, didn't have a header for .debug_addr units.
29039 DWARF5 specifies a small header when address tables are used. */
29040 if (dwarf_version >= 5)
29041 {
29042 unsigned int last_idx = 0;
29043 unsigned long addrs_length;
29044
29045 addr_index_table->traverse_noresize
29046 <unsigned int *, count_index_addrs> (&last_idx);
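/* The extra 4 bytes cover the 2-byte version, the 1-byte address size
   and the 1-byte segment selector size emitted below; the length field
   itself is not counted.  */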
29047 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
29048
29049 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
29050 dw2_asm_output_data (4, 0xffffffff,
29051 "Escape value for 64-bit DWARF extension");
29052 dw2_asm_output_data (DWARF_OFFSET_SIZE, addrs_length,
29053 "Length of Address Unit");
29054 dw2_asm_output_data (2, 5, "DWARF addr version");
29055 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
29056 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
29057 }
29058 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
29059
29060 addr_index_table
29061 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
29062 }
29063
29064 #if ENABLE_ASSERT_CHECKING
29065 /* Verify that all marks are clear. */
29066
29067 static void
29068 verify_marks_clear (dw_die_ref die)
29069 {
29070 dw_die_ref c;
29071
29072 gcc_assert (! die->die_mark);
29073 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
29074 }
29075 #endif /* ENABLE_ASSERT_CHECKING */
29076
29077 /* Clear the marks for a die and its children.
29078 Be cool if the mark isn't set. */
29079
29080 static void
29081 prune_unmark_dies (dw_die_ref die)
29082 {
29083 dw_die_ref c;
29084
29085 if (die->die_mark)
29086 die->die_mark = 0;
29087 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
29088 }
29089
29090 /* Given LOC that is referenced by a DIE we're marking as used, find all
29091 DWARF procedures it references and mark them as used. */
29092
29093 static void
29094 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
29095 {
29096 for (; loc != NULL; loc = loc->dw_loc_next)
29097 switch (loc->dw_loc_opc)
29098 {
29099 case DW_OP_implicit_pointer:
29100 case DW_OP_convert:
29101 case DW_OP_reinterpret:
29102 case DW_OP_GNU_implicit_pointer:
29103 case DW_OP_GNU_convert:
29104 case DW_OP_GNU_reinterpret:
29105 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
29106 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29107 break;
29108 case DW_OP_GNU_variable_value:
29109 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29110 {
29111 dw_die_ref ref
29112 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29113 if (ref == NULL)
29114 break;
29115 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29116 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29117 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29118 }
29119 /* FALLTHRU */
29120 case DW_OP_call2:
29121 case DW_OP_call4:
29122 case DW_OP_call_ref:
29123 case DW_OP_const_type:
29124 case DW_OP_GNU_const_type:
29125 case DW_OP_GNU_parameter_ref:
29126 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
29127 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29128 break;
29129 case DW_OP_regval_type:
29130 case DW_OP_deref_type:
29131 case DW_OP_GNU_regval_type:
29132 case DW_OP_GNU_deref_type:
29133 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
29134 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
29135 break;
29136 case DW_OP_entry_value:
29137 case DW_OP_GNU_entry_value:
29138 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
29139 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
29140 break;
29141 default:
29142 break;
29143 }
29144 }
29145
29146 /* Given DIE that we're marking as used, find any other dies
29147 it references as attributes and mark them as used. */
29148
29149 static void
29150 prune_unused_types_walk_attribs (dw_die_ref die)
29151 {
29152 dw_attr_node *a;
29153 unsigned ix;
29154
29155 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29156 {
29157 switch (AT_class (a))
29158 {
29159 /* Make sure DWARF procedures referenced by location descriptions will
29160 get emitted. */
29161 case dw_val_class_loc:
29162 prune_unused_types_walk_loc_descr (AT_loc (a));
29163 break;
29164 case dw_val_class_loc_list:
29165 for (dw_loc_list_ref list = AT_loc_list (a);
29166 list != NULL;
29167 list = list->dw_loc_next)
29168 prune_unused_types_walk_loc_descr (list->expr);
29169 break;
29170
29171 case dw_val_class_view_list:
29172 /* This points to a loc_list in another attribute, so it's
29173 already covered. */
29174 break;
29175
29176 case dw_val_class_die_ref:
29177 /* A reference to another DIE.
29178 Make sure that it will get emitted.
29179 If it was broken out into a comdat group, don't follow it. */
29180 if (! AT_ref (a)->comdat_type_p
29181 || a->dw_attr == DW_AT_specification)
29182 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29183 break;
29184
29185 case dw_val_class_str:
29186 /* Set the string's refcount to 0 so that prune_unused_types_mark
29187 accounts properly for it. */
29188 a->dw_attr_val.v.val_str->refcount = 0;
29189 break;
29190
29191 default:
29192 break;
29193 }
29194 }
29195 }
29196
29197 /* Mark the children DIEs of DIE that describe its generic parameters and arguments. */
29198
29199 static void
29200 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29201 {
29202 dw_die_ref c;
29203
29204 if (die == NULL || die->die_child == NULL)
29205 return;
29206 c = die->die_child;
29207 do
29208 {
29209 if (is_template_parameter (c))
29210 prune_unused_types_mark (c, 1);
29211 c = c->die_sib;
29212 } while (c && c != die->die_child);
29213 }
29214
29215 /* Mark DIE as being used. If DOKIDS is true, then walk down
29216 to DIE's children. */
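/* die_mark works as a small state machine here: 0 means the DIE hasn't
   been visited yet, 1 means it has been marked as used, and 2 means its
   children have been walked as well.  */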
29217
29218 static void
29219 prune_unused_types_mark (dw_die_ref die, int dokids)
29220 {
29221 dw_die_ref c;
29222
29223 if (die->die_mark == 0)
29224 {
29225 /* We haven't done this node yet. Mark it as used. */
29226 die->die_mark = 1;
29227 /* If this is the DIE of a generic type instantiation,
29228 mark the children DIEs that describe its generic parms and
29229 args. */
29230 prune_unused_types_mark_generic_parms_dies (die);
29231
29232 /* We also have to mark its parents as used.
29233 (But we don't want to mark our parent's kids due to this,
29234 unless it is a class.) */
29235 if (die->die_parent)
29236 prune_unused_types_mark (die->die_parent,
29237 class_scope_p (die->die_parent));
29238
29239 /* Mark any referenced nodes. */
29240 prune_unused_types_walk_attribs (die);
29241
29242 /* If this node is a specification,
29243 also mark the definition, if it exists. */
29244 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29245 prune_unused_types_mark (die->die_definition, 1);
29246 }
29247
29248 if (dokids && die->die_mark != 2)
29249 {
29250 /* We need to walk the children, but haven't done so yet.
29251 Remember that we've walked the kids. */
29252 die->die_mark = 2;
29253
29254 /* If this is an array type, we need to make sure our
29255 kids get marked, even if they're types. If we're
29256 breaking out types into comdat sections, do this
29257 for all type definitions. */
29258 if (die->die_tag == DW_TAG_array_type
29259 || (use_debug_types
29260 && is_type_die (die) && ! is_declaration_die (die)))
29261 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29262 else
29263 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29264 }
29265 }
29266
29267 /* For local classes, check whether any static member functions were
29268 emitted and, if so, mark them. */
29269
29270 static void
29271 prune_unused_types_walk_local_classes (dw_die_ref die)
29272 {
29273 dw_die_ref c;
29274
29275 if (die->die_mark == 2)
29276 return;
29277
29278 switch (die->die_tag)
29279 {
29280 case DW_TAG_structure_type:
29281 case DW_TAG_union_type:
29282 case DW_TAG_class_type:
29283 case DW_TAG_interface_type:
29284 break;
29285
29286 case DW_TAG_subprogram:
29287 if (!get_AT_flag (die, DW_AT_declaration)
29288 || die->die_definition != NULL)
29289 prune_unused_types_mark (die, 1);
29290 return;
29291
29292 default:
29293 return;
29294 }
29295
29296 /* Mark children. */
29297 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29298 }
29299
29300 /* Walk the tree DIE and mark types that we actually use. */
29301
29302 static void
29303 prune_unused_types_walk (dw_die_ref die)
29304 {
29305 dw_die_ref c;
29306
29307 /* Don't do anything if this node is already marked and
29308 children have been marked as well. */
29309 if (die->die_mark == 2)
29310 return;
29311
29312 switch (die->die_tag)
29313 {
29314 case DW_TAG_structure_type:
29315 case DW_TAG_union_type:
29316 case DW_TAG_class_type:
29317 case DW_TAG_interface_type:
29318 if (die->die_perennial_p)
29319 break;
29320
29321 for (c = die->die_parent; c; c = c->die_parent)
29322 if (c->die_tag == DW_TAG_subprogram)
29323 break;
29324
29325 /* Finding used static member functions inside of classes
29326 is needed just for local classes, because for other classes
29327 static member function DIEs with DW_AT_specification
29328 are emitted outside of the DW_TAG_*_type. If we ever change
29329 it, we'd need to call this even for non-local classes. */
29330 if (c)
29331 prune_unused_types_walk_local_classes (die);
29332
29333 /* It's a type node --- don't mark it. */
29334 return;
29335
29336 case DW_TAG_const_type:
29337 case DW_TAG_packed_type:
29338 case DW_TAG_pointer_type:
29339 case DW_TAG_reference_type:
29340 case DW_TAG_rvalue_reference_type:
29341 case DW_TAG_volatile_type:
29342 case DW_TAG_typedef:
29343 case DW_TAG_array_type:
29344 case DW_TAG_friend:
29345 case DW_TAG_enumeration_type:
29346 case DW_TAG_subroutine_type:
29347 case DW_TAG_string_type:
29348 case DW_TAG_set_type:
29349 case DW_TAG_subrange_type:
29350 case DW_TAG_ptr_to_member_type:
29351 case DW_TAG_file_type:
29352 /* Type nodes are useful only when other DIEs reference them --- don't
29353 mark them. */
29354 /* FALLTHROUGH */
29355
29356 case DW_TAG_dwarf_procedure:
29357 /* Likewise for DWARF procedures. */
29358
29359 if (die->die_perennial_p)
29360 break;
29361
29362 return;
29363
29364 case DW_TAG_variable:
29365 if (flag_debug_only_used_symbols)
29366 {
29367 if (die->die_perennial_p)
29368 break;
29369
29370 /* premark_used_variables marks external variables --- don't mark
29371 them here. But function-local externals are always considered
29372 used. */
29373 if (get_AT (die, DW_AT_external))
29374 {
29375 for (c = die->die_parent; c; c = c->die_parent)
29376 if (c->die_tag == DW_TAG_subprogram)
29377 break;
29378 if (!c)
29379 return;
29380 }
29381 }
29382 /* FALLTHROUGH */
29383
29384 default:
29385 /* Mark everything else. */
29386 break;
29387 }
29388
29389 if (die->die_mark == 0)
29390 {
29391 die->die_mark = 1;
29392
29393 /* Now, mark any dies referenced from here. */
29394 prune_unused_types_walk_attribs (die);
29395 }
29396
29397 die->die_mark = 2;
29398
29399 /* Mark children. */
29400 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29401 }
29402
29403 /* Increment the string counts on strings referred to from DIE's
29404 attributes. */
29405
29406 static void
29407 prune_unused_types_update_strings (dw_die_ref die)
29408 {
29409 dw_attr_node *a;
29410 unsigned ix;
29411
29412 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29413 if (AT_class (a) == dw_val_class_str)
29414 {
29415 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29416 s->refcount++;
29417 /* Avoid unnecessarily putting strings that are used less than
29418 twice in the hash table. */
29419 if (s->refcount
29420 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29421 {
29422 indirect_string_node **slot
29423 = debug_str_hash->find_slot_with_hash (s->str,
29424 htab_hash_string (s->str),
29425 INSERT);
29426 gcc_assert (*slot == NULL);
29427 *slot = s;
29428 }
29429 }
29430 }
29431
29432 /* Mark DIE and its children as removed. */
29433
29434 static void
29435 mark_removed (dw_die_ref die)
29436 {
29437 dw_die_ref c;
29438 die->removed = true;
29439 FOR_EACH_CHILD (die, c, mark_removed (c));
29440 }
29441
29442 /* Remove from the tree DIE any dies that aren't marked. */
29443
29444 static void
29445 prune_unused_types_prune (dw_die_ref die)
29446 {
29447 dw_die_ref c;
29448
29449 gcc_assert (die->die_mark);
29450 prune_unused_types_update_strings (die);
29451
29452 if (! die->die_child)
29453 return;
29454
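/* The children of DIE form a circular singly-linked sibling list with
   die->die_child pointing to the last child; walk that list, unlinking
   runs of unmarked children and recursing into the marked ones.  */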
29455 c = die->die_child;
29456 do {
29457 dw_die_ref prev = c, next;
29458 for (c = c->die_sib; ! c->die_mark; c = next)
29459 if (c == die->die_child)
29460 {
29461 /* No marked children between 'prev' and the end of the list. */
29462 if (prev == c)
29463 /* No marked children at all. */
29464 die->die_child = NULL;
29465 else
29466 {
29467 prev->die_sib = c->die_sib;
29468 die->die_child = prev;
29469 }
29470 c->die_sib = NULL;
29471 mark_removed (c);
29472 return;
29473 }
29474 else
29475 {
29476 next = c->die_sib;
29477 c->die_sib = NULL;
29478 mark_removed (c);
29479 }
29480
29481 if (c != prev->die_sib)
29482 prev->die_sib = c;
29483 prune_unused_types_prune (c);
29484 } while (c != die->die_child);
29485 }
29486
29487 /* Remove dies representing declarations that we never use. */
29488
29489 static void
29490 prune_unused_types (void)
29491 {
29492 unsigned int i;
29493 limbo_die_node *node;
29494 comdat_type_node *ctnode;
29495 pubname_entry *pub;
29496 dw_die_ref base_type;
29497
29498 #if ENABLE_ASSERT_CHECKING
29499 /* All the marks should already be clear. */
29500 verify_marks_clear (comp_unit_die ());
29501 for (node = limbo_die_list; node; node = node->next)
29502 verify_marks_clear (node->die);
29503 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29504 verify_marks_clear (ctnode->root_die);
29505 #endif /* ENABLE_ASSERT_CHECKING */
29506
29507 /* Mark types that are used in global variables. */
29508 premark_types_used_by_global_vars ();
29509
29510 /* Mark variables used in the symtab. */
29511 if (flag_debug_only_used_symbols)
29512 premark_used_variables ();
29513
29514 /* Set the mark on nodes that are actually used. */
29515 prune_unused_types_walk (comp_unit_die ());
29516 for (node = limbo_die_list; node; node = node->next)
29517 prune_unused_types_walk (node->die);
29518 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29519 {
29520 prune_unused_types_walk (ctnode->root_die);
29521 prune_unused_types_mark (ctnode->type_die, 1);
29522 }
29523
29524 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29525 are unusual in that they are pubnames that are the children of pubtypes.
29526 They should only be marked via their parent DW_TAG_enumeration_type die,
29527 not as roots in themselves. */
29528 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29529 if (pub->die->die_tag != DW_TAG_enumerator)
29530 prune_unused_types_mark (pub->die, 1);
29531 for (i = 0; base_types.iterate (i, &base_type); i++)
29532 prune_unused_types_mark (base_type, 1);
29533
29534 /* Also set the mark on nodes that could be referenced by
29535 DW_TAG_call_site DW_AT_call_origin (i.e. direct call callees) or
29536 by DW_TAG_inlined_subroutine origins. */
29537 cgraph_node *cnode;
29538 FOR_EACH_FUNCTION (cnode)
29539 if (cnode->referred_to_p (false))
29540 {
29541 dw_die_ref die = lookup_decl_die (cnode->decl);
29542 if (die == NULL || die->die_mark)
29543 continue;
29544 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29545 if (e->caller != cnode)
29546 {
29547 prune_unused_types_mark (die, 1);
29548 break;
29549 }
29550 }
29551
29552 if (debug_str_hash)
29553 debug_str_hash->empty ();
29554 if (skeleton_debug_str_hash)
29555 skeleton_debug_str_hash->empty ();
29556 prune_unused_types_prune (comp_unit_die ());
29557 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29558 {
29559 node = *pnode;
29560 if (!node->die->die_mark)
29561 *pnode = node->next;
29562 else
29563 {
29564 prune_unused_types_prune (node->die);
29565 pnode = &node->next;
29566 }
29567 }
29568 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29569 prune_unused_types_prune (ctnode->root_die);
29570
29571 /* Leave the marks clear. */
29572 prune_unmark_dies (comp_unit_die ());
29573 for (node = limbo_die_list; node; node = node->next)
29574 prune_unmark_dies (node->die);
29575 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29576 prune_unmark_dies (ctnode->root_die);
29577 }
29578
29579 /* Helpers to manipulate hash table of comdat type units. */
29580
29581 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29582 {
29583 static inline hashval_t hash (const comdat_type_node *);
29584 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29585 };
29586
29587 inline hashval_t
29588 comdat_type_hasher::hash (const comdat_type_node *type_node)
29589 {
29590 hashval_t h;
29591 memcpy (&h, type_node->signature, sizeof (h));
29592 return h;
29593 }
29594
29595 inline bool
29596 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29597 const comdat_type_node *type_node_2)
29598 {
29599 return (! memcmp (type_node_1->signature, type_node_2->signature,
29600 DWARF_TYPE_SIGNATURE_SIZE));
29601 }
29602
29603 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to dw_die_ref
29604 to the location it would have been added to, had we known its
29605 DECL_ASSEMBLER_NAME when we added the other attributes. This will
29606 probably improve compactness of debug info, removing equivalent
29607 abbrevs, and hide any differences caused by deferring the
29608 computation of the assembler name, triggered by e.g. PCH. */
29609
29610 static inline void
29611 move_linkage_attr (dw_die_ref die)
29612 {
29613 unsigned ix = vec_safe_length (die->die_attr);
29614 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29615
29616 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29617 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29618
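/* Scan backwards for the last DW_AT_name, DW_AT_decl_line or
   DW_AT_decl_column attribute; the linkage name is moved into the
   slot just after it.  */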
29619 while (--ix > 0)
29620 {
29621 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29622
29623 if (prev->dw_attr == DW_AT_decl_line
29624 || prev->dw_attr == DW_AT_decl_column
29625 || prev->dw_attr == DW_AT_name)
29626 break;
29627 }
29628
29629 if (ix != vec_safe_length (die->die_attr) - 1)
29630 {
29631 die->die_attr->pop ();
29632 die->die_attr->quick_insert (ix, linkage);
29633 }
29634 }
29635
29636 /* Helper function for resolve_addr, mark DW_TAG_base_type nodes
29637 referenced from typed stack ops and count how often they are used. */
29638
29639 static void
29640 mark_base_types (dw_loc_descr_ref loc)
29641 {
29642 dw_die_ref base_type = NULL;
29643
29644 for (; loc; loc = loc->dw_loc_next)
29645 {
29646 switch (loc->dw_loc_opc)
29647 {
29648 case DW_OP_regval_type:
29649 case DW_OP_deref_type:
29650 case DW_OP_GNU_regval_type:
29651 case DW_OP_GNU_deref_type:
29652 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29653 break;
29654 case DW_OP_convert:
29655 case DW_OP_reinterpret:
29656 case DW_OP_GNU_convert:
29657 case DW_OP_GNU_reinterpret:
29658 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29659 continue;
29660 /* FALLTHRU */
29661 case DW_OP_const_type:
29662 case DW_OP_GNU_const_type:
29663 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29664 break;
29665 case DW_OP_entry_value:
29666 case DW_OP_GNU_entry_value:
29667 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29668 continue;
29669 default:
29670 continue;
29671 }
29672 gcc_assert (base_type->die_parent == comp_unit_die ());
29673 if (base_type->die_mark)
29674 base_type->die_mark++;
29675 else
29676 {
29677 base_types.safe_push (base_type);
29678 base_type->die_mark = 1;
29679 }
29680 }
29681 }
29682
29683 /* Comparison function for sorting marked base types. */
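/* The primary key is decreasing die_mark (usage count); DW_AT_byte_size,
   DW_AT_encoding and DW_AT_alignment serve as tie-breakers, also in
   decreasing order.  */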
29684
29685 static int
29686 base_type_cmp (const void *x, const void *y)
29687 {
29688 dw_die_ref dx = *(const dw_die_ref *) x;
29689 dw_die_ref dy = *(const dw_die_ref *) y;
29690 unsigned int byte_size1, byte_size2;
29691 unsigned int encoding1, encoding2;
29692 unsigned int align1, align2;
29693 if (dx->die_mark > dy->die_mark)
29694 return -1;
29695 if (dx->die_mark < dy->die_mark)
29696 return 1;
29697 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29698 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29699 if (byte_size1 < byte_size2)
29700 return 1;
29701 if (byte_size1 > byte_size2)
29702 return -1;
29703 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29704 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29705 if (encoding1 < encoding2)
29706 return 1;
29707 if (encoding1 > encoding2)
29708 return -1;
29709 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29710 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29711 if (align1 < align2)
29712 return 1;
29713 if (align1 > align2)
29714 return -1;
29715 return 0;
29716 }
29717
29718 /* Move base types marked by mark_base_types as early as possible
29719 in the CU, sorted by decreasing usage count both to make the
29720 uleb128 references as small as possible and to make sure they
29721 will have die_offset already computed by calc_die_sizes when
29722 the sizes of typed stack loc ops are computed. */
29723
29724 static void
29725 move_marked_base_types (void)
29726 {
29727 unsigned int i;
29728 dw_die_ref base_type, die, c;
29729
29730 if (base_types.is_empty ())
29731 return;
29732
29733 /* Sort by decreasing usage count, they will be added again in that
29734 order later on. */
29735 base_types.qsort (base_type_cmp);
29736 die = comp_unit_die ();
29737 c = die->die_child;
29738 do
29739 {
29740 dw_die_ref prev = c;
29741 c = c->die_sib;
29742 while (c->die_mark)
29743 {
29744 remove_child_with_prev (c, prev);
29745 /* Since base types got marked, there must be at least
29746 one node other than DW_TAG_base_type. */
29747 gcc_assert (die->die_child != NULL);
29748 c = prev->die_sib;
29749 }
29750 }
29751 while (c != die->die_child);
29752 gcc_assert (die->die_child);
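/* die->die_child points to the last child in the circular sibling list,
   so inserting each base type right after it splices the sorted base
   types in at the front of the CU's children.  */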
29753 c = die->die_child;
29754 for (i = 0; base_types.iterate (i, &base_type); i++)
29755 {
29756 base_type->die_mark = 0;
29757 base_type->die_sib = c->die_sib;
29758 c->die_sib = base_type;
29759 c = base_type;
29760 }
29761 }
29762
29763 /* Helper function for resolve_addr, attempt to resolve
29764 one CONST_STRING, return true if successful. Similarly verify that
29765 SYMBOL_REFs refer to variables emitted in the current CU. */
29766
29767 static bool
29768 resolve_one_addr (rtx *addr)
29769 {
29770 rtx rtl = *addr;
29771
29772 if (GET_CODE (rtl) == CONST_STRING)
29773 {
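/* Look the string up in the constant pool; fail if it has no pool
   entry there or if the entry's decl hasn't been written out.  */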
29774 size_t len = strlen (XSTR (rtl, 0)) + 1;
29775 tree t = build_string (len, XSTR (rtl, 0));
29776 tree tlen = size_int (len - 1);
29777 TREE_TYPE (t)
29778 = build_array_type (char_type_node, build_index_type (tlen));
29779 rtl = lookup_constant_def (t);
29780 if (!rtl || !MEM_P (rtl))
29781 return false;
29782 rtl = XEXP (rtl, 0);
29783 if (GET_CODE (rtl) == SYMBOL_REF
29784 && SYMBOL_REF_DECL (rtl)
29785 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29786 return false;
29787 vec_safe_push (used_rtx_array, rtl);
29788 *addr = rtl;
29789 return true;
29790 }
29791
29792 if (GET_CODE (rtl) == SYMBOL_REF
29793 && SYMBOL_REF_DECL (rtl))
29794 {
29795 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29796 {
29797 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29798 return false;
29799 }
29800 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29801 return false;
29802 }
29803
29804 if (GET_CODE (rtl) == CONST)
29805 {
29806 subrtx_ptr_iterator::array_type array;
29807 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29808 if (!resolve_one_addr (*iter))
29809 return false;
29810 }
29811
29812 return true;
29813 }
29814
29815 /* For STRING_CST, return the SYMBOL_REF of its constant pool entry,
29816 if possible, and create a DW_TAG_dwarf_procedure that can be referenced
29817 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
29818
29819 static rtx
29820 string_cst_pool_decl (tree t)
29821 {
29822 rtx rtl = output_constant_def (t, 1);
29823 unsigned char *array;
29824 dw_loc_descr_ref l;
29825 tree decl;
29826 size_t len;
29827 dw_die_ref ref;
29828
29829 if (!rtl || !MEM_P (rtl))
29830 return NULL_RTX;
29831 rtl = XEXP (rtl, 0);
29832 if (GET_CODE (rtl) != SYMBOL_REF
29833 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29834 return NULL_RTX;
29835
29836 decl = SYMBOL_REF_DECL (rtl);
29837 if (!lookup_decl_die (decl))
29838 {
29839 len = TREE_STRING_LENGTH (t);
29840 vec_safe_push (used_rtx_array, rtl);
29841 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29842 array = ggc_vec_alloc<unsigned char> (len);
29843 memcpy (array, TREE_STRING_POINTER (t), len);
29844 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29845 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29846 l->dw_loc_oprnd2.v.val_vec.length = len;
29847 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29848 l->dw_loc_oprnd2.v.val_vec.array = array;
29849 add_AT_loc (ref, DW_AT_location, l);
29850 equate_decl_number_to_die (decl, ref);
29851 }
29852 return rtl;
29853 }
29854
29855 /* Helper function of resolve_addr_in_expr. LOC is
29856 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29857 of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29858 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
29859 with DW_OP_implicit_pointer if possible and return true;
29860 if unsuccessful, return false. */
29861
29862 static bool
29863 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29864 {
29865 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29866 HOST_WIDE_INT offset = 0;
29867 dw_die_ref ref = NULL;
29868 tree decl;
29869
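/* Peel off a constant offset from CONST (PLUS (symbol, const_int)) so it
   can be carried as DW_OP_implicit_pointer's offset operand below.  */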
29870 if (GET_CODE (rtl) == CONST
29871 && GET_CODE (XEXP (rtl, 0)) == PLUS
29872 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29873 {
29874 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29875 rtl = XEXP (XEXP (rtl, 0), 0);
29876 }
29877 if (GET_CODE (rtl) == CONST_STRING)
29878 {
29879 size_t len = strlen (XSTR (rtl, 0)) + 1;
29880 tree t = build_string (len, XSTR (rtl, 0));
29881 tree tlen = size_int (len - 1);
29882
29883 TREE_TYPE (t)
29884 = build_array_type (char_type_node, build_index_type (tlen));
29885 rtl = string_cst_pool_decl (t);
29886 if (!rtl)
29887 return false;
29888 }
29889 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29890 {
29891 decl = SYMBOL_REF_DECL (rtl);
29892 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29893 {
29894 ref = lookup_decl_die (decl);
29895 if (ref && (get_AT (ref, DW_AT_location)
29896 || get_AT (ref, DW_AT_const_value)))
29897 {
29898 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
29899 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29900 loc->dw_loc_oprnd1.val_entry = NULL;
29901 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29902 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29903 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
29904 loc->dw_loc_oprnd2.v.val_int = offset;
29905 return true;
29906 }
29907 }
29908 }
29909 return false;
29910 }
29911
29912 /* Helper function for resolve_addr: handle one location
29913 expression and return false if at least one CONST_STRING or SYMBOL_REF in
29914 the location list couldn't be resolved. */
29915
29916 static bool
29917 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
29918 {
29919 dw_loc_descr_ref keep = NULL;
29920 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
29921 switch (loc->dw_loc_opc)
29922 {
29923 case DW_OP_addr:
29924 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29925 {
29926 if ((prev == NULL
29927 || prev->dw_loc_opc == DW_OP_piece
29928 || prev->dw_loc_opc == DW_OP_bit_piece)
29929 && loc->dw_loc_next
29930 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
29931 && (!dwarf_strict || dwarf_version >= 5)
29932 && optimize_one_addr_into_implicit_ptr (loc))
29933 break;
29934 return false;
29935 }
29936 break;
29937 case DW_OP_GNU_addr_index:
29938 case DW_OP_addrx:
29939 case DW_OP_GNU_const_index:
29940 case DW_OP_constx:
29941 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
29942 || loc->dw_loc_opc == DW_OP_addrx)
29943 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
29944 || loc->dw_loc_opc == DW_OP_constx)
29945 && loc->dtprel))
29946 {
29947 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
29948 if (!resolve_one_addr (&rtl))
29949 return false;
29950 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
29951 loc->dw_loc_oprnd1.val_entry
29952 = add_addr_table_entry (rtl, ate_kind_rtx);
29953 }
29954 break;
29955 case DW_OP_const4u:
29956 case DW_OP_const8u:
29957 if (loc->dtprel
29958 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
29959 return false;
29960 break;
29961 case DW_OP_plus_uconst:
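/* If pushing the operand as a separate constant followed by DW_OP_plus
   would be shorter than DW_OP_plus_uconst with its uleb128 operand,
   rewrite the op that way.  */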
29962 if (size_of_loc_descr (loc)
29963 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
29964 + 1
29965 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
29966 {
29967 dw_loc_descr_ref repl
29968 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
29969 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
29970 add_loc_descr (&repl, loc->dw_loc_next);
29971 *loc = *repl;
29972 }
29973 break;
29974 case DW_OP_implicit_value:
29975 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
29976 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
29977 return false;
29978 break;
29979 case DW_OP_implicit_pointer:
29980 case DW_OP_GNU_implicit_pointer:
29981 case DW_OP_GNU_parameter_ref:
29982 case DW_OP_GNU_variable_value:
29983 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29984 {
29985 dw_die_ref ref
29986 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29987 if (ref == NULL)
29988 return false;
29989 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29990 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29991 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29992 }
29993 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
29994 {
29995 if (prev == NULL
29996 && loc->dw_loc_next == NULL
29997 && AT_class (a) == dw_val_class_loc)
29998 switch (a->dw_attr)
29999 {
30000 /* The following attributes allow both exprloc and reference
30001 classes, so if the whole expression is DW_OP_GNU_variable_value
30002 alone, we can transform it into a reference. */
30003 case DW_AT_byte_size:
30004 case DW_AT_bit_size:
30005 case DW_AT_lower_bound:
30006 case DW_AT_upper_bound:
30007 case DW_AT_bit_stride:
30008 case DW_AT_count:
30009 case DW_AT_allocated:
30010 case DW_AT_associated:
30011 case DW_AT_byte_stride:
30012 a->dw_attr_val.val_class = dw_val_class_die_ref;
30013 a->dw_attr_val.val_entry = NULL;
30014 a->dw_attr_val.v.val_die_ref.die
30015 = loc->dw_loc_oprnd1.v.val_die_ref.die;
30016 a->dw_attr_val.v.val_die_ref.external = 0;
30017 return true;
30018 default:
30019 break;
30020 }
30021 if (dwarf_strict)
30022 return false;
30023 }
30024 break;
30025 case DW_OP_const_type:
30026 case DW_OP_regval_type:
30027 case DW_OP_deref_type:
30028 case DW_OP_convert:
30029 case DW_OP_reinterpret:
30030 case DW_OP_GNU_const_type:
30031 case DW_OP_GNU_regval_type:
30032 case DW_OP_GNU_deref_type:
30033 case DW_OP_GNU_convert:
30034 case DW_OP_GNU_reinterpret:
30035 while (loc->dw_loc_next
30036 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
30037 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
30038 {
30039 dw_die_ref base1, base2;
30040 unsigned enc1, enc2, size1, size2;
30041 if (loc->dw_loc_opc == DW_OP_regval_type
30042 || loc->dw_loc_opc == DW_OP_deref_type
30043 || loc->dw_loc_opc == DW_OP_GNU_regval_type
30044 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
30045 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
30046 else if (loc->dw_loc_oprnd1.val_class
30047 == dw_val_class_unsigned_const)
30048 break;
30049 else
30050 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
30051 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
30052 == dw_val_class_unsigned_const)
30053 break;
30054 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
30055 gcc_assert (base1->die_tag == DW_TAG_base_type
30056 && base2->die_tag == DW_TAG_base_type);
30057 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
30058 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
30059 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
30060 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
30061 if (size1 == size2
30062 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
30063 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
30064 && loc != keep)
30065 || enc1 == enc2))
30066 {
30067 /* Optimize away next DW_OP_convert after
30068 adjusting LOC's base type die reference. */
30069 if (loc->dw_loc_opc == DW_OP_regval_type
30070 || loc->dw_loc_opc == DW_OP_deref_type
30071 || loc->dw_loc_opc == DW_OP_GNU_regval_type
30072 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
30073 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
30074 else
30075 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
30076 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
30077 continue;
30078 }
30079 /* Don't change integer DW_OP_convert after e.g. floating
30080 point typed stack entry. */
30081 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
30082 keep = loc->dw_loc_next;
30083 break;
30084 }
30085 break;
30086 default:
30087 break;
30088 }
30089 return true;
30090 }
30091
30092 /* Helper function of resolve_addr. DIE had a DW_AT_location attribute
30093 consisting of DW_OP_addr alone, whose operand referred to DECL,
30094 and the DW_OP_addr couldn't be resolved. resolve_addr has already
30095 removed the DW_AT_location attribute. This function attempts to
30096 add to DIE a new DW_AT_location attribute with DW_OP_implicit_pointer,
30097 or a DW_AT_const_value attribute, if possible. */
30098
30099 static void
30100 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
30101 {
30102 if (!VAR_P (decl)
30103 || lookup_decl_die (decl) != die
30104 || DECL_EXTERNAL (decl)
30105 || !TREE_STATIC (decl)
30106 || DECL_INITIAL (decl) == NULL_TREE
30107 || DECL_P (DECL_INITIAL (decl))
30108 || get_AT (die, DW_AT_const_value))
30109 return;
30110
30111 tree init = DECL_INITIAL (decl);
30112 HOST_WIDE_INT offset = 0;
30113 /* For variables that have been optimized away and thus
30114 don't have a memory location, see if we can emit
30115 DW_AT_const_value instead. */
30116 if (tree_add_const_value_attribute (die, init))
30117 return;
30118 if (dwarf_strict && dwarf_version < 5)
30119 return;
30120 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
30121 and ADDR_EXPR refers to a decl that has DW_AT_location or
30122 DW_AT_const_value (but isn't addressable, otherwise
30123 resolving the original DW_OP_addr wouldn't fail), see if
30124 we can add DW_OP_implicit_pointer. */
30125 STRIP_NOPS (init);
30126 if (TREE_CODE (init) == POINTER_PLUS_EXPR
30127 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
30128 {
30129 offset = tree_to_shwi (TREE_OPERAND (init, 1));
30130 init = TREE_OPERAND (init, 0);
30131 STRIP_NOPS (init);
30132 }
30133 if (TREE_CODE (init) != ADDR_EXPR)
30134 return;
30135 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
30136 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
30137 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
30138 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
30139 && TREE_OPERAND (init, 0) != decl))
30140 {
30141 dw_die_ref ref;
30142 dw_loc_descr_ref l;
30143
30144 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
30145 {
30146 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
30147 if (!rtl)
30148 return;
30149 decl = SYMBOL_REF_DECL (rtl);
30150 }
30151 else
30152 decl = TREE_OPERAND (init, 0);
30153 ref = lookup_decl_die (decl);
30154 if (ref == NULL
30155 || (!get_AT (ref, DW_AT_location)
30156 && !get_AT (ref, DW_AT_const_value)))
30157 return;
30158 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
30159 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30160 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
30161 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30162 add_AT_loc (die, DW_AT_location, l);
30163 }
30164 }
30165
30166 /* Return NULL if L is a valid DWARF expression, or the first op
30167 that is not a valid DWARF expression. */
30168
30169 static dw_loc_descr_ref
30170 non_dwarf_expression (dw_loc_descr_ref l)
30171 {
30172 while (l)
30173 {
30174 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30175 return l;
30176 switch (l->dw_loc_opc)
30177 {
30178 case DW_OP_regx:
30179 case DW_OP_implicit_value:
30180 case DW_OP_stack_value:
30181 case DW_OP_implicit_pointer:
30182 case DW_OP_GNU_implicit_pointer:
30183 case DW_OP_GNU_parameter_ref:
30184 case DW_OP_piece:
30185 case DW_OP_bit_piece:
30186 return l;
30187 default:
30188 break;
30189 }
30190 l = l->dw_loc_next;
30191 }
30192 return NULL;
30193 }
30194
30195 /* Return an adjusted copy of EXPR:
30196 If it is an empty DWARF expression, return it.
30197 If it is a valid non-empty DWARF expression,
30198 return a copy of EXPR with DW_OP_deref appended to it.
30199 If it is a DWARF expression followed by DW_OP_reg{N,x}, return a
30200 copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
30201 If it is a DWARF expression followed by DW_OP_stack_value, return a
30202 copy of the DWARF expression without anything appended.
30203 Otherwise, return NULL. */
30204
30205 static dw_loc_descr_ref
30206 copy_deref_exprloc (dw_loc_descr_ref expr)
30207 {
30208 dw_loc_descr_ref tail = NULL;
30209
30210 if (expr == NULL)
30211 return NULL;
30212
30213 dw_loc_descr_ref l = non_dwarf_expression (expr);
30214 if (l && l->dw_loc_next)
30215 return NULL;
30216
30217 if (l)
30218 {
30219 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30220 tail = new_loc_descr ((enum dwarf_location_atom)
30221 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30222 0, 0);
30223 else
30224 switch (l->dw_loc_opc)
30225 {
30226 case DW_OP_regx:
30227 tail = new_loc_descr (DW_OP_bregx,
30228 l->dw_loc_oprnd1.v.val_unsigned, 0);
30229 break;
30230 case DW_OP_stack_value:
30231 break;
30232 default:
30233 return NULL;
30234 }
30235 }
30236 else
30237 tail = new_loc_descr (DW_OP_deref, 0, 0);
30238
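/* Copy everything up to (but not including) L, then append the
   replacement TAIL chosen above.  */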
30239 dw_loc_descr_ref ret = NULL, *p = &ret;
30240 while (expr != l)
30241 {
30242 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30243 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30244 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30245 p = &(*p)->dw_loc_next;
30246 expr = expr->dw_loc_next;
30247 }
30248 *p = tail;
30249 return ret;
30250 }
30251
30252 /* For a DW_AT_string_length attribute with a DW_OP_GNU_variable_value
30253 reference to a variable or argument, adjust it if needed and return:
30254 -1 if the DW_AT_string_length attribute and the DW_AT_{string_length_,}byte_size
30255 attribute (if present) should be removed;
30256 0 to keep the attribute, perhaps with minor modifications, with no need to rescan;
30257 1 if the attribute has been successfully adjusted. */
30258
30259 static int
30260 optimize_string_length (dw_attr_node *a)
30261 {
30262 dw_loc_descr_ref l = AT_loc (a), lv;
30263 dw_die_ref die;
30264 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30265 {
30266 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30267 die = lookup_decl_die (decl);
30268 if (die)
30269 {
30270 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30271 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30272 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30273 }
30274 else
30275 return -1;
30276 }
30277 else
30278 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30279
30280 /* DWARF5 allows reference class, so we can then reference the DIE.
30281 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30282 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30283 {
30284 a->dw_attr_val.val_class = dw_val_class_die_ref;
30285 a->dw_attr_val.val_entry = NULL;
30286 a->dw_attr_val.v.val_die_ref.die = die;
30287 a->dw_attr_val.v.val_die_ref.external = 0;
30288 return 0;
30289 }
30290
30291 dw_attr_node *av = get_AT (die, DW_AT_location);
30292 dw_loc_list_ref d;
30293 bool non_dwarf_expr = false;
30294
30295 if (av == NULL)
30296 return dwarf_strict ? -1 : 0;
30297 switch (AT_class (av))
30298 {
30299 case dw_val_class_loc_list:
30300 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30301 if (d->expr && non_dwarf_expression (d->expr))
30302 non_dwarf_expr = true;
30303 break;
30304 case dw_val_class_view_list:
30305 gcc_unreachable ();
30306 case dw_val_class_loc:
30307 lv = AT_loc (av);
30308 if (lv == NULL)
30309 return dwarf_strict ? -1 : 0;
30310 if (non_dwarf_expression (lv))
30311 non_dwarf_expr = true;
30312 break;
30313 default:
30314 return dwarf_strict ? -1 : 0;
30315 }
30316
30317 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30318 into DW_OP_call4 or DW_OP_GNU_variable_value into
30319 DW_OP_call4 DW_OP_deref, do so. */
30320 if (!non_dwarf_expr
30321 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30322 {
30323 l->dw_loc_opc = DW_OP_call4;
30324 if (l->dw_loc_next)
30325 l->dw_loc_next = NULL;
30326 else
30327 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30328 return 0;
30329 }
30330
30331 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30332 copy over the DW_AT_location attribute from die to a. */
30333 if (l->dw_loc_next != NULL)
30334 {
30335 a->dw_attr_val = av->dw_attr_val;
30336 return 1;
30337 }
30338
30339 dw_loc_list_ref list, *p;
30340 switch (AT_class (av))
30341 {
30342 case dw_val_class_loc_list:
30343 p = &list;
30344 list = NULL;
30345 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30346 {
30347 lv = copy_deref_exprloc (d->expr);
30348 if (lv)
30349 {
30350 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30351 p = &(*p)->dw_loc_next;
30352 }
30353 else if (!dwarf_strict && d->expr)
30354 return 0;
30355 }
30356 if (list == NULL)
30357 return dwarf_strict ? -1 : 0;
30358 a->dw_attr_val.val_class = dw_val_class_loc_list;
30359 gen_llsym (list);
30360 *AT_loc_list_ptr (a) = list;
30361 return 1;
30362 case dw_val_class_loc:
30363 lv = copy_deref_exprloc (AT_loc (av));
30364 if (lv == NULL)
30365 return dwarf_strict ? -1 : 0;
30366 a->dw_attr_val.v.val_loc = lv;
30367 return 1;
30368 default:
30369 gcc_unreachable ();
30370 }
30371 }
30372
30373 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30374 an address in the .rodata section if the string literal is emitted there,
30375 or remove the containing location list, or replace DW_AT_const_value
30376 with DW_AT_location and an empty location expression, if it isn't found
30377 in .rodata. Similarly for SYMBOL_REFs, keep only those that refer
30378 to something that has been emitted in the current CU. */
30379
30380 static void
30381 resolve_addr (dw_die_ref die)
30382 {
30383 dw_die_ref c;
30384 dw_attr_node *a;
30385 dw_loc_list_ref *curr, *start, loc;
30386 unsigned ix;
30387 bool remove_AT_byte_size = false;
30388
30389 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30390 switch (AT_class (a))
30391 {
30392 case dw_val_class_loc_list:
30393 start = curr = AT_loc_list_ptr (a);
30394 loc = *curr;
30395 gcc_assert (loc);
30396 /* The same list can be referenced more than once. See if we have
30397 already recorded the result from a previous pass. */
30398 if (loc->replaced)
30399 *curr = loc->dw_loc_next;
30400 else if (!loc->resolved_addr)
30401 {
30402 /* As things stand, we do not expect or allow one die to
30403 reference a suffix of another die's location list chain.
30404 References must be identical or completely separate.
30405 There is therefore no need to cache the result of this
30406 pass on any list other than the first; doing so
30407 would lead to unnecessary writes. */
30408 while (*curr)
30409 {
30410 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30411 if (!resolve_addr_in_expr (a, (*curr)->expr))
30412 {
30413 dw_loc_list_ref next = (*curr)->dw_loc_next;
30414 dw_loc_descr_ref l = (*curr)->expr;
30415
30416 if (next && (*curr)->ll_symbol)
30417 {
30418 gcc_assert (!next->ll_symbol);
30419 next->ll_symbol = (*curr)->ll_symbol;
30420 next->vl_symbol = (*curr)->vl_symbol;
30421 }
30422 if (dwarf_split_debug_info)
30423 remove_loc_list_addr_table_entries (l);
30424 *curr = next;
30425 }
30426 else
30427 {
30428 mark_base_types ((*curr)->expr);
30429 curr = &(*curr)->dw_loc_next;
30430 }
30431 }
30432 if (loc == *start)
30433 loc->resolved_addr = 1;
30434 else
30435 {
30436 loc->replaced = 1;
30437 loc->dw_loc_next = *start;
30438 }
30439 }
30440 if (!*start)
30441 {
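/* remove_AT deletes the attribute from the vector being iterated over,
   so step IX back to avoid skipping the attribute that shifts into
   this slot.  */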
30442 remove_AT (die, a->dw_attr);
30443 ix--;
30444 }
30445 break;
30446 case dw_val_class_view_list:
30447 {
30448 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30449 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30450 dw_val_node *llnode
30451 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30452 /* If we no longer have a loclist, or it no longer needs
30453 views, drop this attribute. */
30454 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30455 {
30456 remove_AT (die, a->dw_attr);
30457 ix--;
30458 }
30459 break;
30460 }
30461 case dw_val_class_loc:
30462 {
30463 dw_loc_descr_ref l = AT_loc (a);
30464 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30465 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30466 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30467 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30468 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30469 with DW_FORM_ref referencing the same DIE as
30470 DW_OP_GNU_variable_value used to reference. */
30471 if (a->dw_attr == DW_AT_string_length
30472 && l
30473 && l->dw_loc_opc == DW_OP_GNU_variable_value
30474 && (l->dw_loc_next == NULL
30475 || (l->dw_loc_next->dw_loc_next == NULL
30476 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30477 {
30478 switch (optimize_string_length (a))
30479 {
30480 case -1:
30481 remove_AT (die, a->dw_attr);
30482 ix--;
30483 /* If we drop DW_AT_string_length, we need to drop also
30484 DW_AT_{string_length_,}byte_size. */
30485 remove_AT_byte_size = true;
30486 continue;
30487 default:
30488 break;
30489 case 1:
30490 /* Even if we keep the optimized DW_AT_string_length,
30491 it might have changed AT_class, so process it again. */
30492 ix--;
30493 continue;
30494 }
30495 }
30496 /* For -gdwarf-2 don't attempt to optimize
30497 DW_AT_data_member_location containing
30498 DW_OP_plus_uconst - older consumers might
30499 rely on it being that op instead of a more complex,
30500 but shorter, location description. */
30501 if ((dwarf_version > 2
30502 || a->dw_attr != DW_AT_data_member_location
30503 || l == NULL
30504 || l->dw_loc_opc != DW_OP_plus_uconst
30505 || l->dw_loc_next != NULL)
30506 && !resolve_addr_in_expr (a, l))
30507 {
30508 if (dwarf_split_debug_info)
30509 remove_loc_list_addr_table_entries (l);
30510 if (l != NULL
30511 && l->dw_loc_next == NULL
30512 && l->dw_loc_opc == DW_OP_addr
30513 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30514 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30515 && a->dw_attr == DW_AT_location)
30516 {
30517 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30518 remove_AT (die, a->dw_attr);
30519 ix--;
30520 optimize_location_into_implicit_ptr (die, decl);
30521 break;
30522 }
30523 if (a->dw_attr == DW_AT_string_length)
30524 /* If we drop DW_AT_string_length, we need to drop also
30525 DW_AT_{string_length_,}byte_size. */
30526 remove_AT_byte_size = true;
30527 remove_AT (die, a->dw_attr);
30528 ix--;
30529 }
30530 else
30531 mark_base_types (l);
30532 }
30533 break;
30534 case dw_val_class_addr:
30535 if (a->dw_attr == DW_AT_const_value
30536 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30537 {
30538 if (AT_index (a) != NOT_INDEXED)
30539 remove_addr_table_entry (a->dw_attr_val.val_entry);
30540 remove_AT (die, a->dw_attr);
30541 ix--;
30542 }
30543 if ((die->die_tag == DW_TAG_call_site
30544 && a->dw_attr == DW_AT_call_origin)
30545 || (die->die_tag == DW_TAG_GNU_call_site
30546 && a->dw_attr == DW_AT_abstract_origin))
30547 {
30548 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30549 dw_die_ref tdie = lookup_decl_die (tdecl);
30550 dw_die_ref cdie;
30551 if (tdie == NULL
30552 && DECL_EXTERNAL (tdecl)
30553 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30554 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30555 {
30556 dw_die_ref pdie = cdie;
30557 /* Make sure we don't add these DIEs into type units.
30558 We could emit skeleton DIEs for context (namespaces,
30559 outer structs/classes) and a skeleton DIE for the
30560 innermost context with DW_AT_signature pointing to the
30561 type unit. See PR78835. */
30562 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30563 pdie = pdie->die_parent;
30564 if (pdie == NULL)
30565 {
30566 /* Creating a full DIE for tdecl is overly expensive and,
30567 when in the LTO phase, at this point even wrong, as it
30568 can end up generating new type DIEs we didn't output,
30569 and optimize_external_refs would then crash. */
30570 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30571 add_AT_flag (tdie, DW_AT_external, 1);
30572 add_AT_flag (tdie, DW_AT_declaration, 1);
30573 add_linkage_attr (tdie, tdecl);
30574 add_name_and_src_coords_attributes (tdie, tdecl, true);
30575 equate_decl_number_to_die (tdecl, tdie);
30576 }
30577 }
30578 if (tdie)
30579 {
30580 a->dw_attr_val.val_class = dw_val_class_die_ref;
30581 a->dw_attr_val.v.val_die_ref.die = tdie;
30582 a->dw_attr_val.v.val_die_ref.external = 0;
30583 }
30584 else
30585 {
30586 if (AT_index (a) != NOT_INDEXED)
30587 remove_addr_table_entry (a->dw_attr_val.val_entry);
30588 remove_AT (die, a->dw_attr);
30589 ix--;
30590 }
30591 }
30592 break;
30593 default:
30594 break;
30595 }
30596
30597 if (remove_AT_byte_size)
30598 remove_AT (die, dwarf_version >= 5
30599 ? DW_AT_string_length_byte_size
30600 : DW_AT_byte_size);
30601
30602 FOR_EACH_CHILD (die, c, resolve_addr (c));
30603 }
30604 \f
30605 /* Helper routines for optimize_location_lists.
30606 This pass tries to share identical location lists in the .debug_loc
30607 section. */
30608
30609 /* Iteratively hash operands of LOC opcode into HSTATE. */
30610
30611 static void
30612 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30613 {
30614 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30615 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30616
30617 switch (loc->dw_loc_opc)
30618 {
30619 case DW_OP_const4u:
30620 case DW_OP_const8u:
30621 if (loc->dtprel)
30622 goto hash_addr;
30623 /* FALLTHRU */
30624 case DW_OP_const1u:
30625 case DW_OP_const1s:
30626 case DW_OP_const2u:
30627 case DW_OP_const2s:
30628 case DW_OP_const4s:
30629 case DW_OP_const8s:
30630 case DW_OP_constu:
30631 case DW_OP_consts:
30632 case DW_OP_pick:
30633 case DW_OP_plus_uconst:
30634 case DW_OP_breg0:
30635 case DW_OP_breg1:
30636 case DW_OP_breg2:
30637 case DW_OP_breg3:
30638 case DW_OP_breg4:
30639 case DW_OP_breg5:
30640 case DW_OP_breg6:
30641 case DW_OP_breg7:
30642 case DW_OP_breg8:
30643 case DW_OP_breg9:
30644 case DW_OP_breg10:
30645 case DW_OP_breg11:
30646 case DW_OP_breg12:
30647 case DW_OP_breg13:
30648 case DW_OP_breg14:
30649 case DW_OP_breg15:
30650 case DW_OP_breg16:
30651 case DW_OP_breg17:
30652 case DW_OP_breg18:
30653 case DW_OP_breg19:
30654 case DW_OP_breg20:
30655 case DW_OP_breg21:
30656 case DW_OP_breg22:
30657 case DW_OP_breg23:
30658 case DW_OP_breg24:
30659 case DW_OP_breg25:
30660 case DW_OP_breg26:
30661 case DW_OP_breg27:
30662 case DW_OP_breg28:
30663 case DW_OP_breg29:
30664 case DW_OP_breg30:
30665 case DW_OP_breg31:
30666 case DW_OP_regx:
30667 case DW_OP_fbreg:
30668 case DW_OP_piece:
30669 case DW_OP_deref_size:
30670 case DW_OP_xderef_size:
30671 hstate.add_object (val1->v.val_int);
30672 break;
30673 case DW_OP_skip:
30674 case DW_OP_bra:
30675 {
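/* Hash the branch displacement as it will be emitted: the target's
   dw_loc_addr minus the address just past this op (1-byte opcode plus
   a 2-byte operand, hence the + 3).  */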
30676 int offset;
30677
30678 gcc_assert (val1->val_class == dw_val_class_loc);
30679 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30680 hstate.add_object (offset);
30681 }
30682 break;
30683 case DW_OP_implicit_value:
30684 hstate.add_object (val1->v.val_unsigned);
30685 switch (val2->val_class)
30686 {
30687 case dw_val_class_const:
30688 hstate.add_object (val2->v.val_int);
30689 break;
30690 case dw_val_class_vec:
30691 {
30692 unsigned int elt_size = val2->v.val_vec.elt_size;
30693 unsigned int len = val2->v.val_vec.length;
30694
30695 hstate.add_int (elt_size);
30696 hstate.add_int (len);
30697 hstate.add (val2->v.val_vec.array, len * elt_size);
30698 }
30699 break;
30700 case dw_val_class_const_double:
30701 hstate.add_object (val2->v.val_double.low);
30702 hstate.add_object (val2->v.val_double.high);
30703 break;
30704 case dw_val_class_wide_int:
30705 hstate.add (val2->v.val_wide->get_val (),
30706 get_full_len (*val2->v.val_wide)
30707 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30708 break;
30709 case dw_val_class_addr:
30710 inchash::add_rtx (val2->v.val_addr, hstate);
30711 break;
30712 default:
30713 gcc_unreachable ();
30714 }
30715 break;
30716 case DW_OP_bregx:
30717 case DW_OP_bit_piece:
30718 hstate.add_object (val1->v.val_int);
30719 hstate.add_object (val2->v.val_int);
30720 break;
30721 case DW_OP_addr:
30722 hash_addr:
30723 if (loc->dtprel)
30724 {
30725 unsigned char dtprel = 0xd1;
30726 hstate.add_object (dtprel);
30727 }
30728 inchash::add_rtx (val1->v.val_addr, hstate);
30729 break;
30730 case DW_OP_GNU_addr_index:
30731 case DW_OP_addrx:
30732 case DW_OP_GNU_const_index:
30733 case DW_OP_constx:
30734 {
30735 if (loc->dtprel)
30736 {
30737 unsigned char dtprel = 0xd1;
30738 hstate.add_object (dtprel);
30739 }
30740 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30741 }
30742 break;
30743 case DW_OP_implicit_pointer:
30744 case DW_OP_GNU_implicit_pointer:
30745 hstate.add_int (val2->v.val_int);
30746 break;
30747 case DW_OP_entry_value:
30748 case DW_OP_GNU_entry_value:
30749 hstate.add_object (val1->v.val_loc);
30750 break;
30751 case DW_OP_regval_type:
30752 case DW_OP_deref_type:
30753 case DW_OP_GNU_regval_type:
30754 case DW_OP_GNU_deref_type:
30755 {
30756 unsigned int byte_size
30757 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30758 unsigned int encoding
30759 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30760 hstate.add_object (val1->v.val_int);
30761 hstate.add_object (byte_size);
30762 hstate.add_object (encoding);
30763 }
30764 break;
30765 case DW_OP_convert:
30766 case DW_OP_reinterpret:
30767 case DW_OP_GNU_convert:
30768 case DW_OP_GNU_reinterpret:
30769 if (val1->val_class == dw_val_class_unsigned_const)
30770 {
30771 hstate.add_object (val1->v.val_unsigned);
30772 break;
30773 }
30774 /* FALLTHRU */
30775 case DW_OP_const_type:
30776 case DW_OP_GNU_const_type:
30777 {
30778 unsigned int byte_size
30779 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30780 unsigned int encoding
30781 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30782 hstate.add_object (byte_size);
30783 hstate.add_object (encoding);
30784 if (loc->dw_loc_opc != DW_OP_const_type
30785 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30786 break;
30787 hstate.add_object (val2->val_class);
30788 switch (val2->val_class)
30789 {
30790 case dw_val_class_const:
30791 hstate.add_object (val2->v.val_int);
30792 break;
30793 case dw_val_class_vec:
30794 {
30795 unsigned int elt_size = val2->v.val_vec.elt_size;
30796 unsigned int len = val2->v.val_vec.length;
30797
30798 hstate.add_object (elt_size);
30799 hstate.add_object (len);
30800 hstate.add (val2->v.val_vec.array, len * elt_size);
30801 }
30802 break;
30803 case dw_val_class_const_double:
30804 hstate.add_object (val2->v.val_double.low);
30805 hstate.add_object (val2->v.val_double.high);
30806 break;
30807 case dw_val_class_wide_int:
30808 hstate.add (val2->v.val_wide->get_val (),
30809 get_full_len (*val2->v.val_wide)
30810 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30811 break;
30812 default:
30813 gcc_unreachable ();
30814 }
30815 }
30816 break;
30817
30818 default:
30819 /* Other codes have no operands. */
30820 break;
30821 }
30822 }
30823
30824 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30825
30826 static inline void
30827 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30828 {
30829 dw_loc_descr_ref l;
30830 bool sizes_computed = false;
30831 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30832 size_of_locs (loc);
30833
30834 for (l = loc; l != NULL; l = l->dw_loc_next)
30835 {
30836 enum dwarf_location_atom opc = l->dw_loc_opc;
30837 hstate.add_object (opc);
30838 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30839 {
30840 size_of_locs (loc);
30841 sizes_computed = true;
30842 }
30843 hash_loc_operands (l, hstate);
30844 }
30845 }
30846
30847 /* Compute hash of the whole location list LIST_HEAD. */
30848
30849 static inline void
30850 hash_loc_list (dw_loc_list_ref list_head)
30851 {
30852 dw_loc_list_ref curr = list_head;
30853 inchash::hash hstate;
30854
30855 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30856 {
30857 hstate.add (curr->begin, strlen (curr->begin) + 1);
30858 hstate.add (curr->end, strlen (curr->end) + 1);
30859 hstate.add_object (curr->vbegin);
30860 hstate.add_object (curr->vend);
30861 if (curr->section)
30862 hstate.add (curr->section, strlen (curr->section) + 1);
30863 hash_locs (curr->expr, hstate);
30864 }
30865 list_head->hash = hstate.end ();
30866 }
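/* Aside, for illustration only: hash_loc_list folds every range of the
   list (the begin and end labels including their terminating NUL, the
   view numbers, the section name and the expression) into one
   incremental hash and caches the result on the list head, so later
   hash-table lookups never rehash the list.  A minimal standalone
   analogue using FNV-1a over a hypothetical range_node type; none of
   the names below exist in GCC:

     #include <stddef.h>
     #include <stdint.h>
     #include <string.h>

     struct range_node
     {
       const char *begin, *end;    // label names delimiting the range
       const unsigned char *expr;  // encoded location expression
       size_t expr_len;
       struct range_node *next;
       uint64_t hash;              // cached, like list_head->hash above
     };

     static uint64_t
     fnv1a (uint64_t h, const void *p, size_t n)
     {
       const unsigned char *b = (const unsigned char *) p;
       for (size_t i = 0; i < n; i++)
         h = (h ^ b[i]) * 1099511628211ull;
       return h;
     }

     static void
     hash_range_list (struct range_node *head)
     {
       uint64_t h = 14695981039346656037ull;
       for (struct range_node *r = head; r; r = r->next)
         {
           h = fnv1a (h, r->begin, strlen (r->begin) + 1);
           h = fnv1a (h, r->end, strlen (r->end) + 1);
           h = fnv1a (h, r->expr, r->expr_len);
         }
       head->hash = h;
     }

   The real code uses inchash::hash and also mixes in the view numbers
   and the section name; the cache-on-the-head idea is the same.  */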
30867
30868 /* Return true if X and Y opcodes have the same operands. */
30869
30870 static inline bool
30871 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30872 {
30873 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30874 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30875 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30876 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30877
30878 switch (x->dw_loc_opc)
30879 {
30880 case DW_OP_const4u:
30881 case DW_OP_const8u:
30882 if (x->dtprel)
30883 goto hash_addr;
30884 /* FALLTHRU */
30885 case DW_OP_const1u:
30886 case DW_OP_const1s:
30887 case DW_OP_const2u:
30888 case DW_OP_const2s:
30889 case DW_OP_const4s:
30890 case DW_OP_const8s:
30891 case DW_OP_constu:
30892 case DW_OP_consts:
30893 case DW_OP_pick:
30894 case DW_OP_plus_uconst:
30895 case DW_OP_breg0:
30896 case DW_OP_breg1:
30897 case DW_OP_breg2:
30898 case DW_OP_breg3:
30899 case DW_OP_breg4:
30900 case DW_OP_breg5:
30901 case DW_OP_breg6:
30902 case DW_OP_breg7:
30903 case DW_OP_breg8:
30904 case DW_OP_breg9:
30905 case DW_OP_breg10:
30906 case DW_OP_breg11:
30907 case DW_OP_breg12:
30908 case DW_OP_breg13:
30909 case DW_OP_breg14:
30910 case DW_OP_breg15:
30911 case DW_OP_breg16:
30912 case DW_OP_breg17:
30913 case DW_OP_breg18:
30914 case DW_OP_breg19:
30915 case DW_OP_breg20:
30916 case DW_OP_breg21:
30917 case DW_OP_breg22:
30918 case DW_OP_breg23:
30919 case DW_OP_breg24:
30920 case DW_OP_breg25:
30921 case DW_OP_breg26:
30922 case DW_OP_breg27:
30923 case DW_OP_breg28:
30924 case DW_OP_breg29:
30925 case DW_OP_breg30:
30926 case DW_OP_breg31:
30927 case DW_OP_regx:
30928 case DW_OP_fbreg:
30929 case DW_OP_piece:
30930 case DW_OP_deref_size:
30931 case DW_OP_xderef_size:
30932 return valx1->v.val_int == valy1->v.val_int;
30933 case DW_OP_skip:
30934 case DW_OP_bra:
30935 /* If splitting debug info, the use of DW_OP_GNU_addr_index
30936 can cause irrelevant differences in dw_loc_addr. */
30937 gcc_assert (valx1->val_class == dw_val_class_loc
30938 && valy1->val_class == dw_val_class_loc
30939 && (dwarf_split_debug_info
30940 || x->dw_loc_addr == y->dw_loc_addr));
30941 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
30942 case DW_OP_implicit_value:
30943 if (valx1->v.val_unsigned != valy1->v.val_unsigned
30944 || valx2->val_class != valy2->val_class)
30945 return false;
30946 switch (valx2->val_class)
30947 {
30948 case dw_val_class_const:
30949 return valx2->v.val_int == valy2->v.val_int;
30950 case dw_val_class_vec:
30951 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
30952 && valx2->v.val_vec.length == valy2->v.val_vec.length
30953 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
30954 valx2->v.val_vec.elt_size
30955 * valx2->v.val_vec.length) == 0;
30956 case dw_val_class_const_double:
30957 return valx2->v.val_double.low == valy2->v.val_double.low
30958 && valx2->v.val_double.high == valy2->v.val_double.high;
30959 case dw_val_class_wide_int:
30960 return *valx2->v.val_wide == *valy2->v.val_wide;
30961 case dw_val_class_addr:
30962 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
30963 default:
30964 gcc_unreachable ();
30965 }
30966 case DW_OP_bregx:
30967 case DW_OP_bit_piece:
30968 return valx1->v.val_int == valy1->v.val_int
30969 && valx2->v.val_int == valy2->v.val_int;
30970 case DW_OP_addr:
30971 hash_addr:
30972 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
30973 case DW_OP_GNU_addr_index:
30974 case DW_OP_addrx:
30975 case DW_OP_GNU_const_index:
30976 case DW_OP_constx:
30977 {
30978 rtx ax1 = valx1->val_entry->addr.rtl;
30979 rtx ay1 = valy1->val_entry->addr.rtl;
30980 return rtx_equal_p (ax1, ay1);
30981 }
30982 case DW_OP_implicit_pointer:
30983 case DW_OP_GNU_implicit_pointer:
30984 return valx1->val_class == dw_val_class_die_ref
30985 && valx1->val_class == valy1->val_class
30986 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
30987 && valx2->v.val_int == valy2->v.val_int;
30988 case DW_OP_entry_value:
30989 case DW_OP_GNU_entry_value:
30990 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
30991 case DW_OP_const_type:
30992 case DW_OP_GNU_const_type:
30993 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
30994 || valx2->val_class != valy2->val_class)
30995 return false;
30996 switch (valx2->val_class)
30997 {
30998 case dw_val_class_const:
30999 return valx2->v.val_int == valy2->v.val_int;
31000 case dw_val_class_vec:
31001 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
31002 && valx2->v.val_vec.length == valy2->v.val_vec.length
31003 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
31004 valx2->v.val_vec.elt_size
31005 * valx2->v.val_vec.length) == 0;
31006 case dw_val_class_const_double:
31007 return valx2->v.val_double.low == valy2->v.val_double.low
31008 && valx2->v.val_double.high == valy2->v.val_double.high;
31009 case dw_val_class_wide_int:
31010 return *valx2->v.val_wide == *valy2->v.val_wide;
31011 default:
31012 gcc_unreachable ();
31013 }
31014 case DW_OP_regval_type:
31015 case DW_OP_deref_type:
31016 case DW_OP_GNU_regval_type:
31017 case DW_OP_GNU_deref_type:
31018 return valx1->v.val_int == valy1->v.val_int
31019 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
31020 case DW_OP_convert:
31021 case DW_OP_reinterpret:
31022 case DW_OP_GNU_convert:
31023 case DW_OP_GNU_reinterpret:
31024 if (valx1->val_class != valy1->val_class)
31025 return false;
31026 if (valx1->val_class == dw_val_class_unsigned_const)
31027 return valx1->v.val_unsigned == valy1->v.val_unsigned;
31028 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
31029 case DW_OP_GNU_parameter_ref:
31030 return valx1->val_class == dw_val_class_die_ref
31031 && valx1->val_class == valy1->val_class
31032 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
31033 default:
31034 /* Other codes have no operands. */
31035 return true;
31036 }
31037 }
31038
31039 /* Return true if DWARF location expressions X and Y are the same. */
31040
31041 static inline bool
31042 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
31043 {
31044 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
31045 if (x->dw_loc_opc != y->dw_loc_opc
31046 || x->dtprel != y->dtprel
31047 || !compare_loc_operands (x, y))
31048 break;
31049 return x == NULL && y == NULL;
31050 }
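/* Aside, for illustration only: compare_locs walks both expressions in
   lockstep and stops at the first mismatch; the final test succeeds
   only when both walks ran off the end together, so a list that is a
   strict prefix of the other still compares unequal.  The same shape
   on a hypothetical node type:

     #include <stddef.h>

     struct node { int value; struct node *next; };

     static int
     lists_equal (const struct node *x, const struct node *y)
     {
       for (; x != NULL && y != NULL; x = x->next, y = y->next)
         if (x->value != y->value)
           break;
       // True only when neither a mismatch nor a length difference
       // was found.
       return x == NULL && y == NULL;
     }

   The real comparison also checks dtprel and the per-opcode operands
   via compare_loc_operands.  */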
31051
31052 /* Hashtable helpers. */
31053
31054 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
31055 {
31056 static inline hashval_t hash (const dw_loc_list_struct *);
31057 static inline bool equal (const dw_loc_list_struct *,
31058 const dw_loc_list_struct *);
31059 };
31060
31061 /* Return precomputed hash of location list X. */
31062
31063 inline hashval_t
31064 loc_list_hasher::hash (const dw_loc_list_struct *x)
31065 {
31066 return x->hash;
31067 }
31068
31069 /* Return true if location lists A and B are the same. */
31070
31071 inline bool
31072 loc_list_hasher::equal (const dw_loc_list_struct *a,
31073 const dw_loc_list_struct *b)
31074 {
31075 if (a == b)
31076 return true;
31077 if (a->hash != b->hash)
31078 return false;
31079 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
31080 if (strcmp (a->begin, b->begin) != 0
31081 || strcmp (a->end, b->end) != 0
31082 || (a->section == NULL) != (b->section == NULL)
31083 || (a->section && strcmp (a->section, b->section) != 0)
31084 || a->vbegin != b->vbegin || a->vend != b->vend
31085 || !compare_locs (a->expr, b->expr))
31086 break;
31087 return a == NULL && b == NULL;
31088 }
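/* For illustration only: the equality callback above uses the hash
   cached by hash_loc_list as a cheap negative test.  Different hashes
   prove the lists differ; equal hashes still require the full
   structural walk because of possible collisions.  A minimal sketch of
   that fast path, with a hypothetical type:

     #include <stdint.h>
     #include <string.h>

     struct cached { uint64_t hash; const char *key; };

     static int
     cached_equal (const struct cached *a, const struct cached *b)
     {
       if (a == b)
         return 1;
       if (a->hash != b->hash)
         return 0;                            // cheap reject, no deep walk
       return strcmp (a->key, b->key) == 0;   // collisions need the walk
     }
*/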
31089
31090 typedef hash_table<loc_list_hasher> loc_list_hash_type;
31091
31092
31093 /* Recursively optimize location lists referenced from DIE
31094 children and share them whenever possible. */
31095
31096 static void
31097 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
31098 {
31099 dw_die_ref c;
31100 dw_attr_node *a;
31101 unsigned ix;
31102 dw_loc_list_struct **slot;
31103 bool drop_locviews = false;
31104 bool has_locviews = false;
31105
31106 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31107 if (AT_class (a) == dw_val_class_loc_list)
31108 {
31109 dw_loc_list_ref list = AT_loc_list (a);
31110 /* TODO: perform some optimizations here, before hashing
31111 it and storing it into the hash table. */
31112 hash_loc_list (list);
31113 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
31114 if (*slot == NULL)
31115 {
31116 *slot = list;
31117 if (loc_list_has_views (list))
31118 gcc_assert (list->vl_symbol);
31119 else if (list->vl_symbol)
31120 {
31121 drop_locviews = true;
31122 list->vl_symbol = NULL;
31123 }
31124 }
31125 else
31126 {
31127 if (list->vl_symbol && !(*slot)->vl_symbol)
31128 drop_locviews = true;
31129 a->dw_attr_val.v.val_loc_list = *slot;
31130 }
31131 }
31132 else if (AT_class (a) == dw_val_class_view_list)
31133 {
31134 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
31135 has_locviews = true;
31136 }
31137
31138
31139 if (drop_locviews && has_locviews)
31140 remove_AT (die, DW_AT_GNU_locviews);
31141
31142 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
31143 }
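/* Aside, for illustration only: the pass above is hash consing.  Each
   location list is hashed once (hash_loc_list), looked up with
   find_slot_with_hash, and either installed as the canonical copy or
   replaced by the copy already in the table, after which the attribute
   simply points at the shared list.  The same pattern with a toy
   fixed-size open-addressing table; all names are hypothetical:

     #include <stddef.h>
     #include <stdint.h>

     struct loclist;                      // opaque payload
     typedef int (*eq_fn) (const struct loclist *, const struct loclist *);

     #define SLOTS 1024                   // power of two, toy sizing
     static struct loclist *slots[SLOTS];

     // Return the canonical copy of LIST, inserting it if no equal
     // list has been seen before.  HASH is precomputed.
     static struct loclist *
     intern_loclist (struct loclist *list, uint64_t hash, eq_fn equal)
     {
       size_t i = (size_t) hash & (SLOTS - 1);
       while (slots[i] != NULL)
         {
           if (equal (slots[i], list))
             return slots[i];             // share the existing copy
           i = (i + 1) & (SLOTS - 1);     // linear probing
         }
       slots[i] = list;                   // first occurrence is canonical
       return list;
     }

   Callers then overwrite their own reference with the returned
   pointer, which is what the assignment of *slot to
   a->dw_attr_val.v.val_loc_list does above.  The locview bookkeeping
   (vl_symbol, DW_AT_GNU_locviews) has no analogue in this sketch.  */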
31144
31145
31146 /* Recursively assign each location list a unique index into the debug_addr
31147 section. */
31148
31149 static void
31150 index_location_lists (dw_die_ref die)
31151 {
31152 dw_die_ref c;
31153 dw_attr_node *a;
31154 unsigned ix;
31155
31156 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31157 if (AT_class (a) == dw_val_class_loc_list)
31158 {
31159 dw_loc_list_ref list = AT_loc_list (a);
31160 dw_loc_list_ref curr;
31161 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
31162 {
31163 /* Don't index an entry that has already been indexed
31164 or won't be output. Make sure skip_loc_list_entry doesn't
31165 call size_of_locs, because that might cause a circular
31166 dependency: index_location_lists needs the address table
31167 indexes to be computed, while adding new indexes through
31168 add_addr_table_entry requires that the index computation not
31169 add anything further to the hash table. In the rare case of a
31170 DWARF[234] location expression of 64KB or more, we'll just
31171 waste an unused address table entry for it. */
31172 if (curr->begin_entry != NULL
31173 || skip_loc_list_entry (curr))
31174 continue;
31175
31176 curr->begin_entry
31177 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
31178 }
31179 }
31180
31181 FOR_EACH_CHILD (die, c, index_location_lists (c));
31182 }
31183
31184 /* Optimize location lists referenced from DIE
31185 children and share them whenever possible. */
31186
31187 static void
31188 optimize_location_lists (dw_die_ref die)
31189 {
31190 loc_list_hash_type htab (500);
31191 optimize_location_lists_1 (die, &htab);
31192 }
31193 \f
31194 /* Traverse the limbo die list, and add parent/child links. The only
31195 dies without parents that should be here are concrete instances of
31196 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
31197 For concrete instances, we can get the parent die from the abstract
31198 instance. */
31199
31200 static void
31201 flush_limbo_die_list (void)
31202 {
31203 limbo_die_node *node;
31204
31205 /* get_context_die calls force_decl_die, which can put new DIEs on the
31206 limbo list in LTO mode when nested functions are put in a different
31207 partition than that of their parent function. */
31208 while ((node = limbo_die_list))
31209 {
31210 dw_die_ref die = node->die;
31211 limbo_die_list = node->next;
31212
31213 if (die->die_parent == NULL)
31214 {
31215 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31216
31217 if (origin && origin->die_parent)
31218 add_child_die (origin->die_parent, die);
31219 else if (is_cu_die (die))
31220 ;
31221 else if (seen_error ())
31222 /* It's OK to be confused by errors in the input. */
31223 add_child_die (comp_unit_die (), die);
31224 else
31225 {
31226 /* In certain situations, the lexical block containing a
31227 nested function can be optimized away, which results
31228 in the nested function die being orphaned. Likewise
31229 with the return type of that nested function. Force
31230 this to be a child of the containing function.
31231
31232 It may happen that even the containing function got fully
31233 inlined and optimized out. In that case we are lost and
31234 assign the empty child. This should not be a big issue as
31235 the function is likely unreachable too. */
31236 gcc_assert (node->created_for);
31237
31238 if (DECL_P (node->created_for))
31239 origin = get_context_die (DECL_CONTEXT (node->created_for));
31240 else if (TYPE_P (node->created_for))
31241 origin = scope_die_for (node->created_for, comp_unit_die ());
31242 else
31243 origin = comp_unit_die ();
31244
31245 add_child_die (origin, die);
31246 }
31247 }
31248 }
31249 }
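/* Aside, for illustration only: the loop above re-reads
   limbo_die_list at the top of every iteration instead of walking a
   cached pointer, because reparenting a DIE can itself push new limbo
   nodes (see the comment about get_context_die and force_decl_die).
   The generic drain-while-it-can-grow pattern, with hypothetical
   names:

     #include <stddef.h>

     struct work_item { struct work_item *next; int payload; };
     static struct work_item *worklist;   // LIFO list of pending work

     extern void process_item (struct work_item *);  // may push new work

     static void
     drain_worklist (void)
     {
       struct work_item *item;
       while ((item = worklist) != NULL)
         {
           worklist = item->next;         // pop before processing
           process_item (item);           // may prepend more items
         }
     }
*/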
31250
31251 /* Reset DIEs so we can output them again. */
31252
31253 static void
31254 reset_dies (dw_die_ref die)
31255 {
31256 dw_die_ref c;
31257
31258 /* Remove stuff we re-generate. */
31259 die->die_mark = 0;
31260 die->die_offset = 0;
31261 die->die_abbrev = 0;
31262 remove_AT (die, DW_AT_sibling);
31263
31264 FOR_EACH_CHILD (die, c, reset_dies (c));
31265 }
31266
31267 /* Output stuff that dwarf requires at the end of every file,
31268 and generate the DWARF-2 debugging info. */
31269
31270 static void
31271 dwarf2out_finish (const char *filename)
31272 {
31273 comdat_type_node *ctnode;
31274 dw_die_ref main_comp_unit_die;
31275 unsigned char checksum[16];
31276 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31277
31278 /* Flush out any latecomers to the limbo party. */
31279 flush_limbo_die_list ();
31280
31281 if (inline_entry_data_table)
31282 gcc_assert (inline_entry_data_table->is_empty ());
31283
31284 if (flag_checking)
31285 {
31286 verify_die (comp_unit_die ());
31287 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31288 verify_die (node->die);
31289 }
31290
31291 /* We shouldn't have any symbols with delayed asm names for
31292 DIEs generated after early finish. */
31293 gcc_assert (deferred_asm_name == NULL);
31294
31295 gen_remaining_tmpl_value_param_die_attribute ();
31296
31297 if (flag_generate_lto || flag_generate_offload)
31298 {
31299 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31300
31301 /* Prune stuff so that dwarf2out_finish runs successfully
31302 for the fat part of the object. */
31303 reset_dies (comp_unit_die ());
31304 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31305 reset_dies (node->die);
31306
31307 hash_table<comdat_type_hasher> comdat_type_table (100);
31308 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31309 {
31310 comdat_type_node **slot
31311 = comdat_type_table.find_slot (ctnode, INSERT);
31312
31313 /* Don't reset types twice. */
31314 if (*slot != HTAB_EMPTY_ENTRY)
31315 continue;
31316
31317 /* Remove the pointer to the line table. */
31318 remove_AT (ctnode->root_die, DW_AT_stmt_list);
31319
31320 if (debug_info_level >= DINFO_LEVEL_TERSE)
31321 reset_dies (ctnode->root_die);
31322
31323 *slot = ctnode;
31324 }
31325
31326 /* Reset die CU symbol so we don't output it twice. */
31327 comp_unit_die ()->die_id.die_symbol = NULL;
31328
31329 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31330 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31331 if (have_macinfo)
31332 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31333
31334 /* Remove indirect string decisions. */
31335 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31336 if (debug_line_str_hash)
31337 {
31338 debug_line_str_hash->traverse<void *, reset_indirect_string> (NULL);
31339 debug_line_str_hash = NULL;
31340 }
31341 }
31342
31343 #if ENABLE_ASSERT_CHECKING
31344 {
31345 dw_die_ref die = comp_unit_die (), c;
31346 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31347 }
31348 #endif
31349 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31350 resolve_addr (ctnode->root_die);
31351 resolve_addr (comp_unit_die ());
31352 move_marked_base_types ();
31353
31354 if (dump_file)
31355 {
31356 fprintf (dump_file, "DWARF for %s\n", filename);
31357 print_die (comp_unit_die (), dump_file);
31358 }
31359
31360 /* Initialize sections and labels used for actual assembler output. */
31361 unsigned generation = init_sections_and_labels (false);
31362
31363 /* Traverse the DIE's and add sibling attributes to those DIE's that
31364 have children. */
31365 add_sibling_attributes (comp_unit_die ());
31366 limbo_die_node *node;
31367 for (node = cu_die_list; node; node = node->next)
31368 add_sibling_attributes (node->die);
31369 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31370 add_sibling_attributes (ctnode->root_die);
31371
31372 /* When splitting DWARF info, we put some attributes in the
31373 skeleton compile_unit DIE that remains in the .o, while
31374 most attributes go in the DWO compile_unit_die. */
31375 if (dwarf_split_debug_info)
31376 {
31377 limbo_die_node *cu;
31378 main_comp_unit_die = gen_compile_unit_die (NULL);
31379 if (dwarf_version >= 5)
31380 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
31381 cu = limbo_die_list;
31382 gcc_assert (cu->die == main_comp_unit_die);
31383 limbo_die_list = limbo_die_list->next;
31384 cu->next = cu_die_list;
31385 cu_die_list = cu;
31386 }
31387 else
31388 main_comp_unit_die = comp_unit_die ();
31389
31390 /* Output a terminator label for the .text section. */
31391 switch_to_section (text_section);
31392 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31393 if (cold_text_section)
31394 {
31395 switch_to_section (cold_text_section);
31396 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31397 }
31398
31399 /* We can only use the low/high_pc attributes if all of the code was
31400 in .text. */
31401 if (!have_multiple_function_sections
31402 || (dwarf_version < 3 && dwarf_strict))
31403 {
31404 /* Don't add if the CU has no associated code. */
31405 if (text_section_used)
31406 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31407 text_end_label, true);
31408 }
31409 else
31410 {
31411 unsigned fde_idx;
31412 dw_fde_ref fde;
31413 bool range_list_added = false;
31414
31415 if (text_section_used)
31416 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31417 text_end_label, &range_list_added, true);
31418 if (cold_text_section_used)
31419 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31420 cold_end_label, &range_list_added, true);
31421
31422 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31423 {
31424 if (DECL_IGNORED_P (fde->decl))
31425 continue;
31426 if (!fde->in_std_section)
31427 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31428 fde->dw_fde_end, &range_list_added,
31429 true);
31430 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31431 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31432 fde->dw_fde_second_end, &range_list_added,
31433 true);
31434 }
31435
31436 if (range_list_added)
31437 {
31438 /* We need to give .debug_loc and .debug_ranges an appropriate
31439 "base address". Use zero so that these addresses become
31440 absolute. Historically, we've emitted the unexpected
31441 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31442 Emit both to give time for other tools to adapt. */
31443 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31444 if (! dwarf_strict && dwarf_version < 4)
31445 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31446
31447 add_ranges (NULL);
31448 }
31449 }
31450
31451 /* AIX Assembler inserts the length, so adjust the reference to match the
31452 offset expected by debuggers. */
31453 strcpy (dl_section_ref, debug_line_section_label);
31454 if (XCOFF_DEBUGGING_INFO)
31455 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31456
31457 if (debug_info_level >= DINFO_LEVEL_TERSE)
31458 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31459 dl_section_ref);
31460
31461 if (have_macinfo)
31462 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31463 macinfo_section_label);
31464
31465 if (dwarf_split_debug_info)
31466 {
31467 if (have_location_lists)
31468 {
31469 /* Since we generate the loclists in the split DWARF .dwo
31470 file itself, we don't need to generate a loclists_base
31471 attribute for the split compile unit DIE. That attribute
31472 (and using relocatable sec_offset FORMs) isn't allowed
31473 for a split compile unit. Only if the .debug_loclists
31474 section was in the main file, would we need to generate a
31475 loclists_base attribute here (for the full or skeleton
31476 unit DIE). */
31477
31478 /* optimize_location_lists calculates the size of the lists,
31479 so index them first, and assign indices to the entries.
31480 Although optimize_location_lists will remove entries from
31481 the table, it only does so for duplicates, and therefore
31482 only reduces ref_counts to 1. */
31483 index_location_lists (comp_unit_die ());
31484 }
31485
31486 if (addr_index_table != NULL)
31487 {
31488 unsigned int index = 0;
31489 addr_index_table
31490 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31491 (&index);
31492 }
31493 }
31494
31495 loc_list_idx = 0;
31496 if (have_location_lists)
31497 {
31498 optimize_location_lists (comp_unit_die ());
31499 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31500 if (dwarf_version >= 5 && dwarf_split_debug_info)
31501 assign_location_list_indexes (comp_unit_die ());
31502 }
31503
31504 save_macinfo_strings ();
31505
31506 if (dwarf_split_debug_info)
31507 {
31508 unsigned int index = 0;
31509
31510 /* Add attributes common to skeleton compile_units and
31511 type_units. Because these attributes include strings, it
31512 must be done before freezing the string table. Top-level
31513 skeleton die attrs are added when the skeleton type unit is
31514 created, so ensure it is created by this point. */
31515 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31516 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31517 }
31518
31519 /* Output all of the compilation units. We put the main one last so that
31520 the offsets are available to output_pubnames. */
31521 for (node = cu_die_list; node; node = node->next)
31522 output_comp_unit (node->die, 0, NULL);
31523
31524 hash_table<comdat_type_hasher> comdat_type_table (100);
31525 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31526 {
31527 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31528
31529 /* Don't output duplicate types. */
31530 if (*slot != HTAB_EMPTY_ENTRY)
31531 continue;
31532
31533 /* Add a pointer to the line table for the main compilation unit
31534 so that the debugger can make sense of DW_AT_decl_file
31535 attributes. */
31536 if (debug_info_level >= DINFO_LEVEL_TERSE)
31537 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31538 (!dwarf_split_debug_info
31539 ? dl_section_ref
31540 : debug_skeleton_line_section_label));
31541
31542 output_comdat_type_unit (ctnode, false);
31543 *slot = ctnode;
31544 }
31545
31546 if (dwarf_split_debug_info)
31547 {
31548 int mark;
31549 struct md5_ctx ctx;
31550
31551 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31552 index_rnglists ();
31553
31554 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31555 md5_init_ctx (&ctx);
31556 mark = 0;
31557 die_checksum (comp_unit_die (), &ctx, &mark);
31558 unmark_all_dies (comp_unit_die ());
31559 md5_finish_ctx (&ctx, checksum);
31560
31561 if (dwarf_version < 5)
31562 {
31563 /* Use the first 8 bytes of the checksum as the dwo_id,
31564 and add it to both comp-unit DIEs. */
31565 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31566 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31567 }
31568
31569 /* Add the base offset of the ranges table to the skeleton
31570 comp-unit DIE. */
31571 if (!vec_safe_is_empty (ranges_table))
31572 {
31573 if (dwarf_version >= 5)
31574 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31575 ranges_base_label);
31576 else
31577 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31578 ranges_section_label);
31579 }
31580
31581 output_addr_table ();
31582 }
31583
31584 /* Output the main compilation unit if non-empty or if .debug_macinfo
31585 or .debug_macro will be emitted. */
31586 output_comp_unit (comp_unit_die (), have_macinfo,
31587 dwarf_split_debug_info ? checksum : NULL);
31588
31589 if (dwarf_split_debug_info && info_section_emitted)
31590 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31591
31592 /* Output the abbreviation table. */
31593 if (vec_safe_length (abbrev_die_table) != 1)
31594 {
31595 switch_to_section (debug_abbrev_section);
31596 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31597 output_abbrev_section ();
31598 }
31599
31600 /* Output location list section if necessary. */
31601 if (have_location_lists)
31602 {
31603 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31604 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31605 /* Output the location lists info. */
31606 switch_to_section (debug_loc_section);
31607 if (dwarf_version >= 5)
31608 {
31609 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 2);
31610 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 3);
31611 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31612 dw2_asm_output_data (4, 0xffffffff,
31613 "Initial length escape value indicating "
31614 "64-bit DWARF extension");
31615 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31616 "Length of Location Lists");
31617 ASM_OUTPUT_LABEL (asm_out_file, l1);
31618 output_dwarf_version ();
31619 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31620 dw2_asm_output_data (1, 0, "Segment Size");
31621 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31622 "Offset Entry Count");
31623 }
31624 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31625 if (dwarf_version >= 5 && dwarf_split_debug_info)
31626 {
31627 unsigned int save_loc_list_idx = loc_list_idx;
31628 loc_list_idx = 0;
31629 output_loclists_offsets (comp_unit_die ());
31630 gcc_assert (save_loc_list_idx == loc_list_idx);
31631 }
31632 output_location_lists (comp_unit_die ());
31633 if (dwarf_version >= 5)
31634 ASM_OUTPUT_LABEL (asm_out_file, l2);
31635 }
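/* Aside, for illustration only: the .debug_loclists header just
   emitted starts with a DWARF "initial length".  In the 32-bit format
   that is a plain 4-byte length whose value stays below 0xfffffff0;
   the 64-bit format first emits the 4-byte escape 0xffffffff (the
   case guarded above by DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE
   == 4) followed by an 8-byte length.  A byte-level sketch, assuming
   a little-endian target; the real emitter goes through
   dw2_asm_output_data and the assembler:

     #include <stddef.h>
     #include <stdint.h>
     #include <string.h>

     static size_t
     write_initial_length (unsigned char *out, uint64_t len, int dwarf64)
     {
       if (dwarf64)
         {
           uint32_t escape = 0xffffffff;  // marks the 64-bit format
           memcpy (out, &escape, 4);
           memcpy (out + 4, &len, 8);
           return 12;
         }
       uint32_t len32 = (uint32_t) len;   // must stay below 0xfffffff0
       memcpy (out, &len32, 4);
       return 4;
     }
*/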
31636
31637 output_pubtables ();
31638
31639 /* Output the address range information if a CU (.debug_info section)
31640 was emitted. We output an empty table even if we had no functions
31641 to put in it. This is because the consumer has no way to tell the
31642 difference between an empty table that we omitted and failure to
31643 generate a table that would have contained data. */
31644 if (info_section_emitted)
31645 {
31646 switch_to_section (debug_aranges_section);
31647 output_aranges ();
31648 }
31649
31650 /* Output ranges section if necessary. */
31651 if (!vec_safe_is_empty (ranges_table))
31652 {
31653 if (dwarf_version >= 5)
31654 output_rnglists (generation);
31655 else
31656 output_ranges ();
31657 }
31658
31659 /* Have to end the macro section. */
31660 if (have_macinfo)
31661 {
31662 switch_to_section (debug_macinfo_section);
31663 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31664 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31665 : debug_skeleton_line_section_label, false);
31666 dw2_asm_output_data (1, 0, "End compilation unit");
31667 }
31668
31669 /* Output the source line correspondence table. We must do this
31670 even if there is no line information. Otherwise, on an empty
31671 translation unit, we will generate a present, but empty,
31672 .debug_info section. IRIX 6.5 `nm' will then complain when
31673 examining the file. This is done late so that any filenames
31674 used by the debug_info section are marked as 'used'. */
31675 switch_to_section (debug_line_section);
31676 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31677 if (! output_asm_line_debug_info ())
31678 output_line_info (false);
31679
31680 if (dwarf_split_debug_info && info_section_emitted)
31681 {
31682 switch_to_section (debug_skeleton_line_section);
31683 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31684 output_line_info (true);
31685 }
31686
31687 /* If we emitted any indirect strings, output the string table too. */
31688 if (debug_str_hash || skeleton_debug_str_hash)
31689 output_indirect_strings ();
31690 if (debug_line_str_hash)
31691 {
31692 switch_to_section (debug_line_str_section);
31693 const enum dwarf_form form = DW_FORM_line_strp;
31694 debug_line_str_hash->traverse<enum dwarf_form,
31695 output_indirect_string> (form);
31696 }
31697
31698 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31699 symview_upper_bound = 0;
31700 if (zero_view_p)
31701 bitmap_clear (zero_view_p);
31702 }
31703
31704 /* Returns a hash value for X (which really is a variable_value_struct). */
31705
31706 inline hashval_t
31707 variable_value_hasher::hash (variable_value_struct *x)
31708 {
31709 return (hashval_t) x->decl_id;
31710 }
31711
31712 /* Return nonzero if decl_id of variable_value_struct X is the same as
31713 UID of decl Y. */
31714
31715 inline bool
31716 variable_value_hasher::equal (variable_value_struct *x, tree y)
31717 {
31718 return x->decl_id == DECL_UID (y);
31719 }
31720
31721 /* Helper function for resolve_variable_value, handle
31722 DW_OP_GNU_variable_value in one location expression.
31723 Return true if exprloc has been changed into loclist. */
31724
31725 static bool
31726 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31727 {
31728 dw_loc_descr_ref next;
31729 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31730 {
31731 next = loc->dw_loc_next;
31732 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31733 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31734 continue;
31735
31736 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31737 if (DECL_CONTEXT (decl) != current_function_decl)
31738 continue;
31739
31740 dw_die_ref ref = lookup_decl_die (decl);
31741 if (ref)
31742 {
31743 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31744 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31745 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31746 continue;
31747 }
31748 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31749 if (l == NULL)
31750 continue;
31751 if (l->dw_loc_next)
31752 {
31753 if (AT_class (a) != dw_val_class_loc)
31754 continue;
31755 switch (a->dw_attr)
31756 {
31757 /* The following attributes allow both exprloc and loclist
31758 classes, so we can change them into a loclist. */
31759 case DW_AT_location:
31760 case DW_AT_string_length:
31761 case DW_AT_return_addr:
31762 case DW_AT_data_member_location:
31763 case DW_AT_frame_base:
31764 case DW_AT_segment:
31765 case DW_AT_static_link:
31766 case DW_AT_use_location:
31767 case DW_AT_vtable_elem_location:
31768 if (prev)
31769 {
31770 prev->dw_loc_next = NULL;
31771 prepend_loc_descr_to_each (l, AT_loc (a));
31772 }
31773 if (next)
31774 add_loc_descr_to_each (l, next);
31775 a->dw_attr_val.val_class = dw_val_class_loc_list;
31776 a->dw_attr_val.val_entry = NULL;
31777 a->dw_attr_val.v.val_loc_list = l;
31778 have_location_lists = true;
31779 return true;
31780 /* The following attributes allow both exprloc and reference,
31781 so if the whole expression is DW_OP_GNU_variable_value alone
31782 we could transform it into a reference. */
31783 case DW_AT_byte_size:
31784 case DW_AT_bit_size:
31785 case DW_AT_lower_bound:
31786 case DW_AT_upper_bound:
31787 case DW_AT_bit_stride:
31788 case DW_AT_count:
31789 case DW_AT_allocated:
31790 case DW_AT_associated:
31791 case DW_AT_byte_stride:
31792 if (prev == NULL && next == NULL)
31793 break;
31794 /* FALLTHRU */
31795 default:
31796 if (dwarf_strict)
31797 continue;
31798 break;
31799 }
31800 /* Create DW_TAG_variable that we can refer to. */
31801 gen_decl_die (decl, NULL_TREE, NULL,
31802 lookup_decl_die (current_function_decl));
31803 ref = lookup_decl_die (decl);
31804 if (ref)
31805 {
31806 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31807 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31808 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31809 }
31810 continue;
31811 }
31812 if (prev)
31813 {
31814 prev->dw_loc_next = l->expr;
31815 add_loc_descr (&prev->dw_loc_next, next);
31816 free_loc_descr (loc, NULL);
31817 next = prev->dw_loc_next;
31818 }
31819 else
31820 {
31821 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31822 add_loc_descr (&loc, next);
31823 next = loc;
31824 }
31825 loc = prev;
31826 }
31827 return false;
31828 }
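/* Aside, for illustration only: when the resolved location is a
   single expression, the code above splices it into the list in place
   of the DW_OP_GNU_variable_value node; when that node is the head
   (prev == NULL) it copies the first replacement node over it with
   memcpy so that existing pointers to the head stay valid.  A generic
   version of that splice on a hypothetical node type:

     #include <string.h>

     struct op { int code; struct op *next; };

     // Replace NODE (whose predecessor is PREV, or NULL if NODE is
     // the head) by the non-empty list REPL, keeping NODE's old
     // successor as the new tail's successor.
     static void
     splice_replace (struct op *prev, struct op *node, struct op *repl)
     {
       struct op *rest = node->next;
       if (prev)
         prev->next = repl;               // unlink NODE, link REPL
       else
         {
           // Keep the head's address stable by overwriting NODE with
           // the first replacement node, as the memcpy above does.
           memcpy (node, repl, sizeof (struct op));
           repl = node;
         }
       struct op *tail = repl;
       while (tail->next)
         tail = tail->next;
       tail->next = rest;                 // reattach the old suffix
     }

   The real code additionally frees the replaced descriptor in the
   prev != NULL case and keeps iterating over the spliced-in nodes.  */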
31829
31830 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31831
31832 static void
31833 resolve_variable_value (dw_die_ref die)
31834 {
31835 dw_attr_node *a;
31836 dw_loc_list_ref loc;
31837 unsigned ix;
31838
31839 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31840 switch (AT_class (a))
31841 {
31842 case dw_val_class_loc:
31843 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31844 break;
31845 /* FALLTHRU */
31846 case dw_val_class_loc_list:
31847 loc = AT_loc_list (a);
31848 gcc_assert (loc);
31849 for (; loc; loc = loc->dw_loc_next)
31850 resolve_variable_value_in_expr (a, loc->expr);
31851 break;
31852 default:
31853 break;
31854 }
31855 }
31856
31857 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31858 temporaries in the current function. */
31859
31860 static void
31861 resolve_variable_values (void)
31862 {
31863 if (!variable_value_hash || !current_function_decl)
31864 return;
31865
31866 struct variable_value_struct *node
31867 = variable_value_hash->find_with_hash (current_function_decl,
31868 DECL_UID (current_function_decl));
31869
31870 if (node == NULL)
31871 return;
31872
31873 unsigned int i;
31874 dw_die_ref die;
31875 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31876 resolve_variable_value (die);
31877 }
31878
31879 /* Helper function for note_variable_value, handle one location
31880 expression. */
31881
31882 static void
31883 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31884 {
31885 for (; loc; loc = loc->dw_loc_next)
31886 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31887 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31888 {
31889 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31890 dw_die_ref ref = lookup_decl_die (decl);
31891 if (! ref && (flag_generate_lto || flag_generate_offload))
31892 {
31893 /* ??? This is something of a hack because we do not create DIEs
31894 for variables not in BLOCK trees early but when generating
31895 early LTO output we need the dw_val_class_decl_ref to be
31896 fully resolved. For fat LTO objects we'd also like to
31897 undo this after LTO dwarf output. */
31898 gcc_assert (DECL_CONTEXT (decl));
31899 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
31900 gcc_assert (ctx != NULL);
31901 gen_decl_die (decl, NULL_TREE, NULL, ctx);
31902 ref = lookup_decl_die (decl);
31903 gcc_assert (ref != NULL);
31904 }
31905 if (ref)
31906 {
31907 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31908 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31909 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31910 continue;
31911 }
31912 if (VAR_P (decl)
31913 && DECL_CONTEXT (decl)
31914 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
31915 && lookup_decl_die (DECL_CONTEXT (decl)))
31916 {
31917 if (!variable_value_hash)
31918 variable_value_hash
31919 = hash_table<variable_value_hasher>::create_ggc (10);
31920
31921 tree fndecl = DECL_CONTEXT (decl);
31922 struct variable_value_struct *node;
31923 struct variable_value_struct **slot
31924 = variable_value_hash->find_slot_with_hash (fndecl,
31925 DECL_UID (fndecl),
31926 INSERT);
31927 if (*slot == NULL)
31928 {
31929 node = ggc_cleared_alloc<variable_value_struct> ();
31930 node->decl_id = DECL_UID (fndecl);
31931 *slot = node;
31932 }
31933 else
31934 node = *slot;
31935
31936 vec_safe_push (node->dies, die);
31937 }
31938 }
31939 }
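/* Aside, for illustration only: when it cannot resolve the reference
   immediately, the function above records the DIE under the DECL_UID
   of the function owning the referenced variable, so that
   resolve_variable_values can revisit exactly those DIEs while that
   function is being compiled.  A toy registry with the same shape;
   fixed capacity, hypothetical names, error handling omitted:

     #include <stddef.h>

     struct fixup { int fn_uid; void *die; };
     static struct fixup pending[1024];
     static size_t n_pending;

     // Remember that DIE must be revisited when FN_UID is compiled.
     static void
     defer_fixup (int fn_uid, void *die)
     {
       if (n_pending < sizeof pending / sizeof pending[0])
         {
           pending[n_pending].fn_uid = fn_uid;
           pending[n_pending].die = die;
           n_pending++;
         }
     }

     // Later, while compiling the function with uid FN_UID.
     static void
     run_fixups (int fn_uid, void (*resolve) (void *))
     {
       for (size_t i = 0; i < n_pending; i++)
         if (pending[i].fn_uid == fn_uid)
           resolve (pending[i].die);
     }
*/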
31940
31941 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
31942 with dw_val_class_decl_ref operand. */
31943
31944 static void
31945 note_variable_value (dw_die_ref die)
31946 {
31947 dw_die_ref c;
31948 dw_attr_node *a;
31949 dw_loc_list_ref loc;
31950 unsigned ix;
31951
31952 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31953 switch (AT_class (a))
31954 {
31955 case dw_val_class_loc_list:
31956 loc = AT_loc_list (a);
31957 gcc_assert (loc);
31958 if (!loc->noted_variable_value)
31959 {
31960 loc->noted_variable_value = 1;
31961 for (; loc; loc = loc->dw_loc_next)
31962 note_variable_value_in_expr (die, loc->expr);
31963 }
31964 break;
31965 case dw_val_class_loc:
31966 note_variable_value_in_expr (die, AT_loc (a));
31967 break;
31968 default:
31969 break;
31970 }
31971
31972 /* Mark children. */
31973 FOR_EACH_CHILD (die, c, note_variable_value (c));
31974 }
31975
31976 /* Perform any cleanups needed after the early debug generation pass
31977 has run. */
31978
31979 static void
31980 dwarf2out_early_finish (const char *filename)
31981 {
31982 set_early_dwarf s;
31983 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31984
31985 /* PCH might result in DW_AT_producer string being restored from the
31986 header compilation, so always fill it with an empty string initially
31987 and overwrite only here. */
31988 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
31989 producer_string = gen_producer_string ();
31990 producer->dw_attr_val.v.val_str->refcount--;
31991 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
31992
31993 /* Add the name for the main input file now. We delayed this from
31994 dwarf2out_init to avoid complications with PCH. */
31995 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
31996 add_comp_dir_attribute (comp_unit_die ());
31997
31998 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
31999 DW_AT_comp_dir into .debug_line_str section. */
32000 if (!output_asm_line_debug_info ()
32001 && dwarf_version >= 5
32002 && DWARF5_USE_DEBUG_LINE_STR)
32003 {
32004 for (int i = 0; i < 2; i++)
32005 {
32006 dw_attr_node *a = get_AT (comp_unit_die (),
32007 i ? DW_AT_comp_dir : DW_AT_name);
32008 if (a == NULL
32009 || AT_class (a) != dw_val_class_str
32010 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
32011 continue;
32012
32013 if (! debug_line_str_hash)
32014 debug_line_str_hash
32015 = hash_table<indirect_string_hasher>::create_ggc (10);
32016
32017 struct indirect_string_node *node
32018 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
32019 set_indirect_string (node);
32020 node->form = DW_FORM_line_strp;
32021 a->dw_attr_val.v.val_str->refcount--;
32022 a->dw_attr_val.v.val_str = node;
32023 }
32024 }
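/* Aside, for illustration only: the strlen (AT_string (a)) + 1
   <= DWARF_OFFSET_SIZE test above skips strings whose inline
   DW_FORM_string encoding (the string plus its terminating NUL) is no
   larger than a DW_FORM_line_strp offset; with 4-byte offsets a
   3-character name costs 4 bytes either way, so the indirection
   through .debug_line_str cannot save space.  The same size test in
   isolation, as a hypothetical helper:

     #include <stddef.h>
     #include <string.h>

     static int
     worth_line_strp (const char *s, size_t offset_size)
     {
       // Inline form: strlen (s) + 1 bytes at each use.
       // line_strp form: offset_size bytes per use, plus one shared
       // copy of the string in .debug_line_str.
       return strlen (s) + 1 > offset_size;
     }
*/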
32025
32026 /* With LTO early dwarf was really finished at compile-time, so make
32027 sure to adjust the phase after annotating the LTRANS CU DIE. */
32028 if (in_lto_p)
32029 {
32030 /* Force DW_TAG_imported_unit to be created now, otherwise
32031 we might end up without it, or with it ordered after a
32032 DW_TAG_inlined_subroutine that references DIEs from it. */
32033 if (! flag_wpa && flag_incremental_link != INCREMENTAL_LINK_LTO)
32034 {
32035 unsigned i;
32036 tree tu;
32037 if (external_die_map)
32038 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, tu)
32039 if (sym_off_pair *desc = external_die_map->get (tu))
32040 {
32041 dw_die_ref import = new_die (DW_TAG_imported_unit,
32042 comp_unit_die (), NULL_TREE);
32043 add_AT_external_die_ref (import, DW_AT_import,
32044 desc->sym, desc->off);
32045 }
32046 }
32047
32048 early_dwarf_finished = true;
32049 if (dump_file)
32050 {
32051 fprintf (dump_file, "LTO EARLY DWARF for %s\n", filename);
32052 print_die (comp_unit_die (), dump_file);
32053 }
32054 return;
32055 }
32056
32057 /* Walk through the list of incomplete types again, trying once more to
32058 emit full debugging info for them. */
32059 retry_incomplete_types ();
32060
32061 /* The point here is to flush out the limbo list so that it is empty
32062 and we don't need to stream it for LTO. */
32063 flush_limbo_die_list ();
32064
32065 gen_scheduled_generic_parms_dies ();
32066 gen_remaining_tmpl_value_param_die_attribute ();
32067
32068 /* Add DW_AT_linkage_name for all deferred DIEs. */
32069 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
32070 {
32071 tree decl = node->created_for;
32072 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
32073 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
32074 ended up in deferred_asm_name before we knew it was
32075 constant and never written to disk. */
32076 && DECL_ASSEMBLER_NAME (decl))
32077 {
32078 add_linkage_attr (node->die, decl);
32079 move_linkage_attr (node->die);
32080 }
32081 }
32082 deferred_asm_name = NULL;
32083
32084 if (flag_eliminate_unused_debug_types)
32085 prune_unused_types ();
32086
32087 /* Generate separate COMDAT sections for type DIEs. */
32088 if (use_debug_types)
32089 {
32090 break_out_comdat_types (comp_unit_die ());
32091
32092 /* Each new type_unit DIE was added to the limbo die list when created.
32093 Since these have all been added to comdat_type_list, clear the
32094 limbo die list. */
32095 limbo_die_list = NULL;
32096
32097 /* For each new comdat type unit, copy declarations for incomplete
32098 types to make the new unit self-contained (i.e., no direct
32099 references to the main compile unit). */
32100 for (comdat_type_node *ctnode = comdat_type_list;
32101 ctnode != NULL; ctnode = ctnode->next)
32102 copy_decls_for_unworthy_types (ctnode->root_die);
32103 copy_decls_for_unworthy_types (comp_unit_die ());
32104
32105 /* In the process of copying declarations from one unit to another,
32106 we may have left some declarations behind that are no longer
32107 referenced. Prune them. */
32108 prune_unused_types ();
32109 }
32110
32111 /* Traverse the DIE's and note DIEs with DW_OP_GNU_variable_value still
32112 with dw_val_class_decl_ref operand. */
32113 note_variable_value (comp_unit_die ());
32114 for (limbo_die_node *node = cu_die_list; node; node = node->next)
32115 note_variable_value (node->die);
32116 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
32117 ctnode = ctnode->next)
32118 note_variable_value (ctnode->root_die);
32119 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32120 note_variable_value (node->die);
32121
32122 /* The AT_pubnames attribute needs to go in all skeleton dies, including
32123 both the main_cu and all skeleton TUs. Making this call unconditional
32124 would end up either adding a second copy of the AT_pubnames attribute, or
32125 requiring a special case in add_top_level_skeleton_die_attrs. */
32126 if (!dwarf_split_debug_info)
32127 add_AT_pubnames (comp_unit_die ());
32128
32129 /* The early debug phase is now finished. */
32130 early_dwarf_finished = true;
32131 if (dump_file)
32132 {
32133 fprintf (dump_file, "EARLY DWARF for %s\n", filename);
32134 print_die (comp_unit_die (), dump_file);
32135 }
32136
32137 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
32138 if ((!flag_generate_lto && !flag_generate_offload)
32139 /* FIXME: Disable debug info generation for (PE-)COFF targets since the
32140 copy_lto_debug_sections operation of the simple object support in
32141 libiberty is not implemented for them yet. */
32142 || TARGET_PECOFF || TARGET_COFF)
32143 return;
32144
32145 /* Now that we are going to output for LTO, initialize sections
32146 and labels to the LTO variants. We don't need a random-seed
32147 postfix as other LTO sections do, since linking the LTO debug
32148 sections into one in a partial link is fine. */
32149 init_sections_and_labels (true);
32150
32151 /* The output below is modeled after dwarf2out_finish with all
32152 location related output removed and some LTO specific changes.
32153 Some refactoring might make both smaller and easier to match up. */
32154
32155 /* Traverse the DIE's and add sibling attributes to those DIE's
32156 that have children. */
32157 add_sibling_attributes (comp_unit_die ());
32158 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32159 add_sibling_attributes (node->die);
32160 for (comdat_type_node *ctnode = comdat_type_list;
32161 ctnode != NULL; ctnode = ctnode->next)
32162 add_sibling_attributes (ctnode->root_die);
32163
32164 /* AIX Assembler inserts the length, so adjust the reference to match the
32165 offset expected by debuggers. */
32166 strcpy (dl_section_ref, debug_line_section_label);
32167 if (XCOFF_DEBUGGING_INFO)
32168 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
32169
32170 if (debug_info_level >= DINFO_LEVEL_TERSE)
32171 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
32172
32173 if (have_macinfo)
32174 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
32175 macinfo_section_label);
32176
32177 save_macinfo_strings ();
32178
32179 if (dwarf_split_debug_info)
32180 {
32181 unsigned int index = 0;
32182 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
32183 }
32184
32185 /* Output all of the compilation units. We put the main one last so that
32186 the offsets are available to output_pubnames. */
32187 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32188 output_comp_unit (node->die, 0, NULL);
32189
32190 hash_table<comdat_type_hasher> comdat_type_table (100);
32191 for (comdat_type_node *ctnode = comdat_type_list;
32192 ctnode != NULL; ctnode = ctnode->next)
32193 {
32194 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
32195
32196 /* Don't output duplicate types. */
32197 if (*slot != HTAB_EMPTY_ENTRY)
32198 continue;
32199
32200 /* Add a pointer to the line table for the main compilation unit
32201 so that the debugger can make sense of DW_AT_decl_file
32202 attributes. */
32203 if (debug_info_level >= DINFO_LEVEL_TERSE)
32204 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32205 (!dwarf_split_debug_info
32206 ? debug_line_section_label
32207 : debug_skeleton_line_section_label));
32208
32209 output_comdat_type_unit (ctnode, true);
32210 *slot = ctnode;
32211 }
32212
32213 /* Stick a unique symbol to the main debuginfo section. */
32214 compute_comp_unit_symbol (comp_unit_die ());
32215
32216 /* Output the main compilation unit. We always need it if only for
32217 the CU symbol. */
32218 output_comp_unit (comp_unit_die (), true, NULL);
32219
32220 /* Output the abbreviation table. */
32221 if (vec_safe_length (abbrev_die_table) != 1)
32222 {
32223 switch_to_section (debug_abbrev_section);
32224 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32225 output_abbrev_section ();
32226 }
32227
32228 /* Have to end the macro section. */
32229 if (have_macinfo)
32230 {
32231 /* We have to save macinfo state if we need to output it again
32232 for the FAT part of the object. */
32233 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32234 if (flag_fat_lto_objects)
32235 macinfo_table = macinfo_table->copy ();
32236
32237 switch_to_section (debug_macinfo_section);
32238 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32239 output_macinfo (debug_line_section_label, true);
32240 dw2_asm_output_data (1, 0, "End compilation unit");
32241
32242 if (flag_fat_lto_objects)
32243 {
32244 vec_free (macinfo_table);
32245 macinfo_table = saved_macinfo_table;
32246 }
32247 }
32248
32249 /* Emit a skeleton debug_line section. */
32250 switch_to_section (debug_line_section);
32251 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32252 output_line_info (true);
32253
32254 /* If we emitted any indirect strings, output the string table too. */
32255 if (debug_str_hash || skeleton_debug_str_hash)
32256 output_indirect_strings ();
32257 if (debug_line_str_hash)
32258 {
32259 switch_to_section (debug_line_str_section);
32260 const enum dwarf_form form = DW_FORM_line_strp;
32261 debug_line_str_hash->traverse<enum dwarf_form,
32262 output_indirect_string> (form);
32263 }
32264
32265 /* Switch back to the text section. */
32266 switch_to_section (text_section);
32267 }
32268
32269 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32270 within the same process. For use by toplev::finalize. */
32271
32272 void
32273 dwarf2out_c_finalize (void)
32274 {
32275 last_var_location_insn = NULL;
32276 cached_next_real_insn = NULL;
32277 used_rtx_array = NULL;
32278 incomplete_types = NULL;
32279 debug_info_section = NULL;
32280 debug_skeleton_info_section = NULL;
32281 debug_abbrev_section = NULL;
32282 debug_skeleton_abbrev_section = NULL;
32283 debug_aranges_section = NULL;
32284 debug_addr_section = NULL;
32285 debug_macinfo_section = NULL;
32286 debug_line_section = NULL;
32287 debug_skeleton_line_section = NULL;
32288 debug_loc_section = NULL;
32289 debug_pubnames_section = NULL;
32290 debug_pubtypes_section = NULL;
32291 debug_str_section = NULL;
32292 debug_line_str_section = NULL;
32293 debug_str_dwo_section = NULL;
32294 debug_str_offsets_section = NULL;
32295 debug_ranges_section = NULL;
32296 debug_frame_section = NULL;
32297 fde_vec = NULL;
32298 debug_str_hash = NULL;
32299 debug_line_str_hash = NULL;
32300 skeleton_debug_str_hash = NULL;
32301 dw2_string_counter = 0;
32302 have_multiple_function_sections = false;
32303 text_section_used = false;
32304 cold_text_section_used = false;
32305 cold_text_section = NULL;
32306 current_unit_personality = NULL;
32307
32308 early_dwarf = false;
32309 early_dwarf_finished = false;
32310
32311 next_die_offset = 0;
32312 single_comp_unit_die = NULL;
32313 comdat_type_list = NULL;
32314 limbo_die_list = NULL;
32315 file_table = NULL;
32316 decl_die_table = NULL;
32317 common_block_die_table = NULL;
32318 decl_loc_table = NULL;
32319 call_arg_locations = NULL;
32320 call_arg_loc_last = NULL;
32321 call_site_count = -1;
32322 tail_call_site_count = -1;
32323 cached_dw_loc_list_table = NULL;
32324 abbrev_die_table = NULL;
32325 delete dwarf_proc_stack_usage_map;
32326 dwarf_proc_stack_usage_map = NULL;
32327 line_info_label_num = 0;
32328 cur_line_info_table = NULL;
32329 text_section_line_info = NULL;
32330 cold_text_section_line_info = NULL;
32331 separate_line_info = NULL;
32332 info_section_emitted = false;
32333 pubname_table = NULL;
32334 pubtype_table = NULL;
32335 macinfo_table = NULL;
32336 ranges_table = NULL;
32337 ranges_by_label = NULL;
32338 rnglist_idx = 0;
32339 have_location_lists = false;
32340 loclabel_num = 0;
32341 poc_label_num = 0;
32342 last_emitted_file = NULL;
32343 label_num = 0;
32344 tmpl_value_parm_die_table = NULL;
32345 generic_type_instances = NULL;
32346 frame_pointer_fb_offset = 0;
32347 frame_pointer_fb_offset_valid = false;
32348 base_types.release ();
32349 XDELETEVEC (producer_string);
32350 producer_string = NULL;
32351 }
32352
32353 #include "gt-dwarf2out.h"