[gcc.git] / gcc / dwarf2out.c
Two more POLY_INT cases for dwarf2out.c
1 /* Output Dwarf2 format symbol table information from GCC.
2 Copyright (C) 1992-2019 Free Software Foundation, Inc.
3 Contributed by Gary Funck (gary@intrepid.com).
4 Derived from DWARF 1 implementation of Ron Guilmette (rfg@monkeys.com).
5 Extensively modified by Jason Merrill (jason@cygnus.com).
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 /* TODO: Emit .debug_line header even when there are no functions, since
24 the file numbers are used by .debug_info. Alternately, leave
25 out locations for types and decls.
26 Avoid talking about ctors and op= for PODs.
27 Factor out common prologue sequences into multiple CIEs. */
28
29 /* The first part of this file deals with the DWARF 2 frame unwind
30 information, which is also used by the GCC efficient exception handling
31 mechanism. The second part, controlled only by an #ifdef
32 DWARF2_DEBUGGING_INFO, deals with the other DWARF 2 debugging
33 information. */
34
35 /* DWARF2 Abbreviation Glossary:
36
37 CFA = Canonical Frame Address
38 a fixed address on the stack which identifies a call frame.
39 We define it to be the value of SP just before the call insn.
40 The CFA register and offset, which may change during the course
41 of the function, are used to calculate its value at runtime.
42
43 CFI = Call Frame Instruction
44 an instruction for the DWARF2 abstract machine
45
46 CIE = Common Information Entry
47 information common to one or more FDEs
48
49 DIE = Debugging Information Entry
50
51 FDE = Frame Description Entry
52 information describing the stack call frame, in particular,
53 how to restore registers
54
55 DW_CFA_... = DWARF2 CFA call frame instruction
56 DW_TAG_... = DWARF2 DIE tag */
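/* Illustrative example (editor's note, not part of the original source):
   for a typical x86-64 prologue the CFI stream conceptually contains

       DW_CFA_def_cfa: r7 (rsp) ofs 8      <- CFA = rsp + 8 at function entry
       DW_CFA_advance_loc: 1               <- past "push %rbp"
       DW_CFA_def_cfa_offset: 16           <- rsp moved, so CFA = rsp + 16
       DW_CFA_offset: r6 (rbp) at cfa-16   <- where the saved rbp lives

   i.e. the CFA register/offset pair is updated as the prologue executes,
   exactly as described in the glossary above.  */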
57
58 #include "config.h"
59 #include "system.h"
60 #include "coretypes.h"
61 #include "target.h"
62 #include "function.h"
63 #include "rtl.h"
64 #include "tree.h"
65 #include "memmodel.h"
66 #include "tm_p.h"
67 #include "stringpool.h"
68 #include "insn-config.h"
69 #include "ira.h"
70 #include "cgraph.h"
71 #include "diagnostic.h"
72 #include "fold-const.h"
73 #include "stor-layout.h"
74 #include "varasm.h"
75 #include "version.h"
76 #include "flags.h"
77 #include "rtlhash.h"
78 #include "reload.h"
79 #include "output.h"
80 #include "expr.h"
81 #include "dwarf2out.h"
82 #include "dwarf2asm.h"
83 #include "toplev.h"
84 #include "md5.h"
85 #include "tree-pretty-print.h"
86 #include "print-rtl.h"
87 #include "debug.h"
88 #include "common/common-target.h"
89 #include "langhooks.h"
90 #include "lra.h"
91 #include "dumpfile.h"
92 #include "opts.h"
93 #include "tree-dfa.h"
94 #include "gdb/gdb-index.h"
95 #include "rtl-iter.h"
96 #include "stringpool.h"
97 #include "attribs.h"
98 #include "file-prefix-map.h" /* remap_debug_filename() */
99
100 static void dwarf2out_source_line (unsigned int, unsigned int, const char *,
101 int, bool);
102 static rtx_insn *last_var_location_insn;
103 static rtx_insn *cached_next_real_insn;
104 static void dwarf2out_decl (tree);
105 static bool is_redundant_typedef (const_tree);
106
107 #ifndef XCOFF_DEBUGGING_INFO
108 #define XCOFF_DEBUGGING_INFO 0
109 #endif
110
111 #ifndef HAVE_XCOFF_DWARF_EXTRAS
112 #define HAVE_XCOFF_DWARF_EXTRAS 0
113 #endif
114
115 #ifdef VMS_DEBUGGING_INFO
116 int vms_file_stats_name (const char *, long long *, long *, char *, int *);
117
118 /* Define this macro to be a nonzero value if the directory specifications
119 which are output in the debug info should end with a separator. */
120 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 1
121 /* Define this macro to evaluate to a nonzero value if GCC should refrain
122 from generating indirect strings in DWARF2 debug information, for instance
123 if your target is stuck with an old version of GDB that is unable to
124 process them properly or uses VMS Debug. */
125 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 1
126 #else
127 #define DWARF2_DIR_SHOULD_END_WITH_SEPARATOR 0
128 #define DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET 0
129 #endif
130
131 /* ??? Poison these here until it can be done generically. They've been
132 totally replaced in this file; make sure it stays that way. */
133 #undef DWARF2_UNWIND_INFO
134 #undef DWARF2_FRAME_INFO
135 #if (GCC_VERSION >= 3000)
136 #pragma GCC poison DWARF2_UNWIND_INFO DWARF2_FRAME_INFO
137 #endif
138
139 /* The size of the target's pointer type. */
140 #ifndef PTR_SIZE
141 #define PTR_SIZE (POINTER_SIZE / BITS_PER_UNIT)
142 #endif
143
144 /* Array of RTXes referenced by the debugging information, which therefore
145 must be kept around forever. */
146 static GTY(()) vec<rtx, va_gc> *used_rtx_array;
147
148 /* A pointer to the base of a list of incomplete types which might be
149 completed at some later time. incomplete_types_list needs to be a
150 vec<tree, va_gc> *because we want to tell the garbage collector about
151 it. */
152 static GTY(()) vec<tree, va_gc> *incomplete_types;
153
154 /* Pointers to various DWARF2 sections. */
155 static GTY(()) section *debug_info_section;
156 static GTY(()) section *debug_skeleton_info_section;
157 static GTY(()) section *debug_abbrev_section;
158 static GTY(()) section *debug_skeleton_abbrev_section;
159 static GTY(()) section *debug_aranges_section;
160 static GTY(()) section *debug_addr_section;
161 static GTY(()) section *debug_macinfo_section;
162 static const char *debug_macinfo_section_name;
163 static unsigned macinfo_label_base = 1;
164 static GTY(()) section *debug_line_section;
165 static GTY(()) section *debug_skeleton_line_section;
166 static GTY(()) section *debug_loc_section;
167 static GTY(()) section *debug_pubnames_section;
168 static GTY(()) section *debug_pubtypes_section;
169 static GTY(()) section *debug_str_section;
170 static GTY(()) section *debug_line_str_section;
171 static GTY(()) section *debug_str_dwo_section;
172 static GTY(()) section *debug_str_offsets_section;
173 static GTY(()) section *debug_ranges_section;
174 static GTY(()) section *debug_frame_section;
175
176 /* Maximum size (in bytes) of an artificially generated label. */
177 #define MAX_ARTIFICIAL_LABEL_BYTES 40
178
179 /* According to the (draft) DWARF 3 specification, the initial length
180 should either be 4 or 12 bytes. When it's 12 bytes, the first 4
181 bytes are 0xffffffff, followed by the length stored in the next 8
182 bytes.
183
184 However, the SGI/MIPS ABI uses an initial length which is equal to
185 DWARF_OFFSET_SIZE. It is defined (elsewhere) accordingly. */
186
187 #ifndef DWARF_INITIAL_LENGTH_SIZE
188 #define DWARF_INITIAL_LENGTH_SIZE (DWARF_OFFSET_SIZE == 4 ? 4 : 12)
189 #endif
190
191 #ifndef DWARF_INITIAL_LENGTH_SIZE_STR
192 #define DWARF_INITIAL_LENGTH_SIZE_STR (DWARF_OFFSET_SIZE == 4 ? "-4" : "-12")
193 #endif
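/* A minimal sketch (editor's illustration, not part of dwarf2out.c) of the
   initial-length encoding described above.  dwarf2out itself emits this via
   dw2_asm_output_data further below; the buffer layout here assumes a
   little-endian host purely for illustration.  */
#if 0
#include <stddef.h>
#include <stdint.h>
#include <string.h>

/* Write a DWARF initial length for LENGTH into BUF.  Returns the number of
   bytes written: 4 for 32-bit DWARF, 12 for 64-bit DWARF (the 0xffffffff
   escape followed by the real length in the next 8 bytes).  */
static size_t
write_initial_length (unsigned char *buf, uint64_t length, int dwarf64)
{
  if (!dwarf64)
    {
      uint32_t l32 = (uint32_t) length;
      memcpy (buf, &l32, 4);
      return 4;
    }
  uint32_t escape = 0xffffffff;
  memcpy (buf, &escape, 4);
  memcpy (buf + 4, &length, 8);
  return 12;
}
#endif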
194
195 /* Round SIZE up to the nearest BOUNDARY. */
196 #define DWARF_ROUND(SIZE,BOUNDARY) \
197 ((((SIZE) + (BOUNDARY) - 1) / (BOUNDARY)) * (BOUNDARY))
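/* For example (editor's note), DWARF_ROUND (10, 4) == 12 and
   DWARF_ROUND (12, 4) == 12.  */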
198
199 /* CIE identifier. */
200 #if HOST_BITS_PER_WIDE_INT >= 64
201 #define DWARF_CIE_ID \
202 (unsigned HOST_WIDE_INT) (DWARF_OFFSET_SIZE == 4 ? DW_CIE_ID : DW64_CIE_ID)
203 #else
204 #define DWARF_CIE_ID DW_CIE_ID
205 #endif
206
207
208 /* A vector for a table that contains frame description
209 information for each routine. */
210 #define NOT_INDEXED (-1U)
211 #define NO_INDEX_ASSIGNED (-2U)
212
213 static GTY(()) vec<dw_fde_ref, va_gc> *fde_vec;
214
215 struct GTY((for_user)) indirect_string_node {
216 const char *str;
217 unsigned int refcount;
218 enum dwarf_form form;
219 char *label;
220 unsigned int index;
221 };
222
223 struct indirect_string_hasher : ggc_ptr_hash<indirect_string_node>
224 {
225 typedef const char *compare_type;
226
227 static hashval_t hash (indirect_string_node *);
228 static bool equal (indirect_string_node *, const char *);
229 };
230
231 static GTY (()) hash_table<indirect_string_hasher> *debug_str_hash;
232
233 static GTY (()) hash_table<indirect_string_hasher> *debug_line_str_hash;
234
235 /* With split_debug_info, both the comp_dir and dwo_name go in the
236 main object file, rather than the dwo, similar to the force_direct
237 parameter elsewhere but with additional complications:
238
239 1) The string is needed in both the main object file and the dwo.
240 That is, the comp_dir and dwo_name will appear in both places.
241
242 2) Strings can use four forms: DW_FORM_string, DW_FORM_strp,
243 DW_FORM_line_strp or DW_FORM_strx/GNU_str_index.
244
245 3) GCC chooses the form to use late, depending on the size and
246 reference count.
247
248 Rather than forcing all the debug string handling functions and
249 callers to deal with these complications, simply use a separate,
250 special-cased string table for any attribute that should go in the
251 main object file. This limits the complexity to just the places
252 that need it. */
253
254 static GTY (()) hash_table<indirect_string_hasher> *skeleton_debug_str_hash;
255
256 static GTY(()) int dw2_string_counter;
257
258 /* True if the compilation unit places functions in more than one section. */
259 static GTY(()) bool have_multiple_function_sections = false;
260
261 /* Whether the default text and cold text sections have been used at all. */
262 static GTY(()) bool text_section_used = false;
263 static GTY(()) bool cold_text_section_used = false;
264
265 /* The default cold text section. */
266 static GTY(()) section *cold_text_section;
267
268 /* The DIE for C++14 'auto' in a function return type. */
269 static GTY(()) dw_die_ref auto_die;
270
271 /* The DIE for C++14 'decltype(auto)' in a function return type. */
272 static GTY(()) dw_die_ref decltype_auto_die;
273
274 /* Forward declarations for functions defined in this file. */
275
276 static void output_call_frame_info (int);
277 static void dwarf2out_note_section_used (void);
278
279 /* Personality decl of current unit. Used only when assembler does not support
280 personality CFI. */
281 static GTY(()) rtx current_unit_personality;
282
283 /* Whether an eh_frame section is required. */
284 static GTY(()) bool do_eh_frame = false;
285
286 /* .debug_rnglists next index. */
287 static unsigned int rnglist_idx;
288
289 /* Data and reference forms for relocatable data. */
290 #define DW_FORM_data (DWARF_OFFSET_SIZE == 8 ? DW_FORM_data8 : DW_FORM_data4)
291 #define DW_FORM_ref (DWARF_OFFSET_SIZE == 8 ? DW_FORM_ref8 : DW_FORM_ref4)
292
293 #ifndef DEBUG_FRAME_SECTION
294 #define DEBUG_FRAME_SECTION ".debug_frame"
295 #endif
296
297 #ifndef FUNC_BEGIN_LABEL
298 #define FUNC_BEGIN_LABEL "LFB"
299 #endif
300
301 #ifndef FUNC_SECOND_SECT_LABEL
302 #define FUNC_SECOND_SECT_LABEL "LFSB"
303 #endif
304
305 #ifndef FUNC_END_LABEL
306 #define FUNC_END_LABEL "LFE"
307 #endif
308
309 #ifndef PROLOGUE_END_LABEL
310 #define PROLOGUE_END_LABEL "LPE"
311 #endif
312
313 #ifndef EPILOGUE_BEGIN_LABEL
314 #define EPILOGUE_BEGIN_LABEL "LEB"
315 #endif
316
317 #ifndef FRAME_BEGIN_LABEL
318 #define FRAME_BEGIN_LABEL "Lframe"
319 #endif
320 #define CIE_AFTER_SIZE_LABEL "LSCIE"
321 #define CIE_END_LABEL "LECIE"
322 #define FDE_LABEL "LSFDE"
323 #define FDE_AFTER_SIZE_LABEL "LASFDE"
324 #define FDE_END_LABEL "LEFDE"
325 #define LINE_NUMBER_BEGIN_LABEL "LSLT"
326 #define LINE_NUMBER_END_LABEL "LELT"
327 #define LN_PROLOG_AS_LABEL "LASLTP"
328 #define LN_PROLOG_END_LABEL "LELTP"
329 #define DIE_LABEL_PREFIX "DW"
330 \f
331 /* Match the base name of a file to the base name of a compilation unit. */
332
333 static int
334 matches_main_base (const char *path)
335 {
336 /* Cache the last query. */
337 static const char *last_path = NULL;
338 static int last_match = 0;
339 if (path != last_path)
340 {
341 const char *base;
342 int length = base_of_path (path, &base);
343 last_path = path;
344 last_match = (length == main_input_baselength
345 && memcmp (base, main_input_basename, length) == 0);
346 }
347 return last_match;
348 }
349
350 #ifdef DEBUG_DEBUG_STRUCT
351
352 static int
353 dump_struct_debug (tree type, enum debug_info_usage usage,
354 enum debug_struct_file criterion, int generic,
355 int matches, int result)
356 {
357 /* Find the type name. */
358 tree type_decl = TYPE_STUB_DECL (type);
359 tree t = type_decl;
360 const char *name = 0;
361 if (TREE_CODE (t) == TYPE_DECL)
362 t = DECL_NAME (t);
363 if (t)
364 name = IDENTIFIER_POINTER (t);
365
366 fprintf (stderr, " struct %d %s %s %s %s %d %p %s\n",
367 criterion,
368 DECL_IN_SYSTEM_HEADER (type_decl) ? "sys" : "usr",
369 matches ? "bas" : "hdr",
370 generic ? "gen" : "ord",
371 usage == DINFO_USAGE_DFN ? ";" :
372 usage == DINFO_USAGE_DIR_USE ? "." : "*",
373 result,
374 (void*) type_decl, name);
375 return result;
376 }
377 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
378 dump_struct_debug (type, usage, criterion, generic, matches, result)
379
380 #else
381
382 #define DUMP_GSTRUCT(type, usage, criterion, generic, matches, result) \
383 (result)
384
385 #endif
386
387 /* Get the number of HOST_WIDE_INTs needed to represent the precision
388 of the number. Some constants have a large uniform precision, so
389 we get the precision needed for the actual value of the number. */
390
391 static unsigned int
392 get_full_len (const wide_int &op)
393 {
394 int prec = wi::min_precision (op, UNSIGNED);
395 return ((prec + HOST_BITS_PER_WIDE_INT - 1)
396 / HOST_BITS_PER_WIDE_INT);
397 }
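/* For example (editor's note), with 64-bit HOST_WIDE_INTs a value whose
   minimum precision is 70 bits needs two HOST_WIDE_INTs, so get_full_len
   returns 2; a value that fits in 64 bits yields 1.  */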
398
399 static bool
400 should_emit_struct_debug (tree type, enum debug_info_usage usage)
401 {
402 enum debug_struct_file criterion;
403 tree type_decl;
404 bool generic = lang_hooks.types.generic_p (type);
405
406 if (generic)
407 criterion = debug_struct_generic[usage];
408 else
409 criterion = debug_struct_ordinary[usage];
410
411 if (criterion == DINFO_STRUCT_FILE_NONE)
412 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
413 if (criterion == DINFO_STRUCT_FILE_ANY)
414 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
415
416 type_decl = TYPE_STUB_DECL (TYPE_MAIN_VARIANT (type));
417
418 if (type_decl != NULL)
419 {
420 if (criterion == DINFO_STRUCT_FILE_SYS && DECL_IN_SYSTEM_HEADER (type_decl))
421 return DUMP_GSTRUCT (type, usage, criterion, generic, false, true);
422
423 if (matches_main_base (DECL_SOURCE_FILE (type_decl)))
424 return DUMP_GSTRUCT (type, usage, criterion, generic, true, true);
425 }
426
427 return DUMP_GSTRUCT (type, usage, criterion, generic, false, false);
428 }
429 \f
430 /* Switch [BACK] to eh_frame_section. If we don't have an eh_frame_section,
431 switch to the data section instead, and write out a synthetic start label
432 for collect2 the first time around. */
433
434 static void
435 switch_to_eh_frame_section (bool back ATTRIBUTE_UNUSED)
436 {
437 if (eh_frame_section == 0)
438 {
439 int flags;
440
441 if (EH_TABLES_CAN_BE_READ_ONLY)
442 {
443 int fde_encoding;
444 int per_encoding;
445 int lsda_encoding;
446
447 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1,
448 /*global=*/0);
449 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2,
450 /*global=*/1);
451 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0,
452 /*global=*/0);
453 flags = ((! flag_pic
454 || ((fde_encoding & 0x70) != DW_EH_PE_absptr
455 && (fde_encoding & 0x70) != DW_EH_PE_aligned
456 && (per_encoding & 0x70) != DW_EH_PE_absptr
457 && (per_encoding & 0x70) != DW_EH_PE_aligned
458 && (lsda_encoding & 0x70) != DW_EH_PE_absptr
459 && (lsda_encoding & 0x70) != DW_EH_PE_aligned))
460 ? 0 : SECTION_WRITE);
461 }
462 else
463 flags = SECTION_WRITE;
464
465 #ifdef EH_FRAME_SECTION_NAME
466 eh_frame_section = get_section (EH_FRAME_SECTION_NAME, flags, NULL);
467 #else
468 eh_frame_section = ((flags == SECTION_WRITE)
469 ? data_section : readonly_data_section);
470 #endif /* EH_FRAME_SECTION_NAME */
471 }
472
473 switch_to_section (eh_frame_section);
474
475 #ifdef EH_FRAME_THROUGH_COLLECT2
476 /* We have no special eh_frame section. Emit special labels to guide
477 collect2. */
478 if (!back)
479 {
480 tree label = get_file_function_name ("F");
481 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
482 targetm.asm_out.globalize_label (asm_out_file,
483 IDENTIFIER_POINTER (label));
484 ASM_OUTPUT_LABEL (asm_out_file, IDENTIFIER_POINTER (label));
485 }
486 #endif
487 }
488
489 /* Switch [BACK] to the eh or debug frame table section, depending on
490 FOR_EH. */
491
492 static void
493 switch_to_frame_table_section (int for_eh, bool back)
494 {
495 if (for_eh)
496 switch_to_eh_frame_section (back);
497 else
498 {
499 if (!debug_frame_section)
500 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
501 SECTION_DEBUG, NULL);
502 switch_to_section (debug_frame_section);
503 }
504 }
505
506 /* Describe for the GTY machinery what parts of dw_cfi_oprnd1 are used. */
507
508 enum dw_cfi_oprnd_type
509 dw_cfi_oprnd1_desc (enum dwarf_call_frame_info cfi)
510 {
511 switch (cfi)
512 {
513 case DW_CFA_nop:
514 case DW_CFA_GNU_window_save:
515 case DW_CFA_remember_state:
516 case DW_CFA_restore_state:
517 return dw_cfi_oprnd_unused;
518
519 case DW_CFA_set_loc:
520 case DW_CFA_advance_loc1:
521 case DW_CFA_advance_loc2:
522 case DW_CFA_advance_loc4:
523 case DW_CFA_MIPS_advance_loc8:
524 return dw_cfi_oprnd_addr;
525
526 case DW_CFA_offset:
527 case DW_CFA_offset_extended:
528 case DW_CFA_def_cfa:
529 case DW_CFA_offset_extended_sf:
530 case DW_CFA_def_cfa_sf:
531 case DW_CFA_restore:
532 case DW_CFA_restore_extended:
533 case DW_CFA_undefined:
534 case DW_CFA_same_value:
535 case DW_CFA_def_cfa_register:
536 case DW_CFA_register:
537 case DW_CFA_expression:
538 case DW_CFA_val_expression:
539 return dw_cfi_oprnd_reg_num;
540
541 case DW_CFA_def_cfa_offset:
542 case DW_CFA_GNU_args_size:
543 case DW_CFA_def_cfa_offset_sf:
544 return dw_cfi_oprnd_offset;
545
546 case DW_CFA_def_cfa_expression:
547 return dw_cfi_oprnd_loc;
548
549 default:
550 gcc_unreachable ();
551 }
552 }
553
554 /* Describe for the GTY machinery what parts of dw_cfi_oprnd2 are used. */
555
556 enum dw_cfi_oprnd_type
557 dw_cfi_oprnd2_desc (enum dwarf_call_frame_info cfi)
558 {
559 switch (cfi)
560 {
561 case DW_CFA_def_cfa:
562 case DW_CFA_def_cfa_sf:
563 case DW_CFA_offset:
564 case DW_CFA_offset_extended_sf:
565 case DW_CFA_offset_extended:
566 return dw_cfi_oprnd_offset;
567
568 case DW_CFA_register:
569 return dw_cfi_oprnd_reg_num;
570
571 case DW_CFA_expression:
572 case DW_CFA_val_expression:
573 return dw_cfi_oprnd_loc;
574
575 case DW_CFA_def_cfa_expression:
576 return dw_cfi_oprnd_cfa_loc;
577
578 default:
579 return dw_cfi_oprnd_unused;
580 }
581 }
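/* A minimal usage sketch (editor's illustration, not part of dwarf2out.c):
   the two descriptor functions above let generic code dispatch on a CFI's
   operand kinds without duplicating the big switches.  The field names used
   below (dw_cfi_opc, dw_cfi_oprnd1, dw_cfi_reg_num, dw_cfi_offset) follow
   dw_cfi_node as declared in dwarf2out.h.  */
#if 0
static void
visit_cfi_first_operand (dw_cfi_ref cfi)
{
  switch (dw_cfi_oprnd1_desc (cfi->dw_cfi_opc))
    {
    case dw_cfi_oprnd_reg_num:
      /* cfi->dw_cfi_oprnd1.dw_cfi_reg_num is the meaningful field.  */
      break;
    case dw_cfi_oprnd_offset:
      /* cfi->dw_cfi_oprnd1.dw_cfi_offset is the meaningful field.  */
      break;
    case dw_cfi_oprnd_addr:
    case dw_cfi_oprnd_loc:
    case dw_cfi_oprnd_unused:
    default:
      break;
    }
  /* The second operand is handled analogously via dw_cfi_oprnd2_desc.  */
}
#endif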
582
583 /* Output one FDE. */
584
585 static void
586 output_fde (dw_fde_ref fde, bool for_eh, bool second,
587 char *section_start_label, int fde_encoding, char *augmentation,
588 bool any_lsda_needed, int lsda_encoding)
589 {
590 const char *begin, *end;
591 static unsigned int j;
592 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
593
594 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, for_eh,
595 /* empty */ 0);
596 targetm.asm_out.internal_label (asm_out_file, FDE_LABEL,
597 for_eh + j);
598 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_AFTER_SIZE_LABEL, for_eh + j);
599 ASM_GENERATE_INTERNAL_LABEL (l2, FDE_END_LABEL, for_eh + j);
600 if (!XCOFF_DEBUGGING_INFO || for_eh)
601 {
602 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
603 dw2_asm_output_data (4, 0xffffffff, "Initial length escape value"
604 " indicating 64-bit DWARF extension");
605 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
606 "FDE Length");
607 }
608 ASM_OUTPUT_LABEL (asm_out_file, l1);
609
610 if (for_eh)
611 dw2_asm_output_delta (4, l1, section_start_label, "FDE CIE offset");
612 else
613 dw2_asm_output_offset (DWARF_OFFSET_SIZE, section_start_label,
614 debug_frame_section, "FDE CIE offset");
615
616 begin = second ? fde->dw_fde_second_begin : fde->dw_fde_begin;
617 end = second ? fde->dw_fde_second_end : fde->dw_fde_end;
618
619 if (for_eh)
620 {
621 rtx sym_ref = gen_rtx_SYMBOL_REF (Pmode, begin);
622 SYMBOL_REF_FLAGS (sym_ref) |= SYMBOL_FLAG_LOCAL;
623 dw2_asm_output_encoded_addr_rtx (fde_encoding, sym_ref, false,
624 "FDE initial location");
625 dw2_asm_output_delta (size_of_encoded_value (fde_encoding),
626 end, begin, "FDE address range");
627 }
628 else
629 {
630 dw2_asm_output_addr (DWARF2_ADDR_SIZE, begin, "FDE initial location");
631 dw2_asm_output_delta (DWARF2_ADDR_SIZE, end, begin, "FDE address range");
632 }
633
634 if (augmentation[0])
635 {
636 if (any_lsda_needed)
637 {
638 int size = size_of_encoded_value (lsda_encoding);
639
640 if (lsda_encoding == DW_EH_PE_aligned)
641 {
642 int offset = ( 4 /* Length */
643 + 4 /* CIE offset */
644 + 2 * size_of_encoded_value (fde_encoding)
645 + 1 /* Augmentation size */ );
646 int pad = -offset & (PTR_SIZE - 1);
647
648 size += pad;
649 gcc_assert (size_of_uleb128 (size) == 1);
650 }
651
652 dw2_asm_output_data_uleb128 (size, "Augmentation size");
653
654 if (fde->uses_eh_lsda)
655 {
656 ASM_GENERATE_INTERNAL_LABEL (l1, second ? "LLSDAC" : "LLSDA",
657 fde->funcdef_number);
658 dw2_asm_output_encoded_addr_rtx (lsda_encoding,
659 gen_rtx_SYMBOL_REF (Pmode, l1),
660 false,
661 "Language Specific Data Area");
662 }
663 else
664 {
665 if (lsda_encoding == DW_EH_PE_aligned)
666 ASM_OUTPUT_ALIGN (asm_out_file, floor_log2 (PTR_SIZE));
667 dw2_asm_output_data (size_of_encoded_value (lsda_encoding), 0,
668 "Language Specific Data Area (none)");
669 }
670 }
671 else
672 dw2_asm_output_data_uleb128 (0, "Augmentation size");
673 }
674
675 /* Loop through the Call Frame Instructions associated with this FDE. */
676 fde->dw_fde_current_label = begin;
677 {
678 size_t from, until, i;
679
680 from = 0;
681 until = vec_safe_length (fde->dw_fde_cfi);
682
683 if (fde->dw_fde_second_begin == NULL)
684 ;
685 else if (!second)
686 until = fde->dw_fde_switch_cfi_index;
687 else
688 from = fde->dw_fde_switch_cfi_index;
689
690 for (i = from; i < until; i++)
691 output_cfi ((*fde->dw_fde_cfi)[i], fde, for_eh);
692 }
693
694 /* If we are to emit a ref/link from function bodies to their frame tables,
695 do it now. This is typically performed to make sure that tables
696 associated with functions are dragged with them and not discarded in
697 garbage collecting links. We need to do this on a per function basis to
698 cope with -ffunction-sections. */
699
700 #ifdef ASM_OUTPUT_DWARF_TABLE_REF
701 /* Switch to the function section, emit the ref to the tables, and
702 switch *back* into the table section. */
703 switch_to_section (function_section (fde->decl));
704 ASM_OUTPUT_DWARF_TABLE_REF (section_start_label);
705 switch_to_frame_table_section (for_eh, true);
706 #endif
707
708 /* Pad the FDE out to an address sized boundary. */
709 ASM_OUTPUT_ALIGN (asm_out_file,
710 floor_log2 ((for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE)));
711 ASM_OUTPUT_LABEL (asm_out_file, l2);
712
713 j += 2;
714 }
715
716 /* Return true if frame description entry FDE is needed for EH. */
717
718 static bool
719 fde_needed_for_eh_p (dw_fde_ref fde)
720 {
721 if (flag_asynchronous_unwind_tables)
722 return true;
723
724 if (TARGET_USES_WEAK_UNWIND_INFO && DECL_WEAK (fde->decl))
725 return true;
726
727 if (fde->uses_eh_lsda)
728 return true;
729
730 /* If exceptions are enabled, we have collected nothrow info. */
731 if (flag_exceptions && (fde->all_throwers_are_sibcalls || fde->nothrow))
732 return false;
733
734 return true;
735 }
736
737 /* Output the call frame information used to record information
738 that relates to calculating the frame pointer, and records the
739 location of saved registers. */
740
741 static void
742 output_call_frame_info (int for_eh)
743 {
744 unsigned int i;
745 dw_fde_ref fde;
746 dw_cfi_ref cfi;
747 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
748 char section_start_label[MAX_ARTIFICIAL_LABEL_BYTES];
749 bool any_lsda_needed = false;
750 char augmentation[6];
751 int augmentation_size;
752 int fde_encoding = DW_EH_PE_absptr;
753 int per_encoding = DW_EH_PE_absptr;
754 int lsda_encoding = DW_EH_PE_absptr;
755 int return_reg;
756 rtx personality = NULL;
757 int dw_cie_version;
758
759 /* Don't emit a CIE if there won't be any FDEs. */
760 if (!fde_vec)
761 return;
762
763 /* Nothing to do if the assembler's doing it all. */
764 if (dwarf2out_do_cfi_asm ())
765 return;
766
767 /* If we don't have any functions we'll want to unwind out of, don't emit
768 any EH unwind information. If we make FDEs linkonce, we may have to
769 emit an empty label for an FDE that wouldn't otherwise be emitted. We
770 want to avoid having an FDE kept around when the function it refers to
771 is discarded. Example where this matters: a primary function template
772 in C++ requires EH information, an explicit specialization doesn't. */
773 if (for_eh)
774 {
775 bool any_eh_needed = false;
776
777 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
778 {
779 if (fde->uses_eh_lsda)
780 any_eh_needed = any_lsda_needed = true;
781 else if (fde_needed_for_eh_p (fde))
782 any_eh_needed = true;
783 else if (TARGET_USES_WEAK_UNWIND_INFO)
784 targetm.asm_out.emit_unwind_label (asm_out_file, fde->decl, 1, 1);
785 }
786
787 if (!any_eh_needed)
788 return;
789 }
790
791 /* We're going to be generating comments, so turn on app. */
792 if (flag_debug_asm)
793 app_enable ();
794
795 /* Switch to the proper frame section, first time. */
796 switch_to_frame_table_section (for_eh, false);
797
798 ASM_GENERATE_INTERNAL_LABEL (section_start_label, FRAME_BEGIN_LABEL, for_eh);
799 ASM_OUTPUT_LABEL (asm_out_file, section_start_label);
800
801 /* Output the CIE. */
802 ASM_GENERATE_INTERNAL_LABEL (l1, CIE_AFTER_SIZE_LABEL, for_eh);
803 ASM_GENERATE_INTERNAL_LABEL (l2, CIE_END_LABEL, for_eh);
804 if (!XCOFF_DEBUGGING_INFO || for_eh)
805 {
806 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4 && !for_eh)
807 dw2_asm_output_data (4, 0xffffffff,
808 "Initial length escape value indicating 64-bit DWARF extension");
809 dw2_asm_output_delta (for_eh ? 4 : DWARF_OFFSET_SIZE, l2, l1,
810 "Length of Common Information Entry");
811 }
812 ASM_OUTPUT_LABEL (asm_out_file, l1);
813
814 /* Now that the CIE pointer is PC-relative for EH,
815 use 0 to identify the CIE. */
816 dw2_asm_output_data ((for_eh ? 4 : DWARF_OFFSET_SIZE),
817 (for_eh ? 0 : DWARF_CIE_ID),
818 "CIE Identifier Tag");
819
820 /* Use the CIE version 3 for DWARF3; allow DWARF2 to continue to
821 use CIE version 1, unless that would produce incorrect results
822 due to overflowing the return register column. */
823 return_reg = DWARF2_FRAME_REG_OUT (DWARF_FRAME_RETURN_COLUMN, for_eh);
824 dw_cie_version = 1;
825 if (return_reg >= 256 || dwarf_version > 2)
826 dw_cie_version = 3;
827 dw2_asm_output_data (1, dw_cie_version, "CIE Version");
828
829 augmentation[0] = 0;
830 augmentation_size = 0;
831
832 personality = current_unit_personality;
833 if (for_eh)
834 {
835 char *p;
836
837 /* Augmentation:
838 z Indicates that a uleb128 is present to size the
839 augmentation section.
840 L Indicates the encoding (and thus presence) of
841 an LSDA pointer in the FDE augmentation.
842 R Indicates a non-default pointer encoding for
843 FDE code pointers.
844 P Indicates the presence of an encoding + language
845 personality routine in the CIE augmentation. */
846
847 fde_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/1, /*global=*/0);
848 per_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
849 lsda_encoding = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
850
851 p = augmentation + 1;
852 if (personality)
853 {
854 *p++ = 'P';
855 augmentation_size += 1 + size_of_encoded_value (per_encoding);
856 assemble_external_libcall (personality);
857 }
858 if (any_lsda_needed)
859 {
860 *p++ = 'L';
861 augmentation_size += 1;
862 }
863 if (fde_encoding != DW_EH_PE_absptr)
864 {
865 *p++ = 'R';
866 augmentation_size += 1;
867 }
868 if (p > augmentation + 1)
869 {
870 augmentation[0] = 'z';
871 *p = '\0';
872 }
873
874 /* Ug. Some platforms can't do unaligned dynamic relocations at all. */
875 if (personality && per_encoding == DW_EH_PE_aligned)
876 {
877 int offset = ( 4 /* Length */
878 + 4 /* CIE Id */
879 + 1 /* CIE version */
880 + strlen (augmentation) + 1 /* Augmentation */
881 + size_of_uleb128 (1) /* Code alignment */
882 + size_of_sleb128 (DWARF_CIE_DATA_ALIGNMENT)
883 + 1 /* RA column */
884 + 1 /* Augmentation size */
885 + 1 /* Personality encoding */ );
886 int pad = -offset & (PTR_SIZE - 1);
887
888 augmentation_size += pad;
889
890 /* Augmentations should be small, so there's scarce need to
891 iterate for a solution. Die if we exceed one uleb128 byte. */
892 gcc_assert (size_of_uleb128 (augmentation_size) == 1);
893 }
894 }
895
896 dw2_asm_output_nstring (augmentation, -1, "CIE Augmentation");
897 if (dw_cie_version >= 4)
898 {
899 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "CIE Address Size");
900 dw2_asm_output_data (1, 0, "CIE Segment Size");
901 }
902 dw2_asm_output_data_uleb128 (1, "CIE Code Alignment Factor");
903 dw2_asm_output_data_sleb128 (DWARF_CIE_DATA_ALIGNMENT,
904 "CIE Data Alignment Factor");
905
906 if (dw_cie_version == 1)
907 dw2_asm_output_data (1, return_reg, "CIE RA Column");
908 else
909 dw2_asm_output_data_uleb128 (return_reg, "CIE RA Column");
910
911 if (augmentation[0])
912 {
913 dw2_asm_output_data_uleb128 (augmentation_size, "Augmentation size");
914 if (personality)
915 {
916 dw2_asm_output_data (1, per_encoding, "Personality (%s)",
917 eh_data_format_name (per_encoding));
918 dw2_asm_output_encoded_addr_rtx (per_encoding,
919 personality,
920 true, NULL);
921 }
922
923 if (any_lsda_needed)
924 dw2_asm_output_data (1, lsda_encoding, "LSDA Encoding (%s)",
925 eh_data_format_name (lsda_encoding));
926
927 if (fde_encoding != DW_EH_PE_absptr)
928 dw2_asm_output_data (1, fde_encoding, "FDE Encoding (%s)",
929 eh_data_format_name (fde_encoding));
930 }
931
932 FOR_EACH_VEC_ELT (*cie_cfi_vec, i, cfi)
933 output_cfi (cfi, NULL, for_eh);
934
935 /* Pad the CIE out to an address sized boundary. */
936 ASM_OUTPUT_ALIGN (asm_out_file,
937 floor_log2 (for_eh ? PTR_SIZE : DWARF2_ADDR_SIZE));
938 ASM_OUTPUT_LABEL (asm_out_file, l2);
939
940 /* Loop through all of the FDEs. */
941 FOR_EACH_VEC_ELT (*fde_vec, i, fde)
942 {
943 unsigned int k;
944
945 /* Don't emit EH unwind info for leaf functions that don't need it. */
946 if (for_eh && !fde_needed_for_eh_p (fde))
947 continue;
948
949 for (k = 0; k < (fde->dw_fde_second_begin ? 2 : 1); k++)
950 output_fde (fde, for_eh, k, section_start_label, fde_encoding,
951 augmentation, any_lsda_needed, lsda_encoding);
952 }
953
954 if (for_eh && targetm.terminate_dw2_eh_frame_info)
955 dw2_asm_output_data (4, 0, "End of Table");
956
957 /* Turn off app to make assembly quicker. */
958 if (flag_debug_asm)
959 app_disable ();
960 }
961
962 /* Emit .cfi_startproc and .cfi_personality/.cfi_lsda if needed. */
963
964 static void
965 dwarf2out_do_cfi_startproc (bool second)
966 {
967 int enc;
968 rtx ref;
969
970 fprintf (asm_out_file, "\t.cfi_startproc\n");
971
972 targetm.asm_out.post_cfi_startproc (asm_out_file, current_function_decl);
973
974 /* .cfi_personality and .cfi_lsda are only relevant to DWARF2
975 eh unwinders. */
976 if (targetm_common.except_unwind_info (&global_options) != UI_DWARF2)
977 return;
978
979 rtx personality = get_personality_function (current_function_decl);
980
981 if (personality)
982 {
983 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/2, /*global=*/1);
984 ref = personality;
985
986 /* ??? The GAS support isn't entirely consistent. We have to
987 handle indirect support ourselves, but PC-relative is done
988 in the assembler. Further, the assembler can't handle any
989 of the weirder relocation types. */
990 if (enc & DW_EH_PE_indirect)
991 ref = dw2_force_const_mem (ref, true);
992
993 fprintf (asm_out_file, "\t.cfi_personality %#x,", enc);
994 output_addr_const (asm_out_file, ref);
995 fputc ('\n', asm_out_file);
996 }
997
998 if (crtl->uses_eh_lsda)
999 {
1000 char lab[MAX_ARTIFICIAL_LABEL_BYTES];
1001
1002 enc = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/0);
1003 ASM_GENERATE_INTERNAL_LABEL (lab, second ? "LLSDAC" : "LLSDA",
1004 current_function_funcdef_no);
1005 ref = gen_rtx_SYMBOL_REF (Pmode, lab);
1006 SYMBOL_REF_FLAGS (ref) = SYMBOL_FLAG_LOCAL;
1007
1008 if (enc & DW_EH_PE_indirect)
1009 ref = dw2_force_const_mem (ref, true);
1010
1011 fprintf (asm_out_file, "\t.cfi_lsda %#x,", enc);
1012 output_addr_const (asm_out_file, ref);
1013 fputc ('\n', asm_out_file);
1014 }
1015 }
1016
1017 /* Allocate CURRENT_FDE. Immediately initialize all we can, noting that
1018 this allocation may be done before pass_final. */
1019
1020 dw_fde_ref
1021 dwarf2out_alloc_current_fde (void)
1022 {
1023 dw_fde_ref fde;
1024
1025 fde = ggc_cleared_alloc<dw_fde_node> ();
1026 fde->decl = current_function_decl;
1027 fde->funcdef_number = current_function_funcdef_no;
1028 fde->fde_index = vec_safe_length (fde_vec);
1029 fde->all_throwers_are_sibcalls = crtl->all_throwers_are_sibcalls;
1030 fde->uses_eh_lsda = crtl->uses_eh_lsda;
1031 fde->nothrow = crtl->nothrow;
1032 fde->drap_reg = INVALID_REGNUM;
1033 fde->vdrap_reg = INVALID_REGNUM;
1034
1035 /* Record the FDE associated with this function. */
1036 cfun->fde = fde;
1037 vec_safe_push (fde_vec, fde);
1038
1039 return fde;
1040 }
1041
1042 /* Output a marker (i.e. a label) for the beginning of a function, before
1043 the prologue. */
1044
1045 void
1046 dwarf2out_begin_prologue (unsigned int line ATTRIBUTE_UNUSED,
1047 unsigned int column ATTRIBUTE_UNUSED,
1048 const char *file ATTRIBUTE_UNUSED)
1049 {
1050 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1051 char * dup_label;
1052 dw_fde_ref fde;
1053 section *fnsec;
1054 bool do_frame;
1055
1056 current_function_func_begin_label = NULL;
1057
1058 do_frame = dwarf2out_do_frame ();
1059
1060 /* ??? current_function_func_begin_label is also used by except.c for
1061 call-site information. We must emit this label if it might be used. */
1062 if (!do_frame
1063 && (!flag_exceptions
1064 || targetm_common.except_unwind_info (&global_options) == UI_SJLJ))
1065 return;
1066
1067 fnsec = function_section (current_function_decl);
1068 switch_to_section (fnsec);
1069 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_BEGIN_LABEL,
1070 current_function_funcdef_no);
1071 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, FUNC_BEGIN_LABEL,
1072 current_function_funcdef_no);
1073 dup_label = xstrdup (label);
1074 current_function_func_begin_label = dup_label;
1075
1076 /* We can elide FDE allocation if we're not emitting frame unwind info. */
1077 if (!do_frame)
1078 return;
1079
1080 /* Unlike the debug version, the EH version of frame unwind info is a per-
1081 function setting so we need to record whether we need it for the unit. */
1082 do_eh_frame |= dwarf2out_do_eh_frame ();
1083
1084 /* Cater to the various TARGET_ASM_OUTPUT_MI_THUNK implementations that
1085 emit insns as rtx but bypass the bulk of rest_of_compilation, which
1086 would include pass_dwarf2_frame. If we've not created the FDE yet,
1087 do so now. */
1088 fde = cfun->fde;
1089 if (fde == NULL)
1090 fde = dwarf2out_alloc_current_fde ();
1091
1092 /* Initialize the bits of CURRENT_FDE that were not available earlier. */
1093 fde->dw_fde_begin = dup_label;
1094 fde->dw_fde_current_label = dup_label;
1095 fde->in_std_section = (fnsec == text_section
1096 || (cold_text_section && fnsec == cold_text_section));
1097
1098 /* We only want to output line number information for the genuine dwarf2
1099 prologue case, not the eh frame case. */
1100 #ifdef DWARF2_DEBUGGING_INFO
1101 if (file)
1102 dwarf2out_source_line (line, column, file, 0, true);
1103 #endif
1104
1105 if (dwarf2out_do_cfi_asm ())
1106 dwarf2out_do_cfi_startproc (false);
1107 else
1108 {
1109 rtx personality = get_personality_function (current_function_decl);
1110 if (!current_unit_personality)
1111 current_unit_personality = personality;
1112
1113 /* We cannot keep a current personality per function as without CFI
1114 asm, at the point where we emit the CFI data, there is no current
1115 function anymore. */
1116 if (personality && current_unit_personality != personality)
1117 sorry ("multiple EH personalities are supported only with assemblers "
1118 "supporting %<.cfi_personality%> directive");
1119 }
1120 }
1121
1122 /* Output a marker (i.e. a label) for the end of the generated code
1123 for a function prologue. This gets called *after* the prologue code has
1124 been generated. */
1125
1126 void
1127 dwarf2out_vms_end_prologue (unsigned int line ATTRIBUTE_UNUSED,
1128 const char *file ATTRIBUTE_UNUSED)
1129 {
1130 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1131
1132 /* Output a label to mark the endpoint of the code generated for this
1133 function. */
1134 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
1135 current_function_funcdef_no);
1136 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, PROLOGUE_END_LABEL,
1137 current_function_funcdef_no);
1138 cfun->fde->dw_fde_vms_end_prologue = xstrdup (label);
1139 }
1140
1141 /* Output a marker (i.e. a label) for the beginning of the generated code
1142 for a function epilogue. This gets called *before* the prologue code has
1143 been generated. */
1144
1145 void
1146 dwarf2out_vms_begin_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1147 const char *file ATTRIBUTE_UNUSED)
1148 {
1149 dw_fde_ref fde = cfun->fde;
1150 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1151
1152 if (fde->dw_fde_vms_begin_epilogue)
1153 return;
1154
1155 /* Output a label to mark the endpoint of the code generated for this
1156 function. */
1157 ASM_GENERATE_INTERNAL_LABEL (label, EPILOGUE_BEGIN_LABEL,
1158 current_function_funcdef_no);
1159 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, EPILOGUE_BEGIN_LABEL,
1160 current_function_funcdef_no);
1161 fde->dw_fde_vms_begin_epilogue = xstrdup (label);
1162 }
1163
1164 /* Output a marker (i.e. a label) for the absolute end of the generated code
1165 for a function definition. This gets called *after* the epilogue code has
1166 been generated. */
1167
1168 void
1169 dwarf2out_end_epilogue (unsigned int line ATTRIBUTE_UNUSED,
1170 const char *file ATTRIBUTE_UNUSED)
1171 {
1172 dw_fde_ref fde;
1173 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1174
1175 last_var_location_insn = NULL;
1176 cached_next_real_insn = NULL;
1177
1178 if (dwarf2out_do_cfi_asm ())
1179 fprintf (asm_out_file, "\t.cfi_endproc\n");
1180
1181 /* Output a label to mark the endpoint of the code generated for this
1182 function. */
1183 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
1184 current_function_funcdef_no);
1185 ASM_OUTPUT_LABEL (asm_out_file, label);
1186 fde = cfun->fde;
1187 gcc_assert (fde != NULL);
1188 if (fde->dw_fde_second_begin == NULL)
1189 fde->dw_fde_end = xstrdup (label);
1190 }
1191
1192 void
1193 dwarf2out_frame_finish (void)
1194 {
1195 /* Output call frame information. */
1196 if (targetm.debug_unwind_info () == UI_DWARF2)
1197 output_call_frame_info (0);
1198
1199 /* Output another copy for the unwinder. */
1200 if (do_eh_frame)
1201 output_call_frame_info (1);
1202 }
1203
1204 /* Note that the current function section is being used for code. */
1205
1206 static void
1207 dwarf2out_note_section_used (void)
1208 {
1209 section *sec = current_function_section ();
1210 if (sec == text_section)
1211 text_section_used = true;
1212 else if (sec == cold_text_section)
1213 cold_text_section_used = true;
1214 }
1215
1216 static void var_location_switch_text_section (void);
1217 static void set_cur_line_info_table (section *);
1218
1219 void
1220 dwarf2out_switch_text_section (void)
1221 {
1222 char label[MAX_ARTIFICIAL_LABEL_BYTES];
1223 section *sect;
1224 dw_fde_ref fde = cfun->fde;
1225
1226 gcc_assert (cfun && fde && fde->dw_fde_second_begin == NULL);
1227
1228 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_SECOND_SECT_LABEL,
1229 current_function_funcdef_no);
1230
1231 fde->dw_fde_second_begin = ggc_strdup (label);
1232 if (!in_cold_section_p)
1233 {
1234 fde->dw_fde_end = crtl->subsections.cold_section_end_label;
1235 fde->dw_fde_second_end = crtl->subsections.hot_section_end_label;
1236 }
1237 else
1238 {
1239 fde->dw_fde_end = crtl->subsections.hot_section_end_label;
1240 fde->dw_fde_second_end = crtl->subsections.cold_section_end_label;
1241 }
1242 have_multiple_function_sections = true;
1243
1244 /* There is no need to mark used sections when not debugging. */
1245 if (cold_text_section != NULL)
1246 dwarf2out_note_section_used ();
1247
1248 if (dwarf2out_do_cfi_asm ())
1249 fprintf (asm_out_file, "\t.cfi_endproc\n");
1250
1251 /* Now do the real section switch. */
1252 sect = current_function_section ();
1253 switch_to_section (sect);
1254
1255 fde->second_in_std_section
1256 = (sect == text_section
1257 || (cold_text_section && sect == cold_text_section));
1258
1259 if (dwarf2out_do_cfi_asm ())
1260 dwarf2out_do_cfi_startproc (true);
1261
1262 var_location_switch_text_section ();
1263
1264 if (cold_text_section != NULL)
1265 set_cur_line_info_table (sect);
1266 }
1267 \f
1268 /* And now, the subset of the debugging information support code necessary
1269 for emitting location expressions. */
1270
1271 /* Data about a single source file. */
1272 struct GTY((for_user)) dwarf_file_data {
1273 const char * filename;
1274 int emitted_number;
1275 };
1276
1277 /* Describe an entry into the .debug_addr section. */
1278
1279 enum ate_kind {
1280 ate_kind_rtx,
1281 ate_kind_rtx_dtprel,
1282 ate_kind_label
1283 };
1284
1285 struct GTY((for_user)) addr_table_entry {
1286 enum ate_kind kind;
1287 unsigned int refcount;
1288 unsigned int index;
1289 union addr_table_entry_struct_union
1290 {
1291 rtx GTY ((tag ("0"))) rtl;
1292 char * GTY ((tag ("1"))) label;
1293 }
1294 GTY ((desc ("%1.kind"))) addr;
1295 };
1296
1297 typedef unsigned int var_loc_view;
1298
1299 /* Location lists are ranges + location descriptions for that range,
1300 so you can track variables that are in different places over
1301 their entire life. */
1302 typedef struct GTY(()) dw_loc_list_struct {
1303 dw_loc_list_ref dw_loc_next;
1304 const char *begin; /* Label and addr_entry for start of range */
1305 addr_table_entry *begin_entry;
1306 const char *end; /* Label for end of range */
1307 char *ll_symbol; /* Label for beginning of location list.
1308 Only on head of list. */
1309 char *vl_symbol; /* Label for beginning of view list. Ditto. */
1310 const char *section; /* Section this loclist is relative to */
1311 dw_loc_descr_ref expr;
1312 var_loc_view vbegin, vend;
1313 hashval_t hash;
1314 /* True if all addresses in this and subsequent lists are known to be
1315 resolved. */
1316 bool resolved_addr;
1317 /* True if this list has been replaced by dw_loc_next. */
1318 bool replaced;
1319 /* True if it has been emitted into .debug_loc* / .debug_loclists*
1320 section. */
1321 unsigned char emitted : 1;
1322 /* True if hash field is index rather than hash value. */
1323 unsigned char num_assigned : 1;
1324 /* True if .debug_loclists.dwo offset has been emitted for it already. */
1325 unsigned char offset_emitted : 1;
1326 /* True if note_variable_value_in_expr has been called on it. */
1327 unsigned char noted_variable_value : 1;
1328 /* True if the range should be emitted even if begin and end
1329 are the same. */
1330 bool force;
1331 } dw_loc_list_node;
1332
1333 static dw_loc_descr_ref int_loc_descriptor (poly_int64);
1334 static dw_loc_descr_ref uint_loc_descriptor (unsigned HOST_WIDE_INT);
1335
1336 /* Convert a DWARF stack opcode into its string name. */
1337
1338 static const char *
1339 dwarf_stack_op_name (unsigned int op)
1340 {
1341 const char *name = get_DW_OP_name (op);
1342
1343 if (name != NULL)
1344 return name;
1345
1346 return "OP_<unknown>";
1347 }
1348
1349 /* Return TRUE iff we're to output location view lists as a separate
1350 attribute next to the location lists, as an extension compatible
1351 with DWARF 2 and above. */
1352
1353 static inline bool
1354 dwarf2out_locviews_in_attribute ()
1355 {
1356 return debug_variable_location_views == 1;
1357 }
1358
1359 /* Return TRUE iff we're to output location view lists as part of the
1360 location lists, as proposed for standardization after DWARF 5. */
1361
1362 static inline bool
1363 dwarf2out_locviews_in_loclist ()
1364 {
1365 #ifndef DW_LLE_view_pair
1366 return false;
1367 #else
1368 return debug_variable_location_views == -1;
1369 #endif
1370 }
1371
1372 /* Return a pointer to a newly allocated location description. Location
1373 descriptions are simple expression terms that can be strung
1374 together to form more complicated location (address) descriptions. */
1375
1376 static inline dw_loc_descr_ref
1377 new_loc_descr (enum dwarf_location_atom op, unsigned HOST_WIDE_INT oprnd1,
1378 unsigned HOST_WIDE_INT oprnd2)
1379 {
1380 dw_loc_descr_ref descr = ggc_cleared_alloc<dw_loc_descr_node> ();
1381
1382 descr->dw_loc_opc = op;
1383 descr->dw_loc_oprnd1.val_class = dw_val_class_unsigned_const;
1384 descr->dw_loc_oprnd1.val_entry = NULL;
1385 descr->dw_loc_oprnd1.v.val_unsigned = oprnd1;
1386 descr->dw_loc_oprnd2.val_class = dw_val_class_unsigned_const;
1387 descr->dw_loc_oprnd2.val_entry = NULL;
1388 descr->dw_loc_oprnd2.v.val_unsigned = oprnd2;
1389
1390 return descr;
1391 }
1392
1393 /* Add a location description term to a location description expression. */
1394
1395 static inline void
1396 add_loc_descr (dw_loc_descr_ref *list_head, dw_loc_descr_ref descr)
1397 {
1398 dw_loc_descr_ref *d;
1399
1400 /* Find the end of the chain. */
1401 for (d = list_head; (*d) != NULL; d = &(*d)->dw_loc_next)
1402 ;
1403
1404 *d = descr;
1405 }
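/* A minimal usage sketch (editor's illustration, not part of dwarf2out.c):
   chain two terms with the helpers above to form the expression
   "DW_OP_plus_uconst 8; DW_OP_deref", i.e. add 8 to whatever address is on
   the DWARF stack and dereference the result.  */
#if 0
static dw_loc_descr_ref
example_plus_deref_loc (void)
{
  dw_loc_descr_ref loc = new_loc_descr (DW_OP_plus_uconst, 8, 0);
  add_loc_descr (&loc, new_loc_descr (DW_OP_deref, 0, 0));
  return loc;
}
#endif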
1406
1407 /* Compare two location operands for exact equality. */
1408
1409 static bool
1410 dw_val_equal_p (dw_val_node *a, dw_val_node *b)
1411 {
1412 if (a->val_class != b->val_class)
1413 return false;
1414 switch (a->val_class)
1415 {
1416 case dw_val_class_none:
1417 return true;
1418 case dw_val_class_addr:
1419 return rtx_equal_p (a->v.val_addr, b->v.val_addr);
1420
1421 case dw_val_class_offset:
1422 case dw_val_class_unsigned_const:
1423 case dw_val_class_const:
1424 case dw_val_class_unsigned_const_implicit:
1425 case dw_val_class_const_implicit:
1426 case dw_val_class_range_list:
1427 /* These are all HOST_WIDE_INT, signed or unsigned. */
1428 return a->v.val_unsigned == b->v.val_unsigned;
1429
1430 case dw_val_class_loc:
1431 return a->v.val_loc == b->v.val_loc;
1432 case dw_val_class_loc_list:
1433 return a->v.val_loc_list == b->v.val_loc_list;
1434 case dw_val_class_view_list:
1435 return a->v.val_view_list == b->v.val_view_list;
1436 case dw_val_class_die_ref:
1437 return a->v.val_die_ref.die == b->v.val_die_ref.die;
1438 case dw_val_class_fde_ref:
1439 return a->v.val_fde_index == b->v.val_fde_index;
1440 case dw_val_class_symview:
1441 return strcmp (a->v.val_symbolic_view, b->v.val_symbolic_view) == 0;
1442 case dw_val_class_lbl_id:
1443 case dw_val_class_lineptr:
1444 case dw_val_class_macptr:
1445 case dw_val_class_loclistsptr:
1446 case dw_val_class_high_pc:
1447 return strcmp (a->v.val_lbl_id, b->v.val_lbl_id) == 0;
1448 case dw_val_class_str:
1449 return a->v.val_str == b->v.val_str;
1450 case dw_val_class_flag:
1451 return a->v.val_flag == b->v.val_flag;
1452 case dw_val_class_file:
1453 case dw_val_class_file_implicit:
1454 return a->v.val_file == b->v.val_file;
1455 case dw_val_class_decl_ref:
1456 return a->v.val_decl_ref == b->v.val_decl_ref;
1457
1458 case dw_val_class_const_double:
1459 return (a->v.val_double.high == b->v.val_double.high
1460 && a->v.val_double.low == b->v.val_double.low);
1461
1462 case dw_val_class_wide_int:
1463 return *a->v.val_wide == *b->v.val_wide;
1464
1465 case dw_val_class_vec:
1466 {
1467 size_t a_len = a->v.val_vec.elt_size * a->v.val_vec.length;
1468 size_t b_len = b->v.val_vec.elt_size * b->v.val_vec.length;
1469
1470 return (a_len == b_len
1471 && !memcmp (a->v.val_vec.array, b->v.val_vec.array, a_len));
1472 }
1473
1474 case dw_val_class_data8:
1475 return memcmp (a->v.val_data8, b->v.val_data8, 8) == 0;
1476
1477 case dw_val_class_vms_delta:
1478 return (!strcmp (a->v.val_vms_delta.lbl1, b->v.val_vms_delta.lbl1)
1479 && !strcmp (a->v.val_vms_delta.lbl2, b->v.val_vms_delta.lbl2));
1480
1481 case dw_val_class_discr_value:
1482 return (a->v.val_discr_value.pos == b->v.val_discr_value.pos
1483 && a->v.val_discr_value.v.uval == b->v.val_discr_value.v.uval);
1484 case dw_val_class_discr_list:
1485 /* It makes no sense comparing two discriminant value lists. */
1486 return false;
1487 }
1488 gcc_unreachable ();
1489 }
1490
1491 /* Compare two location atoms for exact equality. */
1492
1493 static bool
1494 loc_descr_equal_p_1 (dw_loc_descr_ref a, dw_loc_descr_ref b)
1495 {
1496 if (a->dw_loc_opc != b->dw_loc_opc)
1497 return false;
1498
1499 /* ??? This is only ever set for DW_OP_constNu, for N equal to the
1500 address size, but since we always allocate cleared storage it
1501 should be zero for other types of locations. */
1502 if (a->dtprel != b->dtprel)
1503 return false;
1504
1505 return (dw_val_equal_p (&a->dw_loc_oprnd1, &b->dw_loc_oprnd1)
1506 && dw_val_equal_p (&a->dw_loc_oprnd2, &b->dw_loc_oprnd2));
1507 }
1508
1509 /* Compare two complete location expressions for exact equality. */
1510
1511 bool
1512 loc_descr_equal_p (dw_loc_descr_ref a, dw_loc_descr_ref b)
1513 {
1514 while (1)
1515 {
1516 if (a == b)
1517 return true;
1518 if (a == NULL || b == NULL)
1519 return false;
1520 if (!loc_descr_equal_p_1 (a, b))
1521 return false;
1522
1523 a = a->dw_loc_next;
1524 b = b->dw_loc_next;
1525 }
1526 }
1527
1528
1529 /* Add a constant POLY_OFFSET to a location expression. */
1530
1531 static void
1532 loc_descr_plus_const (dw_loc_descr_ref *list_head, poly_int64 poly_offset)
1533 {
1534 dw_loc_descr_ref loc;
1535 HOST_WIDE_INT *p;
1536
1537 gcc_assert (*list_head != NULL);
1538
1539 if (known_eq (poly_offset, 0))
1540 return;
1541
1542 /* Find the end of the chain. */
1543 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
1544 ;
1545
1546 HOST_WIDE_INT offset;
1547 if (!poly_offset.is_constant (&offset))
1548 {
1549 loc->dw_loc_next = int_loc_descriptor (poly_offset);
1550 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_plus, 0, 0));
1551 return;
1552 }
1553
1554 p = NULL;
1555 if (loc->dw_loc_opc == DW_OP_fbreg
1556 || (loc->dw_loc_opc >= DW_OP_breg0 && loc->dw_loc_opc <= DW_OP_breg31))
1557 p = &loc->dw_loc_oprnd1.v.val_int;
1558 else if (loc->dw_loc_opc == DW_OP_bregx)
1559 p = &loc->dw_loc_oprnd2.v.val_int;
1560
1561 /* If the last operation is fbreg, breg{0..31,x}, optimize by adjusting its
1562 offset. Don't optimize if a signed integer overflow would happen. */
1563 if (p != NULL
1564 && ((offset > 0 && *p <= INTTYPE_MAXIMUM (HOST_WIDE_INT) - offset)
1565 || (offset < 0 && *p >= INTTYPE_MINIMUM (HOST_WIDE_INT) - offset)))
1566 *p += offset;
1567
1568 else if (offset > 0)
1569 loc->dw_loc_next = new_loc_descr (DW_OP_plus_uconst, offset, 0);
1570
1571 else
1572 {
1573 loc->dw_loc_next
1574 = uint_loc_descriptor (-(unsigned HOST_WIDE_INT) offset);
1575 add_loc_descr (&loc->dw_loc_next, new_loc_descr (DW_OP_minus, 0, 0));
1576 }
1577 }
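/* Worked example (editor's note, not part of the original source): if the
   expression currently ends in "DW_OP_fbreg -24" and POLY_OFFSET is the
   compile-time constant 16, the code above folds it in and the final term
   becomes "DW_OP_fbreg -8".  If POLY_OFFSET is not a compile-time constant
   (e.g. it involves a runtime vector length), it instead appends
   "<poly offset>; DW_OP_plus" after the existing expression.  */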
1578
1579 /* Return a pointer to a newly allocated location description for
1580 REG and OFFSET. */
1581
1582 static inline dw_loc_descr_ref
1583 new_reg_loc_descr (unsigned int reg, poly_int64 offset)
1584 {
1585 HOST_WIDE_INT const_offset;
1586 if (offset.is_constant (&const_offset))
1587 {
1588 if (reg <= 31)
1589 return new_loc_descr ((enum dwarf_location_atom) (DW_OP_breg0 + reg),
1590 const_offset, 0);
1591 else
1592 return new_loc_descr (DW_OP_bregx, reg, const_offset);
1593 }
1594 else
1595 {
1596 dw_loc_descr_ref ret = new_reg_loc_descr (reg, 0);
1597 loc_descr_plus_const (&ret, offset);
1598 return ret;
1599 }
1600 }
1601
1602 /* Add a constant OFFSET to a location list. */
1603
1604 static void
1605 loc_list_plus_const (dw_loc_list_ref list_head, poly_int64 offset)
1606 {
1607 dw_loc_list_ref d;
1608 for (d = list_head; d != NULL; d = d->dw_loc_next)
1609 loc_descr_plus_const (&d->expr, offset);
1610 }
1611
1612 #define DWARF_REF_SIZE \
1613 (dwarf_version == 2 ? DWARF2_ADDR_SIZE : DWARF_OFFSET_SIZE)
1614
1615 /* The number of bits that can be encoded by the largest DW_FORM_dataN.
1616 In DWARF4 and earlier it is DW_FORM_data8 with 64 bits, in DWARF5
1617 DW_FORM_data16 with 128 bits. */
1618 #define DWARF_LARGEST_DATA_FORM_BITS \
1619 (dwarf_version >= 5 ? 128 : 64)
1620
1621 /* Utility inline function for construction of ops that were GNU extension
1622 before DWARF 5. */
1623 static inline enum dwarf_location_atom
1624 dwarf_OP (enum dwarf_location_atom op)
1625 {
1626 switch (op)
1627 {
1628 case DW_OP_implicit_pointer:
1629 if (dwarf_version < 5)
1630 return DW_OP_GNU_implicit_pointer;
1631 break;
1632
1633 case DW_OP_entry_value:
1634 if (dwarf_version < 5)
1635 return DW_OP_GNU_entry_value;
1636 break;
1637
1638 case DW_OP_const_type:
1639 if (dwarf_version < 5)
1640 return DW_OP_GNU_const_type;
1641 break;
1642
1643 case DW_OP_regval_type:
1644 if (dwarf_version < 5)
1645 return DW_OP_GNU_regval_type;
1646 break;
1647
1648 case DW_OP_deref_type:
1649 if (dwarf_version < 5)
1650 return DW_OP_GNU_deref_type;
1651 break;
1652
1653 case DW_OP_convert:
1654 if (dwarf_version < 5)
1655 return DW_OP_GNU_convert;
1656 break;
1657
1658 case DW_OP_reinterpret:
1659 if (dwarf_version < 5)
1660 return DW_OP_GNU_reinterpret;
1661 break;
1662
1663 case DW_OP_addrx:
1664 if (dwarf_version < 5)
1665 return DW_OP_GNU_addr_index;
1666 break;
1667
1668 case DW_OP_constx:
1669 if (dwarf_version < 5)
1670 return DW_OP_GNU_const_index;
1671 break;
1672
1673 default:
1674 break;
1675 }
1676 return op;
1677 }
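/* For example (editor's note): with -gdwarf-4, dwarf_OP (DW_OP_entry_value)
   yields the pre-standard DW_OP_GNU_entry_value, while with -gdwarf-5 the
   standard opcode is returned unchanged.  dwarf_AT, dwarf_TAG and dwarf_FORM
   below follow the same pattern for attributes, tags and forms.  */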
1678
1679 /* Similarly for attributes. */
1680 static inline enum dwarf_attribute
1681 dwarf_AT (enum dwarf_attribute at)
1682 {
1683 switch (at)
1684 {
1685 case DW_AT_call_return_pc:
1686 if (dwarf_version < 5)
1687 return DW_AT_low_pc;
1688 break;
1689
1690 case DW_AT_call_tail_call:
1691 if (dwarf_version < 5)
1692 return DW_AT_GNU_tail_call;
1693 break;
1694
1695 case DW_AT_call_origin:
1696 if (dwarf_version < 5)
1697 return DW_AT_abstract_origin;
1698 break;
1699
1700 case DW_AT_call_target:
1701 if (dwarf_version < 5)
1702 return DW_AT_GNU_call_site_target;
1703 break;
1704
1705 case DW_AT_call_target_clobbered:
1706 if (dwarf_version < 5)
1707 return DW_AT_GNU_call_site_target_clobbered;
1708 break;
1709
1710 case DW_AT_call_parameter:
1711 if (dwarf_version < 5)
1712 return DW_AT_abstract_origin;
1713 break;
1714
1715 case DW_AT_call_value:
1716 if (dwarf_version < 5)
1717 return DW_AT_GNU_call_site_value;
1718 break;
1719
1720 case DW_AT_call_data_value:
1721 if (dwarf_version < 5)
1722 return DW_AT_GNU_call_site_data_value;
1723 break;
1724
1725 case DW_AT_call_all_calls:
1726 if (dwarf_version < 5)
1727 return DW_AT_GNU_all_call_sites;
1728 break;
1729
1730 case DW_AT_call_all_tail_calls:
1731 if (dwarf_version < 5)
1732 return DW_AT_GNU_all_tail_call_sites;
1733 break;
1734
1735 case DW_AT_dwo_name:
1736 if (dwarf_version < 5)
1737 return DW_AT_GNU_dwo_name;
1738 break;
1739
1740 case DW_AT_addr_base:
1741 if (dwarf_version < 5)
1742 return DW_AT_GNU_addr_base;
1743 break;
1744
1745 default:
1746 break;
1747 }
1748 return at;
1749 }
1750
1751 /* And similarly for tags. */
1752 static inline enum dwarf_tag
1753 dwarf_TAG (enum dwarf_tag tag)
1754 {
1755 switch (tag)
1756 {
1757 case DW_TAG_call_site:
1758 if (dwarf_version < 5)
1759 return DW_TAG_GNU_call_site;
1760 break;
1761
1762 case DW_TAG_call_site_parameter:
1763 if (dwarf_version < 5)
1764 return DW_TAG_GNU_call_site_parameter;
1765 break;
1766
1767 default:
1768 break;
1769 }
1770 return tag;
1771 }
1772
1773 /* And similarly for forms. */
1774 static inline enum dwarf_form
1775 dwarf_FORM (enum dwarf_form form)
1776 {
1777 switch (form)
1778 {
1779 case DW_FORM_addrx:
1780 if (dwarf_version < 5)
1781 return DW_FORM_GNU_addr_index;
1782 break;
1783
1784 case DW_FORM_strx:
1785 if (dwarf_version < 5)
1786 return DW_FORM_GNU_str_index;
1787 break;
1788
1789 default:
1790 break;
1791 }
1792 return form;
1793 }
1794
1795 static unsigned long int get_base_type_offset (dw_die_ref);
1796
1797 /* Return the size of a location descriptor. */
1798
1799 static unsigned long
1800 size_of_loc_descr (dw_loc_descr_ref loc)
1801 {
1802 unsigned long size = 1;
1803
1804 switch (loc->dw_loc_opc)
1805 {
1806 case DW_OP_addr:
1807 size += DWARF2_ADDR_SIZE;
1808 break;
1809 case DW_OP_GNU_addr_index:
1810 case DW_OP_addrx:
1811 case DW_OP_GNU_const_index:
1812 case DW_OP_constx:
1813 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
1814 size += size_of_uleb128 (loc->dw_loc_oprnd1.val_entry->index);
1815 break;
1816 case DW_OP_const1u:
1817 case DW_OP_const1s:
1818 size += 1;
1819 break;
1820 case DW_OP_const2u:
1821 case DW_OP_const2s:
1822 size += 2;
1823 break;
1824 case DW_OP_const4u:
1825 case DW_OP_const4s:
1826 size += 4;
1827 break;
1828 case DW_OP_const8u:
1829 case DW_OP_const8s:
1830 size += 8;
1831 break;
1832 case DW_OP_constu:
1833 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1834 break;
1835 case DW_OP_consts:
1836 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1837 break;
1838 case DW_OP_pick:
1839 size += 1;
1840 break;
1841 case DW_OP_plus_uconst:
1842 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1843 break;
1844 case DW_OP_skip:
1845 case DW_OP_bra:
1846 size += 2;
1847 break;
1848 case DW_OP_breg0:
1849 case DW_OP_breg1:
1850 case DW_OP_breg2:
1851 case DW_OP_breg3:
1852 case DW_OP_breg4:
1853 case DW_OP_breg5:
1854 case DW_OP_breg6:
1855 case DW_OP_breg7:
1856 case DW_OP_breg8:
1857 case DW_OP_breg9:
1858 case DW_OP_breg10:
1859 case DW_OP_breg11:
1860 case DW_OP_breg12:
1861 case DW_OP_breg13:
1862 case DW_OP_breg14:
1863 case DW_OP_breg15:
1864 case DW_OP_breg16:
1865 case DW_OP_breg17:
1866 case DW_OP_breg18:
1867 case DW_OP_breg19:
1868 case DW_OP_breg20:
1869 case DW_OP_breg21:
1870 case DW_OP_breg22:
1871 case DW_OP_breg23:
1872 case DW_OP_breg24:
1873 case DW_OP_breg25:
1874 case DW_OP_breg26:
1875 case DW_OP_breg27:
1876 case DW_OP_breg28:
1877 case DW_OP_breg29:
1878 case DW_OP_breg30:
1879 case DW_OP_breg31:
1880 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1881 break;
1882 case DW_OP_regx:
1883 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1884 break;
1885 case DW_OP_fbreg:
1886 size += size_of_sleb128 (loc->dw_loc_oprnd1.v.val_int);
1887 break;
1888 case DW_OP_bregx:
1889 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1890 size += size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1891 break;
1892 case DW_OP_piece:
1893 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1894 break;
1895 case DW_OP_bit_piece:
1896 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1897 size += size_of_uleb128 (loc->dw_loc_oprnd2.v.val_unsigned);
1898 break;
1899 case DW_OP_deref_size:
1900 case DW_OP_xderef_size:
1901 size += 1;
1902 break;
1903 case DW_OP_call2:
1904 size += 2;
1905 break;
1906 case DW_OP_call4:
1907 size += 4;
1908 break;
1909 case DW_OP_call_ref:
1910 case DW_OP_GNU_variable_value:
1911 size += DWARF_REF_SIZE;
1912 break;
1913 case DW_OP_implicit_value:
1914 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1915 + loc->dw_loc_oprnd1.v.val_unsigned;
1916 break;
1917 case DW_OP_implicit_pointer:
1918 case DW_OP_GNU_implicit_pointer:
1919 size += DWARF_REF_SIZE + size_of_sleb128 (loc->dw_loc_oprnd2.v.val_int);
1920 break;
1921 case DW_OP_entry_value:
1922 case DW_OP_GNU_entry_value:
1923 {
1924 unsigned long op_size = size_of_locs (loc->dw_loc_oprnd1.v.val_loc);
1925 size += size_of_uleb128 (op_size) + op_size;
1926 break;
1927 }
1928 case DW_OP_const_type:
1929 case DW_OP_GNU_const_type:
1930 {
1931 unsigned long o
1932 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1933 size += size_of_uleb128 (o) + 1;
1934 switch (loc->dw_loc_oprnd2.val_class)
1935 {
1936 case dw_val_class_vec:
1937 size += loc->dw_loc_oprnd2.v.val_vec.length
1938 * loc->dw_loc_oprnd2.v.val_vec.elt_size;
1939 break;
1940 case dw_val_class_const:
1941 size += HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT;
1942 break;
1943 case dw_val_class_const_double:
1944 size += HOST_BITS_PER_DOUBLE_INT / BITS_PER_UNIT;
1945 break;
1946 case dw_val_class_wide_int:
1947 size += (get_full_len (*loc->dw_loc_oprnd2.v.val_wide)
1948 * HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
1949 break;
1950 default:
1951 gcc_unreachable ();
1952 }
1953 break;
1954 }
1955 case DW_OP_regval_type:
1956 case DW_OP_GNU_regval_type:
1957 {
1958 unsigned long o
1959 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1960 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned)
1961 + size_of_uleb128 (o);
1962 }
1963 break;
1964 case DW_OP_deref_type:
1965 case DW_OP_GNU_deref_type:
1966 {
1967 unsigned long o
1968 = get_base_type_offset (loc->dw_loc_oprnd2.v.val_die_ref.die);
1969 size += 1 + size_of_uleb128 (o);
1970 }
1971 break;
1972 case DW_OP_convert:
1973 case DW_OP_reinterpret:
1974 case DW_OP_GNU_convert:
1975 case DW_OP_GNU_reinterpret:
1976 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
1977 size += size_of_uleb128 (loc->dw_loc_oprnd1.v.val_unsigned);
1978 else
1979 {
1980 unsigned long o
1981 = get_base_type_offset (loc->dw_loc_oprnd1.v.val_die_ref.die);
1982 size += size_of_uleb128 (o);
1983 }
1984 break;
1985 case DW_OP_GNU_parameter_ref:
1986 size += 4;
1987 break;
1988 default:
1989 break;
1990 }
1991
1992 return size;
1993 }
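/* Worked example (illustrative, not part of the original source): a
   DW_OP_bregx descriptor for register 140 with offset -32 is sized as
   1 byte for the opcode, plus size_of_uleb128 (140) == 2 bytes, plus
   size_of_sleb128 (-32) == 1 byte, i.e. 4 bytes in total.  */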
1994
1995 /* Return the size of a series of location descriptors. */
1996
1997 unsigned long
1998 size_of_locs (dw_loc_descr_ref loc)
1999 {
2000 dw_loc_descr_ref l;
2001 unsigned long size;
2002
2003 /* If there are no skip or bra opcodes, don't fill in the dw_loc_addr
2004 field, to avoid writing to a PCH file. */
2005 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2006 {
2007 if (l->dw_loc_opc == DW_OP_skip || l->dw_loc_opc == DW_OP_bra)
2008 break;
2009 size += size_of_loc_descr (l);
2010 }
2011 if (! l)
2012 return size;
2013
2014 for (size = 0, l = loc; l != NULL; l = l->dw_loc_next)
2015 {
2016 l->dw_loc_addr = size;
2017 size += size_of_loc_descr (l);
2018 }
2019
2020 return size;
2021 }
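/* Example (illustrative): for the two-operation expression
   DW_OP_breg6 -8; DW_OP_deref the first pass returns (1 + 1) + 1 == 3
   bytes and, because neither DW_OP_skip nor DW_OP_bra occurs, the
   dw_loc_addr fields are left untouched.  */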
2022
2023 /* Return the size of the value in a DW_AT_discr_value attribute. */
2024
2025 static int
2026 size_of_discr_value (dw_discr_value *discr_value)
2027 {
2028 if (discr_value->pos)
2029 return size_of_uleb128 (discr_value->v.uval);
2030 else
2031 return size_of_sleb128 (discr_value->v.sval);
2032 }
2033
2034 /* Return the size of the value in a DW_AT_discr_list attribute. */
2035
2036 static int
2037 size_of_discr_list (dw_discr_list_ref discr_list)
2038 {
2039 int size = 0;
2040
2041 for (dw_discr_list_ref list = discr_list;
2042 list != NULL;
2043 list = list->dw_discr_next)
2044 {
2045 /* One byte for the discriminant value descriptor, and then one or two
2046 LEB128 numbers, depending on whether it's a single case label or a
2047 range label. */
2048 size += 1;
2049 size += size_of_discr_value (&list->dw_discr_lower_bound);
2050 if (list->dw_discr_range != 0)
2051 size += size_of_discr_value (&list->dw_discr_upper_bound);
2052 }
2053 return size;
2054 }
2055
2056 static HOST_WIDE_INT extract_int (const unsigned char *, unsigned);
2057 static void get_ref_die_offset_label (char *, dw_die_ref);
2058 static unsigned long int get_ref_die_offset (dw_die_ref);
2059
2060 /* Output location description stack opcode's operands (if any).
2061 The for_eh_or_skip parameter controls whether register numbers are
2062 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2063 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2064 info). This should be suppressed for the cases that have not been converted
2065 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2066
2067 static void
2068 output_loc_operands (dw_loc_descr_ref loc, int for_eh_or_skip)
2069 {
2070 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2071 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2072
2073 switch (loc->dw_loc_opc)
2074 {
2075 #ifdef DWARF2_DEBUGGING_INFO
2076 case DW_OP_const2u:
2077 case DW_OP_const2s:
2078 dw2_asm_output_data (2, val1->v.val_int, NULL);
2079 break;
2080 case DW_OP_const4u:
2081 if (loc->dtprel)
2082 {
2083 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2084 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 4,
2085 val1->v.val_addr);
2086 fputc ('\n', asm_out_file);
2087 break;
2088 }
2089 /* FALLTHRU */
2090 case DW_OP_const4s:
2091 dw2_asm_output_data (4, val1->v.val_int, NULL);
2092 break;
2093 case DW_OP_const8u:
2094 if (loc->dtprel)
2095 {
2096 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
2097 targetm.asm_out.output_dwarf_dtprel (asm_out_file, 8,
2098 val1->v.val_addr);
2099 fputc ('\n', asm_out_file);
2100 break;
2101 }
2102 /* FALLTHRU */
2103 case DW_OP_const8s:
2104 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2105 dw2_asm_output_data (8, val1->v.val_int, NULL);
2106 break;
2107 case DW_OP_skip:
2108 case DW_OP_bra:
2109 {
2110 int offset;
2111
2112 gcc_assert (val1->val_class == dw_val_class_loc);
2113 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2114
2115 dw2_asm_output_data (2, offset, NULL);
2116 }
2117 break;
2118 case DW_OP_implicit_value:
2119 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2120 switch (val2->val_class)
2121 {
2122 case dw_val_class_const:
2123 dw2_asm_output_data (val1->v.val_unsigned, val2->v.val_int, NULL);
2124 break;
2125 case dw_val_class_vec:
2126 {
2127 unsigned int elt_size = val2->v.val_vec.elt_size;
2128 unsigned int len = val2->v.val_vec.length;
2129 unsigned int i;
2130 unsigned char *p;
2131
2132 if (elt_size > sizeof (HOST_WIDE_INT))
2133 {
2134 elt_size /= 2;
2135 len *= 2;
2136 }
2137 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2138 i < len;
2139 i++, p += elt_size)
2140 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2141 "fp or vector constant word %u", i);
2142 }
2143 break;
2144 case dw_val_class_const_double:
2145 {
2146 unsigned HOST_WIDE_INT first, second;
2147
2148 if (WORDS_BIG_ENDIAN)
2149 {
2150 first = val2->v.val_double.high;
2151 second = val2->v.val_double.low;
2152 }
2153 else
2154 {
2155 first = val2->v.val_double.low;
2156 second = val2->v.val_double.high;
2157 }
2158 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2159 first, NULL);
2160 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2161 second, NULL);
2162 }
2163 break;
2164 case dw_val_class_wide_int:
2165 {
2166 int i;
2167 int len = get_full_len (*val2->v.val_wide);
2168 if (WORDS_BIG_ENDIAN)
2169 for (i = len - 1; i >= 0; --i)
2170 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2171 val2->v.val_wide->elt (i), NULL);
2172 else
2173 for (i = 0; i < len; ++i)
2174 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
2175 val2->v.val_wide->elt (i), NULL);
2176 }
2177 break;
2178 case dw_val_class_addr:
2179 gcc_assert (val1->v.val_unsigned == DWARF2_ADDR_SIZE);
2180 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val2->v.val_addr, NULL);
2181 break;
2182 default:
2183 gcc_unreachable ();
2184 }
2185 break;
2186 #else
2187 case DW_OP_const2u:
2188 case DW_OP_const2s:
2189 case DW_OP_const4u:
2190 case DW_OP_const4s:
2191 case DW_OP_const8u:
2192 case DW_OP_const8s:
2193 case DW_OP_skip:
2194 case DW_OP_bra:
2195 case DW_OP_implicit_value:
2196 /* We currently don't make any attempt to make sure these are
2197 aligned properly like we do for the main unwind info, so
2198 don't support emitting things larger than a byte if we're
2199 only doing unwinding. */
2200 gcc_unreachable ();
2201 #endif
2202 case DW_OP_const1u:
2203 case DW_OP_const1s:
2204 dw2_asm_output_data (1, val1->v.val_int, NULL);
2205 break;
2206 case DW_OP_constu:
2207 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2208 break;
2209 case DW_OP_consts:
2210 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2211 break;
2212 case DW_OP_pick:
2213 dw2_asm_output_data (1, val1->v.val_int, NULL);
2214 break;
2215 case DW_OP_plus_uconst:
2216 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2217 break;
2218 case DW_OP_breg0:
2219 case DW_OP_breg1:
2220 case DW_OP_breg2:
2221 case DW_OP_breg3:
2222 case DW_OP_breg4:
2223 case DW_OP_breg5:
2224 case DW_OP_breg6:
2225 case DW_OP_breg7:
2226 case DW_OP_breg8:
2227 case DW_OP_breg9:
2228 case DW_OP_breg10:
2229 case DW_OP_breg11:
2230 case DW_OP_breg12:
2231 case DW_OP_breg13:
2232 case DW_OP_breg14:
2233 case DW_OP_breg15:
2234 case DW_OP_breg16:
2235 case DW_OP_breg17:
2236 case DW_OP_breg18:
2237 case DW_OP_breg19:
2238 case DW_OP_breg20:
2239 case DW_OP_breg21:
2240 case DW_OP_breg22:
2241 case DW_OP_breg23:
2242 case DW_OP_breg24:
2243 case DW_OP_breg25:
2244 case DW_OP_breg26:
2245 case DW_OP_breg27:
2246 case DW_OP_breg28:
2247 case DW_OP_breg29:
2248 case DW_OP_breg30:
2249 case DW_OP_breg31:
2250 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2251 break;
2252 case DW_OP_regx:
2253 {
2254 unsigned r = val1->v.val_unsigned;
2255 if (for_eh_or_skip >= 0)
2256 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2257 gcc_assert (size_of_uleb128 (r)
2258 == size_of_uleb128 (val1->v.val_unsigned));
2259 dw2_asm_output_data_uleb128 (r, NULL);
2260 }
2261 break;
2262 case DW_OP_fbreg:
2263 dw2_asm_output_data_sleb128 (val1->v.val_int, NULL);
2264 break;
2265 case DW_OP_bregx:
2266 {
2267 unsigned r = val1->v.val_unsigned;
2268 if (for_eh_or_skip >= 0)
2269 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2270 gcc_assert (size_of_uleb128 (r)
2271 == size_of_uleb128 (val1->v.val_unsigned));
2272 dw2_asm_output_data_uleb128 (r, NULL);
2273 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2274 }
2275 break;
2276 case DW_OP_piece:
2277 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2278 break;
2279 case DW_OP_bit_piece:
2280 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2281 dw2_asm_output_data_uleb128 (val2->v.val_unsigned, NULL);
2282 break;
2283 case DW_OP_deref_size:
2284 case DW_OP_xderef_size:
2285 dw2_asm_output_data (1, val1->v.val_int, NULL);
2286 break;
2287
2288 case DW_OP_addr:
2289 if (loc->dtprel)
2290 {
2291 if (targetm.asm_out.output_dwarf_dtprel)
2292 {
2293 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
2294 DWARF2_ADDR_SIZE,
2295 val1->v.val_addr);
2296 fputc ('\n', asm_out_file);
2297 }
2298 else
2299 gcc_unreachable ();
2300 }
2301 else
2302 {
2303 #ifdef DWARF2_DEBUGGING_INFO
2304 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, val1->v.val_addr, NULL);
2305 #else
2306 gcc_unreachable ();
2307 #endif
2308 }
2309 break;
2310
2311 case DW_OP_GNU_addr_index:
2312 case DW_OP_addrx:
2313 case DW_OP_GNU_const_index:
2314 case DW_OP_constx:
2315 gcc_assert (loc->dw_loc_oprnd1.val_entry->index != NO_INDEX_ASSIGNED);
2316 dw2_asm_output_data_uleb128 (loc->dw_loc_oprnd1.val_entry->index,
2317 "(index into .debug_addr)");
2318 break;
2319
2320 case DW_OP_call2:
2321 case DW_OP_call4:
2322 {
2323 unsigned long die_offset
2324 = get_ref_die_offset (val1->v.val_die_ref.die);
2325 /* Make sure the offset has been computed and that we can encode it as
2326 an operand. */
2327 gcc_assert (die_offset > 0
2328 && die_offset <= (loc->dw_loc_opc == DW_OP_call2
2329 ? 0xffff
2330 : 0xffffffff));
2331 dw2_asm_output_data ((loc->dw_loc_opc == DW_OP_call2) ? 2 : 4,
2332 die_offset, NULL);
2333 }
2334 break;
2335
2336 case DW_OP_call_ref:
2337 case DW_OP_GNU_variable_value:
2338 {
2339 char label[MAX_ARTIFICIAL_LABEL_BYTES
2340 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2341 gcc_assert (val1->val_class == dw_val_class_die_ref);
2342 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2343 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2344 }
2345 break;
2346
2347 case DW_OP_implicit_pointer:
2348 case DW_OP_GNU_implicit_pointer:
2349 {
2350 char label[MAX_ARTIFICIAL_LABEL_BYTES
2351 + HOST_BITS_PER_WIDE_INT / 2 + 2];
2352 gcc_assert (val1->val_class == dw_val_class_die_ref);
2353 get_ref_die_offset_label (label, val1->v.val_die_ref.die);
2354 dw2_asm_output_offset (DWARF_REF_SIZE, label, debug_info_section, NULL);
2355 dw2_asm_output_data_sleb128 (val2->v.val_int, NULL);
2356 }
2357 break;
2358
2359 case DW_OP_entry_value:
2360 case DW_OP_GNU_entry_value:
2361 dw2_asm_output_data_uleb128 (size_of_locs (val1->v.val_loc), NULL);
2362 output_loc_sequence (val1->v.val_loc, for_eh_or_skip);
2363 break;
2364
2365 case DW_OP_const_type:
2366 case DW_OP_GNU_const_type:
2367 {
2368 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die), l;
2369 gcc_assert (o);
2370 dw2_asm_output_data_uleb128 (o, NULL);
2371 switch (val2->val_class)
2372 {
2373 case dw_val_class_const:
2374 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2375 dw2_asm_output_data (1, l, NULL);
2376 dw2_asm_output_data (l, val2->v.val_int, NULL);
2377 break;
2378 case dw_val_class_vec:
2379 {
2380 unsigned int elt_size = val2->v.val_vec.elt_size;
2381 unsigned int len = val2->v.val_vec.length;
2382 unsigned int i;
2383 unsigned char *p;
2384
2385 l = len * elt_size;
2386 dw2_asm_output_data (1, l, NULL);
2387 if (elt_size > sizeof (HOST_WIDE_INT))
2388 {
2389 elt_size /= 2;
2390 len *= 2;
2391 }
2392 for (i = 0, p = (unsigned char *) val2->v.val_vec.array;
2393 i < len;
2394 i++, p += elt_size)
2395 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
2396 "fp or vector constant word %u", i);
2397 }
2398 break;
2399 case dw_val_class_const_double:
2400 {
2401 unsigned HOST_WIDE_INT first, second;
2402 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2403
2404 dw2_asm_output_data (1, 2 * l, NULL);
2405 if (WORDS_BIG_ENDIAN)
2406 {
2407 first = val2->v.val_double.high;
2408 second = val2->v.val_double.low;
2409 }
2410 else
2411 {
2412 first = val2->v.val_double.low;
2413 second = val2->v.val_double.high;
2414 }
2415 dw2_asm_output_data (l, first, NULL);
2416 dw2_asm_output_data (l, second, NULL);
2417 }
2418 break;
2419 case dw_val_class_wide_int:
2420 {
2421 int i;
2422 int len = get_full_len (*val2->v.val_wide);
2423 l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
2424
2425 dw2_asm_output_data (1, len * l, NULL);
2426 if (WORDS_BIG_ENDIAN)
2427 for (i = len - 1; i >= 0; --i)
2428 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2429 else
2430 for (i = 0; i < len; ++i)
2431 dw2_asm_output_data (l, val2->v.val_wide->elt (i), NULL);
2432 }
2433 break;
2434 default:
2435 gcc_unreachable ();
2436 }
2437 }
2438 break;
2439 case DW_OP_regval_type:
2440 case DW_OP_GNU_regval_type:
2441 {
2442 unsigned r = val1->v.val_unsigned;
2443 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2444 gcc_assert (o);
2445 if (for_eh_or_skip >= 0)
2446 {
2447 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2448 gcc_assert (size_of_uleb128 (r)
2449 == size_of_uleb128 (val1->v.val_unsigned));
2450 }
2451 dw2_asm_output_data_uleb128 (r, NULL);
2452 dw2_asm_output_data_uleb128 (o, NULL);
2453 }
2454 break;
2455 case DW_OP_deref_type:
2456 case DW_OP_GNU_deref_type:
2457 {
2458 unsigned long o = get_base_type_offset (val2->v.val_die_ref.die);
2459 gcc_assert (o);
2460 dw2_asm_output_data (1, val1->v.val_int, NULL);
2461 dw2_asm_output_data_uleb128 (o, NULL);
2462 }
2463 break;
2464 case DW_OP_convert:
2465 case DW_OP_reinterpret:
2466 case DW_OP_GNU_convert:
2467 case DW_OP_GNU_reinterpret:
2468 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
2469 dw2_asm_output_data_uleb128 (val1->v.val_unsigned, NULL);
2470 else
2471 {
2472 unsigned long o = get_base_type_offset (val1->v.val_die_ref.die);
2473 gcc_assert (o);
2474 dw2_asm_output_data_uleb128 (o, NULL);
2475 }
2476 break;
2477
2478 case DW_OP_GNU_parameter_ref:
2479 {
2480 unsigned long o;
2481 gcc_assert (val1->val_class == dw_val_class_die_ref);
2482 o = get_ref_die_offset (val1->v.val_die_ref.die);
2483 dw2_asm_output_data (4, o, NULL);
2484 }
2485 break;
2486
2487 default:
2488 /* Other codes have no operands. */
2489 break;
2490 }
2491 }
2492
2493 /* Output a sequence of location operations.
2494 The for_eh_or_skip parameter controls whether register numbers are
2495 converted using DWARF2_FRAME_REG_OUT, which is needed in the case that
2496 hard reg numbers have been processed via DWARF_FRAME_REGNUM (i.e. for unwind
2497 info). This should be suppressed for the cases that have not been converted
2498 (i.e. symbolic debug info), by setting the parameter < 0. See PR47324. */
2499
2500 void
2501 output_loc_sequence (dw_loc_descr_ref loc, int for_eh_or_skip)
2502 {
2503 for (; loc != NULL; loc = loc->dw_loc_next)
2504 {
2505 enum dwarf_location_atom opc = loc->dw_loc_opc;
2506 /* Output the opcode. */
2507 if (for_eh_or_skip >= 0
2508 && opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2509 {
2510 unsigned r = (opc - DW_OP_breg0);
2511 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2512 gcc_assert (r <= 31);
2513 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2514 }
2515 else if (for_eh_or_skip >= 0
2516 && opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2517 {
2518 unsigned r = (opc - DW_OP_reg0);
2519 r = DWARF2_FRAME_REG_OUT (r, for_eh_or_skip);
2520 gcc_assert (r <= 31);
2521 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2522 }
2523
2524 dw2_asm_output_data (1, opc,
2525 "%s", dwarf_stack_op_name (opc));
2526
2527 /* Output the operand(s) (if any). */
2528 output_loc_operands (loc, for_eh_or_skip);
2529 }
2530 }
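/* Example (illustrative): a one-operation expression DW_OP_fbreg -16
   is emitted as the opcode byte 0x91 followed by the SLEB128 encoding
   of -16, which is the single byte 0x70.  */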
2531
2532 /* Output location description stack opcode's operands (if any).
2533 The output is single bytes on a line, suitable for .cfi_escape. */
2534
2535 static void
2536 output_loc_operands_raw (dw_loc_descr_ref loc)
2537 {
2538 dw_val_ref val1 = &loc->dw_loc_oprnd1;
2539 dw_val_ref val2 = &loc->dw_loc_oprnd2;
2540
2541 switch (loc->dw_loc_opc)
2542 {
2543 case DW_OP_addr:
2544 case DW_OP_GNU_addr_index:
2545 case DW_OP_addrx:
2546 case DW_OP_GNU_const_index:
2547 case DW_OP_constx:
2548 case DW_OP_implicit_value:
2549 /* We cannot output addresses in .cfi_escape, only bytes. */
2550 gcc_unreachable ();
2551
2552 case DW_OP_const1u:
2553 case DW_OP_const1s:
2554 case DW_OP_pick:
2555 case DW_OP_deref_size:
2556 case DW_OP_xderef_size:
2557 fputc (',', asm_out_file);
2558 dw2_asm_output_data_raw (1, val1->v.val_int);
2559 break;
2560
2561 case DW_OP_const2u:
2562 case DW_OP_const2s:
2563 fputc (',', asm_out_file);
2564 dw2_asm_output_data_raw (2, val1->v.val_int);
2565 break;
2566
2567 case DW_OP_const4u:
2568 case DW_OP_const4s:
2569 fputc (',', asm_out_file);
2570 dw2_asm_output_data_raw (4, val1->v.val_int);
2571 break;
2572
2573 case DW_OP_const8u:
2574 case DW_OP_const8s:
2575 gcc_assert (HOST_BITS_PER_WIDE_INT >= 64);
2576 fputc (',', asm_out_file);
2577 dw2_asm_output_data_raw (8, val1->v.val_int);
2578 break;
2579
2580 case DW_OP_skip:
2581 case DW_OP_bra:
2582 {
2583 int offset;
2584
2585 gcc_assert (val1->val_class == dw_val_class_loc);
2586 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
2587
2588 fputc (',', asm_out_file);
2589 dw2_asm_output_data_raw (2, offset);
2590 }
2591 break;
2592
2593 case DW_OP_regx:
2594 {
2595 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2596 gcc_assert (size_of_uleb128 (r)
2597 == size_of_uleb128 (val1->v.val_unsigned));
2598 fputc (',', asm_out_file);
2599 dw2_asm_output_data_uleb128_raw (r);
2600 }
2601 break;
2602
2603 case DW_OP_constu:
2604 case DW_OP_plus_uconst:
2605 case DW_OP_piece:
2606 fputc (',', asm_out_file);
2607 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2608 break;
2609
2610 case DW_OP_bit_piece:
2611 fputc (',', asm_out_file);
2612 dw2_asm_output_data_uleb128_raw (val1->v.val_unsigned);
2613 dw2_asm_output_data_uleb128_raw (val2->v.val_unsigned);
2614 break;
2615
2616 case DW_OP_consts:
2617 case DW_OP_breg0:
2618 case DW_OP_breg1:
2619 case DW_OP_breg2:
2620 case DW_OP_breg3:
2621 case DW_OP_breg4:
2622 case DW_OP_breg5:
2623 case DW_OP_breg6:
2624 case DW_OP_breg7:
2625 case DW_OP_breg8:
2626 case DW_OP_breg9:
2627 case DW_OP_breg10:
2628 case DW_OP_breg11:
2629 case DW_OP_breg12:
2630 case DW_OP_breg13:
2631 case DW_OP_breg14:
2632 case DW_OP_breg15:
2633 case DW_OP_breg16:
2634 case DW_OP_breg17:
2635 case DW_OP_breg18:
2636 case DW_OP_breg19:
2637 case DW_OP_breg20:
2638 case DW_OP_breg21:
2639 case DW_OP_breg22:
2640 case DW_OP_breg23:
2641 case DW_OP_breg24:
2642 case DW_OP_breg25:
2643 case DW_OP_breg26:
2644 case DW_OP_breg27:
2645 case DW_OP_breg28:
2646 case DW_OP_breg29:
2647 case DW_OP_breg30:
2648 case DW_OP_breg31:
2649 case DW_OP_fbreg:
2650 fputc (',', asm_out_file);
2651 dw2_asm_output_data_sleb128_raw (val1->v.val_int);
2652 break;
2653
2654 case DW_OP_bregx:
2655 {
2656 unsigned r = DWARF2_FRAME_REG_OUT (val1->v.val_unsigned, 1);
2657 gcc_assert (size_of_uleb128 (r)
2658 == size_of_uleb128 (val1->v.val_unsigned));
2659 fputc (',', asm_out_file);
2660 dw2_asm_output_data_uleb128_raw (r);
2661 fputc (',', asm_out_file);
2662 dw2_asm_output_data_sleb128_raw (val2->v.val_int);
2663 }
2664 break;
2665
2666 case DW_OP_implicit_pointer:
2667 case DW_OP_entry_value:
2668 case DW_OP_const_type:
2669 case DW_OP_regval_type:
2670 case DW_OP_deref_type:
2671 case DW_OP_convert:
2672 case DW_OP_reinterpret:
2673 case DW_OP_GNU_implicit_pointer:
2674 case DW_OP_GNU_entry_value:
2675 case DW_OP_GNU_const_type:
2676 case DW_OP_GNU_regval_type:
2677 case DW_OP_GNU_deref_type:
2678 case DW_OP_GNU_convert:
2679 case DW_OP_GNU_reinterpret:
2680 case DW_OP_GNU_parameter_ref:
2681 gcc_unreachable ();
2682 break;
2683
2684 default:
2685 /* Other codes have no operands. */
2686 break;
2687 }
2688 }
2689
2690 void
2691 output_loc_sequence_raw (dw_loc_descr_ref loc)
2692 {
2693 while (1)
2694 {
2695 enum dwarf_location_atom opc = loc->dw_loc_opc;
2696 /* Output the opcode. */
2697 if (opc >= DW_OP_breg0 && opc <= DW_OP_breg31)
2698 {
2699 unsigned r = (opc - DW_OP_breg0);
2700 r = DWARF2_FRAME_REG_OUT (r, 1);
2701 gcc_assert (r <= 31);
2702 opc = (enum dwarf_location_atom) (DW_OP_breg0 + r);
2703 }
2704 else if (opc >= DW_OP_reg0 && opc <= DW_OP_reg31)
2705 {
2706 unsigned r = (opc - DW_OP_reg0);
2707 r = DWARF2_FRAME_REG_OUT (r, 1);
2708 gcc_assert (r <= 31);
2709 opc = (enum dwarf_location_atom) (DW_OP_reg0 + r);
2710 }
2711 /* Output the opcode. */
2712 fprintf (asm_out_file, "%#x", opc);
2713 output_loc_operands_raw (loc);
2714
2715 if (!loc->dw_loc_next)
2716 break;
2717 loc = loc->dw_loc_next;
2718
2719 fputc (',', asm_out_file);
2720 }
2721 }
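/* Example (illustrative, assuming the *_raw helpers print comma-separated
   hex bytes): the same DW_OP_fbreg -16 expression would come out as
   0x91,0x70, which is the form a .cfi_escape directive expects.  */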
2722
2723 /* This function builds a dwarf location descriptor sequence from a
2724 dw_cfa_location, adding the given OFFSET to the result of the
2725 expression. */
2726
2727 struct dw_loc_descr_node *
2728 build_cfa_loc (dw_cfa_location *cfa, poly_int64 offset)
2729 {
2730 struct dw_loc_descr_node *head, *tmp;
2731
2732 offset += cfa->offset;
2733
2734 if (cfa->indirect)
2735 {
2736 head = new_reg_loc_descr (cfa->reg, cfa->base_offset);
2737 head->dw_loc_oprnd1.val_class = dw_val_class_const;
2738 head->dw_loc_oprnd1.val_entry = NULL;
2739 tmp = new_loc_descr (DW_OP_deref, 0, 0);
2740 add_loc_descr (&head, tmp);
2741 loc_descr_plus_const (&head, offset);
2742 }
2743 else
2744 head = new_reg_loc_descr (cfa->reg, offset);
2745
2746 return head;
2747 }
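/* Example (illustrative): with a non-indirect CFA of DWARF register 7
   plus offset 8 and an extra OFFSET of 8, the result is the single
   operation DW_OP_breg7 16, since new_reg_loc_descr uses the short
   DW_OP_breg<n> form for register numbers up to 31.  */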
2748
2749 /* This function builds a dwarf location descriptor sequence for
2750 the address at OFFSET from the CFA when the stack is aligned to
2751 ALIGNMENT bytes. */
2752
2753 struct dw_loc_descr_node *
2754 build_cfa_aligned_loc (dw_cfa_location *cfa,
2755 poly_int64 offset, HOST_WIDE_INT alignment)
2756 {
2757 struct dw_loc_descr_node *head;
2758 unsigned int dwarf_fp
2759 = DWARF_FRAME_REGNUM (HARD_FRAME_POINTER_REGNUM);
2760
2761 /* When CFA is defined as FP+OFFSET, emulate stack alignment. */
2762 if (cfa->reg == HARD_FRAME_POINTER_REGNUM && cfa->indirect == 0)
2763 {
2764 head = new_reg_loc_descr (dwarf_fp, 0);
2765 add_loc_descr (&head, int_loc_descriptor (alignment));
2766 add_loc_descr (&head, new_loc_descr (DW_OP_and, 0, 0));
2767 loc_descr_plus_const (&head, offset);
2768 }
2769 else
2770 head = new_reg_loc_descr (dwarf_fp, offset);
2771 return head;
2772 }
2773 \f
2774 /* And now, the support for symbolic debugging information. */
2775
2776 /* .debug_str support. */
2777
2778 static void dwarf2out_init (const char *);
2779 static void dwarf2out_finish (const char *);
2780 static void dwarf2out_early_finish (const char *);
2781 static void dwarf2out_assembly_start (void);
2782 static void dwarf2out_define (unsigned int, const char *);
2783 static void dwarf2out_undef (unsigned int, const char *);
2784 static void dwarf2out_start_source_file (unsigned, const char *);
2785 static void dwarf2out_end_source_file (unsigned);
2786 static void dwarf2out_function_decl (tree);
2787 static void dwarf2out_begin_block (unsigned, unsigned);
2788 static void dwarf2out_end_block (unsigned, unsigned);
2789 static bool dwarf2out_ignore_block (const_tree);
2790 static void dwarf2out_early_global_decl (tree);
2791 static void dwarf2out_late_global_decl (tree);
2792 static void dwarf2out_type_decl (tree, int);
2793 static void dwarf2out_imported_module_or_decl (tree, tree, tree, bool, bool);
2794 static void dwarf2out_imported_module_or_decl_1 (tree, tree, tree,
2795 dw_die_ref);
2796 static void dwarf2out_abstract_function (tree);
2797 static void dwarf2out_var_location (rtx_insn *);
2798 static void dwarf2out_inline_entry (tree);
2799 static void dwarf2out_size_function (tree);
2800 static void dwarf2out_begin_function (tree);
2801 static void dwarf2out_end_function (unsigned int);
2802 static void dwarf2out_register_main_translation_unit (tree unit);
2803 static void dwarf2out_set_name (tree, tree);
2804 static void dwarf2out_register_external_die (tree decl, const char *sym,
2805 unsigned HOST_WIDE_INT off);
2806 static bool dwarf2out_die_ref_for_decl (tree decl, const char **sym,
2807 unsigned HOST_WIDE_INT *off);
2808
2809 /* The debug hooks structure. */
2810
2811 const struct gcc_debug_hooks dwarf2_debug_hooks =
2812 {
2813 dwarf2out_init,
2814 dwarf2out_finish,
2815 dwarf2out_early_finish,
2816 dwarf2out_assembly_start,
2817 dwarf2out_define,
2818 dwarf2out_undef,
2819 dwarf2out_start_source_file,
2820 dwarf2out_end_source_file,
2821 dwarf2out_begin_block,
2822 dwarf2out_end_block,
2823 dwarf2out_ignore_block,
2824 dwarf2out_source_line,
2825 dwarf2out_begin_prologue,
2826 #if VMS_DEBUGGING_INFO
2827 dwarf2out_vms_end_prologue,
2828 dwarf2out_vms_begin_epilogue,
2829 #else
2830 debug_nothing_int_charstar,
2831 debug_nothing_int_charstar,
2832 #endif
2833 dwarf2out_end_epilogue,
2834 dwarf2out_begin_function,
2835 dwarf2out_end_function, /* end_function */
2836 dwarf2out_register_main_translation_unit,
2837 dwarf2out_function_decl, /* function_decl */
2838 dwarf2out_early_global_decl,
2839 dwarf2out_late_global_decl,
2840 dwarf2out_type_decl, /* type_decl */
2841 dwarf2out_imported_module_or_decl,
2842 dwarf2out_die_ref_for_decl,
2843 dwarf2out_register_external_die,
2844 debug_nothing_tree, /* deferred_inline_function */
2845 /* The DWARF 2 backend tries to reduce debugging bloat by not
2846 emitting the abstract description of inline functions until
2847 something tries to reference them. */
2848 dwarf2out_abstract_function, /* outlining_inline_function */
2849 debug_nothing_rtx_code_label, /* label */
2850 debug_nothing_int, /* handle_pch */
2851 dwarf2out_var_location,
2852 dwarf2out_inline_entry, /* inline_entry */
2853 dwarf2out_size_function, /* size_function */
2854 dwarf2out_switch_text_section,
2855 dwarf2out_set_name,
2856 1, /* start_end_main_source_file */
2857 TYPE_SYMTAB_IS_DIE /* tree_type_symtab_field */
2858 };
2859
2860 const struct gcc_debug_hooks dwarf2_lineno_debug_hooks =
2861 {
2862 dwarf2out_init,
2863 debug_nothing_charstar,
2864 debug_nothing_charstar,
2865 dwarf2out_assembly_start,
2866 debug_nothing_int_charstar,
2867 debug_nothing_int_charstar,
2868 debug_nothing_int_charstar,
2869 debug_nothing_int,
2870 debug_nothing_int_int, /* begin_block */
2871 debug_nothing_int_int, /* end_block */
2872 debug_true_const_tree, /* ignore_block */
2873 dwarf2out_source_line, /* source_line */
2874 debug_nothing_int_int_charstar, /* begin_prologue */
2875 debug_nothing_int_charstar, /* end_prologue */
2876 debug_nothing_int_charstar, /* begin_epilogue */
2877 debug_nothing_int_charstar, /* end_epilogue */
2878 debug_nothing_tree, /* begin_function */
2879 debug_nothing_int, /* end_function */
2880 debug_nothing_tree, /* register_main_translation_unit */
2881 debug_nothing_tree, /* function_decl */
2882 debug_nothing_tree, /* early_global_decl */
2883 debug_nothing_tree, /* late_global_decl */
2884 debug_nothing_tree_int, /* type_decl */
2885 debug_nothing_tree_tree_tree_bool_bool,/* imported_module_or_decl */
2886 debug_false_tree_charstarstar_uhwistar,/* die_ref_for_decl */
2887 debug_nothing_tree_charstar_uhwi, /* register_external_die */
2888 debug_nothing_tree, /* deferred_inline_function */
2889 debug_nothing_tree, /* outlining_inline_function */
2890 debug_nothing_rtx_code_label, /* label */
2891 debug_nothing_int, /* handle_pch */
2892 debug_nothing_rtx_insn, /* var_location */
2893 debug_nothing_tree, /* inline_entry */
2894 debug_nothing_tree, /* size_function */
2895 debug_nothing_void, /* switch_text_section */
2896 debug_nothing_tree_tree, /* set_name */
2897 0, /* start_end_main_source_file */
2898 TYPE_SYMTAB_IS_ADDRESS /* tree_type_symtab_field */
2899 };
2900 \f
2901 /* NOTE: In the comments in this file, many references are made to
2902 "Debugging Information Entries". This term is abbreviated as `DIE'
2903 throughout the remainder of this file. */
2904
2905 /* An internal representation of the DWARF output is built, and then
2906 walked to generate the DWARF debugging info. The walk of the internal
2907 representation is done after the entire program has been compiled.
2908 The types below are used to describe the internal representation. */
2909
2910 /* Whether to put type DIEs into their own section .debug_types instead
2911 of making them part of the .debug_info section. Only supported for
2912 Dwarf V4 or higher, and only if the user didn't disable them through
2913 -fno-debug-types-section. It is more efficient to put them in
2914 separate comdat sections since the linker will then be able to
2915 remove duplicates. But not all tools support .debug_types sections
2916 yet. For Dwarf V5 or higher .debug_types doesn't exist any more;
2917 the equivalent is a DW_UT_type unit in the .debug_info section. For
2918 late LTO debug there should be almost no types emitted, so avoid
2919 enabling -fdebug-types-section there. */
2920
2921 #define use_debug_types (dwarf_version >= 4 \
2922 && flag_debug_types_section \
2923 && !in_lto_p)
2924
2925 /* Various DIE's use offsets relative to the beginning of the
2926 .debug_info section to refer to each other. */
2927
2928 typedef long int dw_offset;
2929
2930 struct comdat_type_node;
2931
2932 /* The entries in the line_info table more-or-less mirror the opcodes
2933 that are used in the real dwarf line table. Arrays of these entries
2934 are collected per section when DWARF2_ASM_LINE_DEBUG_INFO is not
2935 supported. */
2936
2937 enum dw_line_info_opcode {
2938 /* Emit DW_LNE_set_address; the operand is the label index. */
2939 LI_set_address,
2940
2941 /* Emit a row to the matrix with the given line. This may be done
2942 via any combination of DW_LNS_copy, DW_LNS_advance_line, and
2943 special opcodes. */
2944 LI_set_line,
2945
2946 /* Emit a DW_LNS_set_file. */
2947 LI_set_file,
2948
2949 /* Emit a DW_LNS_set_column. */
2950 LI_set_column,
2951
2952 /* Emit a DW_LNS_negate_stmt; the operand is ignored. */
2953 LI_negate_stmt,
2954
2955 /* Emit a DW_LNS_set_prologue_end/epilogue_begin; the operand is ignored. */
2956 LI_set_prologue_end,
2957 LI_set_epilogue_begin,
2958
2959 /* Emit a DW_LNE_set_discriminator. */
2960 LI_set_discriminator,
2961
2962 /* Output a Fixed Advance PC; the target PC is the label index; the
2963 base PC is the previous LI_adv_address or LI_set_address entry.
2964 We only use this when emitting debug views without assembler
2965 support, at explicit user request. Ideally, we should only use
2966 it when the offset might be zero but we can't tell: it's the only
2967 way to maybe change the PC without resetting the view number. */
2968 LI_adv_address
2969 };
2970
2971 typedef struct GTY(()) dw_line_info_struct {
2972 enum dw_line_info_opcode opcode;
2973 unsigned int val;
2974 } dw_line_info_entry;
2975
2976
2977 struct GTY(()) dw_line_info_table {
2978 /* The label that marks the end of this section. */
2979 const char *end_label;
2980
2981 /* The values for the last row of the matrix, as collected in the table.
2982 These are used to minimize the changes to the next row. */
2983 unsigned int file_num;
2984 unsigned int line_num;
2985 unsigned int column_num;
2986 int discrim_num;
2987 bool is_stmt;
2988 bool in_use;
2989
2990 /* This denotes the NEXT view number.
2991
2992 If it is 0, it is known that the NEXT view will be the first view
2993 at the given PC.
2994
2995 If it is -1, we're forcing the view number to be reset, e.g. at a
2996 function entry.
2997
2998 The meaning of other nonzero values depends on whether we're
2999 computing views internally or leaving it for the assembler to do
3000 so. If we're emitting them internally, view denotes the view
3001 number since the last known advance of PC. If we're leaving it
3002 for the assembler, it denotes the LVU label number that we're
3003 going to ask the assembler to assign. */
3004 var_loc_view view;
3005
3006 /* This counts the number of symbolic views emitted in this table
3007 since the latest view reset. Its max value, over all tables,
3008 sets symview_upper_bound. */
3009 var_loc_view symviews_since_reset;
3010
3011 #define FORCE_RESET_NEXT_VIEW(x) ((x) = (var_loc_view)-1)
3012 #define RESET_NEXT_VIEW(x) ((x) = (var_loc_view)0)
3013 #define FORCE_RESETTING_VIEW_P(x) ((x) == (var_loc_view)-1)
3014 #define RESETTING_VIEW_P(x) ((x) == (var_loc_view)0 || FORCE_RESETTING_VIEW_P (x))
3015
3016 vec<dw_line_info_entry, va_gc> *entries;
3017 };
3018
3019 /* This is an upper bound for view numbers that the assembler may
3020 assign to symbolic views output in this translation unit. It is used to
3021 decide how big a field to use to represent view numbers in
3022 symview-classed attributes. */
3023
3024 static var_loc_view symview_upper_bound;
3025
3026 /* If we're keeping track of location views and their reset points, and
3027 INSN is a reset point (i.e., it necessarily advances the PC), mark
3028 the next view in TABLE as reset. */
3029
3030 static void
3031 maybe_reset_location_view (rtx_insn *insn, dw_line_info_table *table)
3032 {
3033 if (!debug_internal_reset_location_views)
3034 return;
3035
3036 /* Maybe turn (part of?) this test into a default target hook. */
3037 int reset = 0;
3038
3039 if (targetm.reset_location_view)
3040 reset = targetm.reset_location_view (insn);
3041
3042 if (reset)
3043 ;
3044 else if (JUMP_TABLE_DATA_P (insn))
3045 reset = 1;
3046 else if (GET_CODE (insn) == USE
3047 || GET_CODE (insn) == CLOBBER
3048 || GET_CODE (insn) == ASM_INPUT
3049 || asm_noperands (insn) >= 0)
3050 ;
3051 else if (get_attr_min_length (insn) > 0)
3052 reset = 1;
3053
3054 if (reset > 0 && !RESETTING_VIEW_P (table->view))
3055 RESET_NEXT_VIEW (table->view);
3056 }
3057
3058 /* Each DIE attribute has a field specifying the attribute kind,
3059 a link to the next attribute in the chain, and an attribute value.
3060 Attributes are typically linked below the DIE they modify. */
3061
3062 typedef struct GTY(()) dw_attr_struct {
3063 enum dwarf_attribute dw_attr;
3064 dw_val_node dw_attr_val;
3065 }
3066 dw_attr_node;
3067
3068
3069 /* The Debugging Information Entry (DIE) structure. DIEs form a tree.
3070 The children of each node form a circular list linked by
3071 die_sib. die_child points to the node *before* the "first" child node. */
3072
3073 typedef struct GTY((chain_circular ("%h.die_sib"), for_user)) die_struct {
3074 union die_symbol_or_type_node
3075 {
3076 const char * GTY ((tag ("0"))) die_symbol;
3077 comdat_type_node *GTY ((tag ("1"))) die_type_node;
3078 }
3079 GTY ((desc ("%0.comdat_type_p"))) die_id;
3080 vec<dw_attr_node, va_gc> *die_attr;
3081 dw_die_ref die_parent;
3082 dw_die_ref die_child;
3083 dw_die_ref die_sib;
3084 dw_die_ref die_definition; /* ref from a specification to its definition */
3085 dw_offset die_offset;
3086 unsigned long die_abbrev;
3087 int die_mark;
3088 unsigned int decl_id;
3089 enum dwarf_tag die_tag;
3090 /* Die is used and must not be pruned as unused. */
3091 BOOL_BITFIELD die_perennial_p : 1;
3092 BOOL_BITFIELD comdat_type_p : 1; /* DIE has a type signature */
3093 /* For an external ref to die_symbol if die_offset contains an extra
3094 offset to that symbol. */
3095 BOOL_BITFIELD with_offset : 1;
3096 /* Whether this DIE was removed from the DIE tree, for example via
3097 prune_unused_types. We don't consider those present from the
3098 DIE lookup routines. */
3099 BOOL_BITFIELD removed : 1;
3100 /* Lots of spare bits. */
3101 }
3102 die_node;
3103
3104 /* Set to TRUE while dwarf2out_early_global_decl is running. */
3105 static bool early_dwarf;
3106 static bool early_dwarf_finished;
3107 class set_early_dwarf {
3108 public:
3109 bool saved;
3110 set_early_dwarf () : saved(early_dwarf)
3111 {
3112 gcc_assert (! early_dwarf_finished);
3113 early_dwarf = true;
3114 }
3115 ~set_early_dwarf () { early_dwarf = saved; }
3116 };
3117
3118 /* Evaluate 'expr' while 'c' is set to each child of DIE in order. */
3119 #define FOR_EACH_CHILD(die, c, expr) do { \
3120 c = die->die_child; \
3121 if (c) do { \
3122 c = c->die_sib; \
3123 expr; \
3124 } while (c != die->die_child); \
3125 } while (0)
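/* Illustrative use (not from the original source): dumping every child
   of DIE could be written as
     FOR_EACH_CHILD (die, c, print_die (c, outfile));
   where C is set to each child in turn, in sibling order.  */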
3126
3127 /* The pubname structure */
3128
3129 typedef struct GTY(()) pubname_struct {
3130 dw_die_ref die;
3131 const char *name;
3132 }
3133 pubname_entry;
3134
3135
3136 struct GTY(()) dw_ranges {
3137 const char *label;
3138 /* If this is positive, it's a block number, otherwise it's a
3139 bitwise-negated index into dw_ranges_by_label. */
3140 int num;
3141 /* Index for the range list for DW_FORM_rnglistx. */
3142 unsigned int idx : 31;
3143 /* True if this range might be possibly in a different section
3144 from previous entry. */
3145 unsigned int maybe_new_sec : 1;
3146 };
3147
3148 /* A structure to hold a macinfo entry. */
3149
3150 typedef struct GTY(()) macinfo_struct {
3151 unsigned char code;
3152 unsigned HOST_WIDE_INT lineno;
3153 const char *info;
3154 }
3155 macinfo_entry;
3156
3157
3158 struct GTY(()) dw_ranges_by_label {
3159 const char *begin;
3160 const char *end;
3161 };
3162
3163 /* The comdat type node structure. */
3164 struct GTY(()) comdat_type_node
3165 {
3166 dw_die_ref root_die;
3167 dw_die_ref type_die;
3168 dw_die_ref skeleton_die;
3169 char signature[DWARF_TYPE_SIGNATURE_SIZE];
3170 comdat_type_node *next;
3171 };
3172
3173 /* A list of DIEs for which we can't determine ancestry (parent_die
3174 field) just yet. Later in dwarf2out_finish we will fill in the
3175 missing bits. */
3176 typedef struct GTY(()) limbo_die_struct {
3177 dw_die_ref die;
3178 /* The tree for which this DIE was created. We use this to
3179 determine ancestry later. */
3180 tree created_for;
3181 struct limbo_die_struct *next;
3182 }
3183 limbo_die_node;
3184
3185 typedef struct skeleton_chain_struct
3186 {
3187 dw_die_ref old_die;
3188 dw_die_ref new_die;
3189 struct skeleton_chain_struct *parent;
3190 }
3191 skeleton_chain_node;
3192
3193 /* Define a macro which returns nonzero for a TYPE_DECL which was
3194 implicitly generated for a type.
3195
3196 Note that, unlike the C front-end (which generates a NULL named
3197 TYPE_DECL node for each complete tagged type, each array type,
3198 and each function type node created) the C++ front-end generates
3199 a _named_ TYPE_DECL node for each tagged type node created.
3200 These TYPE_DECLs have DECL_ARTIFICIAL set, so we know not to
3201 generate a DW_TAG_typedef DIE for them. Likewise with the Ada
3202 front-end, but for each type, tagged or not. */
3203
3204 #define TYPE_DECL_IS_STUB(decl) \
3205 (DECL_NAME (decl) == NULL_TREE \
3206 || (DECL_ARTIFICIAL (decl) \
3207 && ((decl == TYPE_STUB_DECL (TREE_TYPE (decl))) \
3208 /* This is necessary for stub decls that \
3209 appear in nested inline functions. */ \
3210 || (DECL_ABSTRACT_ORIGIN (decl) != NULL_TREE \
3211 && (decl_ultimate_origin (decl) \
3212 == TYPE_STUB_DECL (TREE_TYPE (decl)))))))
3213
3214 /* Information concerning the compilation unit's programming
3215 language, and compiler version. */
3216
3217 /* Fixed size portion of the DWARF compilation unit header. */
3218 #define DWARF_COMPILE_UNIT_HEADER_SIZE \
3219 (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE \
3220 + (dwarf_version >= 5 ? 4 : 3))
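/* Worked example (illustrative): with 32-bit DWARF the initial length is
   4 bytes and DWARF_OFFSET_SIZE is 4, so the fixed header is
   4 + 4 + 3 == 11 bytes for DWARF 2-4 (version + address_size) and
   4 + 4 + 4 == 12 bytes for DWARF 5, which adds a unit_type byte.  */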
3221
3222 /* Fixed size portion of the DWARF comdat type unit header. */
3223 #define DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE \
3224 (DWARF_COMPILE_UNIT_HEADER_SIZE \
3225 + DWARF_TYPE_SIGNATURE_SIZE + DWARF_OFFSET_SIZE)
3226
3227 /* Fixed size portion of the DWARF skeleton compilation unit header. */
3228 #define DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE \
3229 (DWARF_COMPILE_UNIT_HEADER_SIZE + (dwarf_version >= 5 ? 8 : 0))
3230
3231 /* Fixed size portion of public names info. */
3232 #define DWARF_PUBNAMES_HEADER_SIZE (2 * DWARF_OFFSET_SIZE + 2)
3233
3234 /* Fixed size portion of the address range info. */
3235 #define DWARF_ARANGES_HEADER_SIZE \
3236 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3237 DWARF2_ADDR_SIZE * 2) \
3238 - DWARF_INITIAL_LENGTH_SIZE)
3239
3240 /* Size of padding portion in the address range info. It must be
3241 aligned to twice the pointer size. */
3242 #define DWARF_ARANGES_PAD_SIZE \
3243 (DWARF_ROUND (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4, \
3244 DWARF2_ADDR_SIZE * 2) \
3245 - (DWARF_INITIAL_LENGTH_SIZE + DWARF_OFFSET_SIZE + 4))
3246
3247 /* Use assembler line directives if available. */
3248 #ifndef DWARF2_ASM_LINE_DEBUG_INFO
3249 #ifdef HAVE_AS_DWARF2_DEBUG_LINE
3250 #define DWARF2_ASM_LINE_DEBUG_INFO 1
3251 #else
3252 #define DWARF2_ASM_LINE_DEBUG_INFO 0
3253 #endif
3254 #endif
3255
3256 /* Use assembler views in line directives if available. */
3257 #ifndef DWARF2_ASM_VIEW_DEBUG_INFO
3258 #ifdef HAVE_AS_DWARF2_DEBUG_VIEW
3259 #define DWARF2_ASM_VIEW_DEBUG_INFO 1
3260 #else
3261 #define DWARF2_ASM_VIEW_DEBUG_INFO 0
3262 #endif
3263 #endif
3264
3265 /* Return true if GCC configure detected assembler support for .loc. */
3266
3267 bool
3268 dwarf2out_default_as_loc_support (void)
3269 {
3270 return DWARF2_ASM_LINE_DEBUG_INFO;
3271 #if (GCC_VERSION >= 3000)
3272 # undef DWARF2_ASM_LINE_DEBUG_INFO
3273 # pragma GCC poison DWARF2_ASM_LINE_DEBUG_INFO
3274 #endif
3275 }
3276
3277 /* Return true if GCC configure detected assembler support for views
3278 in .loc directives. */
3279
3280 bool
3281 dwarf2out_default_as_locview_support (void)
3282 {
3283 return DWARF2_ASM_VIEW_DEBUG_INFO;
3284 #if (GCC_VERSION >= 3000)
3285 # undef DWARF2_ASM_VIEW_DEBUG_INFO
3286 # pragma GCC poison DWARF2_ASM_VIEW_DEBUG_INFO
3287 #endif
3288 }
3289
3290 /* A bit is set in ZERO_VIEW_P if we are using the assembler-supported
3291 view computation, and it refers to a view identifier for which we
3292 will not emit a label because it is known to map to a view number
3293 zero. We won't allocate the bitmap if we're not using assembler
3294 support for location views, but we have to make the variable
3295 visible for GGC and for code that will be optimized out for lack of
3296 support but that's still parsed and compiled. We could abstract it
3297 out with macros, but it's not worth it. */
3298 static GTY(()) bitmap zero_view_p;
3299
3300 /* Evaluate to TRUE iff N is known to identify the first location view
3301 at its PC. When not using assembler location view computation,
3302 that must be view number zero. Otherwise, ZERO_VIEW_P is allocated
3303 and the view label numbers recorded in it are the ones known to be
3304 zero. */
3305 #define ZERO_VIEW_P(N) ((N) == (var_loc_view)0 \
3306 || (N) == (var_loc_view)-1 \
3307 || (zero_view_p \
3308 && bitmap_bit_p (zero_view_p, (N))))
3309
3310 /* Return true iff we're to emit .loc directives for the assembler to
3311 generate line number sections.
3312
3313 When we're not emitting views, all we need from the assembler is
3314 support for .loc directives.
3315
3316 If we are emitting views, we can only use the assembler's .loc
3317 support if it also supports views.
3318
3319 When the compiler is emitting the line number programs and
3320 computing view numbers itself, it resets view numbers at known PC
3321 changes and counts from that, and then it emits view numbers as
3322 literal constants in locviewlists. There are cases in which the
3323 compiler is not sure about PC changes, e.g. when extra alignment is
3324 requested for a label. In these cases, the compiler may not reset
3325 the view counter, and the potential PC advance in the line number
3326 program will use an opcode that does not reset the view counter
3327 even if the PC actually changes, so that compiler and debug info
3328 consumer can keep view numbers in sync.
3329
3330 When the compiler defers view computation to the assembler, it
3331 emits symbolic view numbers in locviewlists, with the exception of
3332 views known to be zero (forced resets, or reset after
3333 compiler-visible PC changes): instead of emitting symbols for
3334 these, we emit literal zero and assert the assembler agrees with
3335 the compiler's assessment. We could use symbolic views everywhere,
3336 instead of special-casing zero views, but then we'd be unable to
3337 optimize out locviewlists that contain only zeros. */
3338
3339 static bool
3340 output_asm_line_debug_info (void)
3341 {
3342 return (dwarf2out_as_loc_support
3343 && (dwarf2out_as_locview_support
3344 || !debug_variable_location_views));
3345 }
3346
3347 /* Minimum line offset in a special line info. opcode.
3348 This value was chosen to give a reasonable range of values. */
3349 #define DWARF_LINE_BASE -10
3350
3351 /* First special line opcode - leave room for the standard opcodes. */
3352 #define DWARF_LINE_OPCODE_BASE ((int)DW_LNS_set_isa + 1)
3353
3354 /* Range of line offsets in a special line info. opcode. */
3355 #define DWARF_LINE_RANGE (254-DWARF_LINE_OPCODE_BASE+1)
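/* Illustrative arithmetic (not part of the original source): with
   DW_LNS_set_isa == 12 the opcode base is 13 and the line range is
   254 - 13 + 1 == 242, so a special opcode that advances the line by 1
   and the address by 0 operations would be encoded as
   (1 - DWARF_LINE_BASE) + 242 * 0 + 13 == 24.  */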
3356
3357 /* Flag that indicates the initial value of the is_stmt_start flag.
3358 In the present implementation, we do not mark any lines as
3359 the beginning of a source statement, because that information
3360 is not made available by the GCC front-end. */
3361 #define DWARF_LINE_DEFAULT_IS_STMT_START 1
3362
3363 /* Maximum number of operations per instruction bundle. */
3364 #ifndef DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN
3365 #define DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN 1
3366 #endif
3367
3368 /* This location is used by calc_die_sizes() to keep track of
3369 the offset of each DIE within the .debug_info section. */
3370 static unsigned long next_die_offset;
3371
3372 /* Record the root of the DIE's built for the current compilation unit. */
3373 static GTY(()) dw_die_ref single_comp_unit_die;
3374
3375 /* A list of type DIEs that have been separated into comdat sections. */
3376 static GTY(()) comdat_type_node *comdat_type_list;
3377
3378 /* A list of CU DIEs that have been separated. */
3379 static GTY(()) limbo_die_node *cu_die_list;
3380
3381 /* A list of DIEs with a NULL parent waiting to be relocated. */
3382 static GTY(()) limbo_die_node *limbo_die_list;
3383
3384 /* A list of DIEs for which we may have to generate
3385 DW_AT_{,MIPS_}linkage_name once their DECL_ASSEMBLER_NAMEs are set. */
3386 static GTY(()) limbo_die_node *deferred_asm_name;
3387
3388 struct dwarf_file_hasher : ggc_ptr_hash<dwarf_file_data>
3389 {
3390 typedef const char *compare_type;
3391
3392 static hashval_t hash (dwarf_file_data *);
3393 static bool equal (dwarf_file_data *, const char *);
3394 };
3395
3396 /* Filenames referenced by this compilation unit. */
3397 static GTY(()) hash_table<dwarf_file_hasher> *file_table;
3398
3399 struct decl_die_hasher : ggc_ptr_hash<die_node>
3400 {
3401 typedef tree compare_type;
3402
3403 static hashval_t hash (die_node *);
3404 static bool equal (die_node *, tree);
3405 };
3406 /* A hash table of references to DIE's that describe declarations.
3407 The key is a DECL_UID() which is a unique number identifying each decl. */
3408 static GTY (()) hash_table<decl_die_hasher> *decl_die_table;
3409
3410 struct GTY ((for_user)) variable_value_struct {
3411 unsigned int decl_id;
3412 vec<dw_die_ref, va_gc> *dies;
3413 };
3414
3415 struct variable_value_hasher : ggc_ptr_hash<variable_value_struct>
3416 {
3417 typedef tree compare_type;
3418
3419 static hashval_t hash (variable_value_struct *);
3420 static bool equal (variable_value_struct *, tree);
3421 };
3422 /* A hash table of DIEs that contain DW_OP_GNU_variable_value with
3423 dw_val_class_decl_ref class, indexed by the FUNCTION_DECL that is
3424 the DECL_CONTEXT of the referenced VAR_DECLs. */
3425 static GTY (()) hash_table<variable_value_hasher> *variable_value_hash;
3426
3427 struct block_die_hasher : ggc_ptr_hash<die_struct>
3428 {
3429 static hashval_t hash (die_struct *);
3430 static bool equal (die_struct *, die_struct *);
3431 };
3432
3433 /* A hash table of references to DIE's that describe COMMON blocks.
3434 The key is DECL_UID() ^ die_parent. */
3435 static GTY (()) hash_table<block_die_hasher> *common_block_die_table;
3436
3437 typedef struct GTY(()) die_arg_entry_struct {
3438 dw_die_ref die;
3439 tree arg;
3440 } die_arg_entry;
3441
3442
3443 /* Node of the variable location list. */
3444 struct GTY ((chain_next ("%h.next"))) var_loc_node {
3445 /* Either NOTE_INSN_VAR_LOCATION, or, for SRA optimized variables,
3446 EXPR_LIST chain. For small bitsizes, bitsize is encoded
3447 in mode of the EXPR_LIST node and first EXPR_LIST operand
3448 is either NOTE_INSN_VAR_LOCATION for a piece with a known
3449 location or NULL for padding. For larger bitsizes,
3450 mode is 0 and first operand is a CONCAT with bitsize
3451 as first CONCAT operand and NOTE_INSN_VAR_LOCATION resp.
3452 NULL as second operand. */
3453 rtx GTY (()) loc;
3454 const char * GTY (()) label;
3455 struct var_loc_node * GTY (()) next;
3456 var_loc_view view;
3457 };
3458
3459 /* Variable location list. */
3460 struct GTY ((for_user)) var_loc_list_def {
3461 struct var_loc_node * GTY (()) first;
3462
3463 /* Pointer to the last or last-but-one element of the
3464 chained list. If the list is empty, both first and
3465 last are NULL. If the list contains just one node,
3466 or the last node is certainly not redundant, this points
3467 to the last node; otherwise it points to the last but one.
3468 Do not mark it for GC because it is marked through the chain. */
3469 struct var_loc_node * GTY ((skip ("%h"))) last;
3470
3471 /* Pointer to the last element before a section switch;
3472 if NULL, either sections weren't switched or first
3473 is after the section switch. */
3474 struct var_loc_node * GTY ((skip ("%h"))) last_before_switch;
3475
3476 /* DECL_UID of the variable decl. */
3477 unsigned int decl_id;
3478 };
3479 typedef struct var_loc_list_def var_loc_list;
3480
3481 /* Call argument location list. */
3482 struct GTY ((chain_next ("%h.next"))) call_arg_loc_node {
3483 rtx GTY (()) call_arg_loc_note;
3484 const char * GTY (()) label;
3485 tree GTY (()) block;
3486 bool tail_call_p;
3487 rtx GTY (()) symbol_ref;
3488 struct call_arg_loc_node * GTY (()) next;
3489 };
3490
3491
3492 struct decl_loc_hasher : ggc_ptr_hash<var_loc_list>
3493 {
3494 typedef const_tree compare_type;
3495
3496 static hashval_t hash (var_loc_list *);
3497 static bool equal (var_loc_list *, const_tree);
3498 };
3499
3500 /* Table of decl location linked lists. */
3501 static GTY (()) hash_table<decl_loc_hasher> *decl_loc_table;
3502
3503 /* Head and tail of call_arg_loc chain. */
3504 static GTY (()) struct call_arg_loc_node *call_arg_locations;
3505 static struct call_arg_loc_node *call_arg_loc_last;
3506
3507 /* Number of call sites in the current function. */
3508 static int call_site_count = -1;
3509 /* Number of tail call sites in the current function. */
3510 static int tail_call_site_count = -1;
3511
3512 /* A cached location list. */
3513 struct GTY ((for_user)) cached_dw_loc_list_def {
3514 /* The DECL_UID of the decl that this entry describes. */
3515 unsigned int decl_id;
3516
3517 /* The cached location list. */
3518 dw_loc_list_ref loc_list;
3519 };
3520 typedef struct cached_dw_loc_list_def cached_dw_loc_list;
3521
3522 struct dw_loc_list_hasher : ggc_ptr_hash<cached_dw_loc_list>
3523 {
3524
3525 typedef const_tree compare_type;
3526
3527 static hashval_t hash (cached_dw_loc_list *);
3528 static bool equal (cached_dw_loc_list *, const_tree);
3529 };
3530
3531 /* Table of cached location lists. */
3532 static GTY (()) hash_table<dw_loc_list_hasher> *cached_dw_loc_list_table;
3533
3534 /* A vector of references to DIE's that are uniquely identified by their tag,
3535 presence/absence of children DIE's, and list of attribute/value pairs. */
3536 static GTY(()) vec<dw_die_ref, va_gc> *abbrev_die_table;
3537
3538 /* A hash map to remember the stack usage for DWARF procedures. The value
3539 stored is the difference in stack size between just before the DWARF
3540 procedure is invoked and just after it returns. In other words, for a DWARF
3541 procedure that consumes N stack slots and pushes M, this stores M - N. */
3542 static hash_map<dw_die_ref, int> *dwarf_proc_stack_usage_map;
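/* As a rough illustration of the bookkeeping above (a made-up example, not
   taken from real output): a hypothetical DWARF procedure that consumes two
   stack slots (N = 2) and pushes a single result (M = 1) would be recorded
   in this map with the value M - N = 1 - 2 = -1.  */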
3543
3544 /* A global counter for generating labels for line number data. */
3545 static unsigned int line_info_label_num;
3546
3547 /* The current table to which we should emit line number information
3548 for the current function. This will be set up at the beginning of
3549 assembly for the function. */
3550 static GTY(()) dw_line_info_table *cur_line_info_table;
3551
3552 /* The two default tables of line number info. */
3553 static GTY(()) dw_line_info_table *text_section_line_info;
3554 static GTY(()) dw_line_info_table *cold_text_section_line_info;
3555
3556 /* The set of all non-default tables of line number info. */
3557 static GTY(()) vec<dw_line_info_table *, va_gc> *separate_line_info;
3558
3559 /* A flag to tell the pubnames/pubtypes export code whether there is an
3560 info section to refer to. */
3561 static bool info_section_emitted;
3562
3563 /* A pointer to the base of a table that contains a list of publicly
3564 accessible names. */
3565 static GTY (()) vec<pubname_entry, va_gc> *pubname_table;
3566
3567 /* A pointer to the base of a table that contains a list of publicly
3568 accessible types. */
3569 static GTY (()) vec<pubname_entry, va_gc> *pubtype_table;
3570
3571 /* A pointer to the base of a table that contains a list of macro
3572 defines/undefines (and file start/end markers). */
3573 static GTY (()) vec<macinfo_entry, va_gc> *macinfo_table;
3574
3575 /* True if .debug_macinfo or .debug_macros section is going to be
3576 emitted. */
3577 #define have_macinfo \
3578 ((!XCOFF_DEBUGGING_INFO || HAVE_XCOFF_DWARF_EXTRAS) \
3579 && debug_info_level >= DINFO_LEVEL_VERBOSE \
3580 && !macinfo_table->is_empty ())
3581
3582 /* Vector of dies for which we should generate .debug_ranges info. */
3583 static GTY (()) vec<dw_ranges, va_gc> *ranges_table;
3584
3585 /* Vector of pairs of labels referenced in ranges_table. */
3586 static GTY (()) vec<dw_ranges_by_label, va_gc> *ranges_by_label;
3587
3588 /* Whether we have location lists that need outputting. */
3589 static GTY(()) bool have_location_lists;
3590
3591 /* Unique label counter. */
3592 static GTY(()) unsigned int loclabel_num;
3593
3594 /* Unique label counter for point-of-call tables. */
3595 static GTY(()) unsigned int poc_label_num;
3596
3597 /* The last file entry emitted by maybe_emit_file(). */
3598 static GTY(()) struct dwarf_file_data * last_emitted_file;
3599
3600 /* Number of internal labels generated by gen_internal_sym(). */
3601 static GTY(()) int label_num;
3602
3603 static GTY(()) vec<die_arg_entry, va_gc> *tmpl_value_parm_die_table;
3604
3605 /* Instances of generic types for which we need to generate debug
3606 info that describes their generic parameters and arguments. That
3607 generation needs to happen once all types are properly laid out so
3608 we do it at the end of compilation. */
3609 static GTY(()) vec<tree, va_gc> *generic_type_instances;
3610
3611 /* Offset from the "steady-state frame pointer" to the frame base,
3612 within the current function. */
3613 static poly_int64 frame_pointer_fb_offset;
3614 static bool frame_pointer_fb_offset_valid;
3615
3616 static vec<dw_die_ref> base_types;
3617
3618 /* Flags to represent a set of attribute classes for attributes that represent
3619 a scalar value (bounds, pointers, ...). */
3620 enum dw_scalar_form
3621 {
3622 dw_scalar_form_constant = 0x01,
3623 dw_scalar_form_exprloc = 0x02,
3624 dw_scalar_form_reference = 0x04
3625 };
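/* These values are powers of two so they can be used as a bit mask; a caller
   willing to accept, say, either a constant or a DWARF expression would pass
   dw_scalar_form_constant | dw_scalar_form_exprloc.  */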
3626
3627 /* Forward declarations for functions defined in this file. */
3628
3629 static int is_pseudo_reg (const_rtx);
3630 static tree type_main_variant (tree);
3631 static int is_tagged_type (const_tree);
3632 static const char *dwarf_tag_name (unsigned);
3633 static const char *dwarf_attr_name (unsigned);
3634 static const char *dwarf_form_name (unsigned);
3635 static tree decl_ultimate_origin (const_tree);
3636 static tree decl_class_context (tree);
3637 static void add_dwarf_attr (dw_die_ref, dw_attr_node *);
3638 static inline enum dw_val_class AT_class (dw_attr_node *);
3639 static inline unsigned int AT_index (dw_attr_node *);
3640 static void add_AT_flag (dw_die_ref, enum dwarf_attribute, unsigned);
3641 static inline unsigned AT_flag (dw_attr_node *);
3642 static void add_AT_int (dw_die_ref, enum dwarf_attribute, HOST_WIDE_INT);
3643 static inline HOST_WIDE_INT AT_int (dw_attr_node *);
3644 static void add_AT_unsigned (dw_die_ref, enum dwarf_attribute, unsigned HOST_WIDE_INT);
3645 static inline unsigned HOST_WIDE_INT AT_unsigned (dw_attr_node *);
3646 static void add_AT_double (dw_die_ref, enum dwarf_attribute,
3647 HOST_WIDE_INT, unsigned HOST_WIDE_INT);
3648 static inline void add_AT_vec (dw_die_ref, enum dwarf_attribute, unsigned int,
3649 unsigned int, unsigned char *);
3650 static void add_AT_data8 (dw_die_ref, enum dwarf_attribute, unsigned char *);
3651 static void add_AT_string (dw_die_ref, enum dwarf_attribute, const char *);
3652 static inline const char *AT_string (dw_attr_node *);
3653 static enum dwarf_form AT_string_form (dw_attr_node *);
3654 static void add_AT_die_ref (dw_die_ref, enum dwarf_attribute, dw_die_ref);
3655 static void add_AT_specification (dw_die_ref, dw_die_ref);
3656 static inline dw_die_ref AT_ref (dw_attr_node *);
3657 static inline int AT_ref_external (dw_attr_node *);
3658 static inline void set_AT_ref_external (dw_attr_node *, int);
3659 static void add_AT_loc (dw_die_ref, enum dwarf_attribute, dw_loc_descr_ref);
3660 static inline dw_loc_descr_ref AT_loc (dw_attr_node *);
3661 static void add_AT_loc_list (dw_die_ref, enum dwarf_attribute,
3662 dw_loc_list_ref);
3663 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3664 static void add_AT_view_list (dw_die_ref, enum dwarf_attribute);
3665 static inline dw_loc_list_ref AT_loc_list (dw_attr_node *);
3666 static addr_table_entry *add_addr_table_entry (void *, enum ate_kind);
3667 static void remove_addr_table_entry (addr_table_entry *);
3668 static void add_AT_addr (dw_die_ref, enum dwarf_attribute, rtx, bool);
3669 static inline rtx AT_addr (dw_attr_node *);
3670 static void add_AT_symview (dw_die_ref, enum dwarf_attribute, const char *);
3671 static void add_AT_lbl_id (dw_die_ref, enum dwarf_attribute, const char *);
3672 static void add_AT_lineptr (dw_die_ref, enum dwarf_attribute, const char *);
3673 static void add_AT_macptr (dw_die_ref, enum dwarf_attribute, const char *);
3674 static void add_AT_range_list (dw_die_ref, enum dwarf_attribute,
3675 unsigned long, bool);
3676 static inline const char *AT_lbl (dw_attr_node *);
3677 static dw_attr_node *get_AT (dw_die_ref, enum dwarf_attribute);
3678 static const char *get_AT_low_pc (dw_die_ref);
3679 static const char *get_AT_string (dw_die_ref, enum dwarf_attribute);
3680 static int get_AT_flag (dw_die_ref, enum dwarf_attribute);
3681 static unsigned get_AT_unsigned (dw_die_ref, enum dwarf_attribute);
3682 static inline dw_die_ref get_AT_ref (dw_die_ref, enum dwarf_attribute);
3683 static bool is_c (void);
3684 static bool is_cxx (void);
3685 static bool is_cxx (const_tree);
3686 static bool is_fortran (void);
3687 static bool is_ada (void);
3688 static bool remove_AT (dw_die_ref, enum dwarf_attribute);
3689 static void remove_child_TAG (dw_die_ref, enum dwarf_tag);
3690 static void add_child_die (dw_die_ref, dw_die_ref);
3691 static dw_die_ref new_die (enum dwarf_tag, dw_die_ref, tree);
3692 static dw_die_ref lookup_type_die (tree);
3693 static dw_die_ref strip_naming_typedef (tree, dw_die_ref);
3694 static dw_die_ref lookup_type_die_strip_naming_typedef (tree);
3695 static void equate_type_number_to_die (tree, dw_die_ref);
3696 static dw_die_ref lookup_decl_die (tree);
3697 static var_loc_list *lookup_decl_loc (const_tree);
3698 static void equate_decl_number_to_die (tree, dw_die_ref);
3699 static struct var_loc_node *add_var_loc_to_decl (tree, rtx, const char *, var_loc_view);
3700 static void print_spaces (FILE *);
3701 static void print_die (dw_die_ref, FILE *);
3702 static void loc_checksum (dw_loc_descr_ref, struct md5_ctx *);
3703 static void attr_checksum (dw_attr_node *, struct md5_ctx *, int *);
3704 static void die_checksum (dw_die_ref, struct md5_ctx *, int *);
3705 static void checksum_sleb128 (HOST_WIDE_INT, struct md5_ctx *);
3706 static void checksum_uleb128 (unsigned HOST_WIDE_INT, struct md5_ctx *);
3707 static void loc_checksum_ordered (dw_loc_descr_ref, struct md5_ctx *);
3708 static void attr_checksum_ordered (enum dwarf_tag, dw_attr_node *,
3709 struct md5_ctx *, int *);
3710 struct checksum_attributes;
3711 static void collect_checksum_attributes (struct checksum_attributes *, dw_die_ref);
3712 static void die_checksum_ordered (dw_die_ref, struct md5_ctx *, int *);
3713 static void checksum_die_context (dw_die_ref, struct md5_ctx *);
3714 static void generate_type_signature (dw_die_ref, comdat_type_node *);
3715 static int same_loc_p (dw_loc_descr_ref, dw_loc_descr_ref, int *);
3716 static int same_dw_val_p (const dw_val_node *, const dw_val_node *, int *);
3717 static int same_attr_p (dw_attr_node *, dw_attr_node *, int *);
3718 static int same_die_p (dw_die_ref, dw_die_ref, int *);
3719 static int is_type_die (dw_die_ref);
3720 static inline bool is_template_instantiation (dw_die_ref);
3721 static int is_declaration_die (dw_die_ref);
3722 static int should_move_die_to_comdat (dw_die_ref);
3723 static dw_die_ref clone_as_declaration (dw_die_ref);
3724 static dw_die_ref clone_die (dw_die_ref);
3725 static dw_die_ref clone_tree (dw_die_ref);
3726 static dw_die_ref copy_declaration_context (dw_die_ref, dw_die_ref);
3727 static void generate_skeleton_ancestor_tree (skeleton_chain_node *);
3728 static void generate_skeleton_bottom_up (skeleton_chain_node *);
3729 static dw_die_ref generate_skeleton (dw_die_ref);
3730 static dw_die_ref remove_child_or_replace_with_skeleton (dw_die_ref,
3731 dw_die_ref,
3732 dw_die_ref);
3733 static void break_out_comdat_types (dw_die_ref);
3734 static void copy_decls_for_unworthy_types (dw_die_ref);
3735
3736 static void add_sibling_attributes (dw_die_ref);
3737 static void output_location_lists (dw_die_ref);
3738 static int constant_size (unsigned HOST_WIDE_INT);
3739 static unsigned long size_of_die (dw_die_ref);
3740 static void calc_die_sizes (dw_die_ref);
3741 static void calc_base_type_die_sizes (void);
3742 static void mark_dies (dw_die_ref);
3743 static void unmark_dies (dw_die_ref);
3744 static void unmark_all_dies (dw_die_ref);
3745 static unsigned long size_of_pubnames (vec<pubname_entry, va_gc> *);
3746 static unsigned long size_of_aranges (void);
3747 static enum dwarf_form value_format (dw_attr_node *);
3748 static void output_value_format (dw_attr_node *);
3749 static void output_abbrev_section (void);
3750 static void output_die_abbrevs (unsigned long, dw_die_ref);
3751 static void output_die (dw_die_ref);
3752 static void output_compilation_unit_header (enum dwarf_unit_type);
3753 static void output_comp_unit (dw_die_ref, int, const unsigned char *);
3754 static void output_comdat_type_unit (comdat_type_node *, bool);
3755 static const char *dwarf2_name (tree, int);
3756 static void add_pubname (tree, dw_die_ref);
3757 static void add_enumerator_pubname (const char *, dw_die_ref);
3758 static void add_pubname_string (const char *, dw_die_ref);
3759 static void add_pubtype (tree, dw_die_ref);
3760 static void output_pubnames (vec<pubname_entry, va_gc> *);
3761 static void output_aranges (void);
3762 static unsigned int add_ranges (const_tree, bool = false);
3763 static void add_ranges_by_labels (dw_die_ref, const char *, const char *,
3764 bool *, bool);
3765 static void output_ranges (void);
3766 static dw_line_info_table *new_line_info_table (void);
3767 static void output_line_info (bool);
3768 static void output_file_names (void);
3769 static dw_die_ref base_type_die (tree, bool);
3770 static int is_base_type (tree);
3771 static dw_die_ref subrange_type_die (tree, tree, tree, tree, dw_die_ref);
3772 static int decl_quals (const_tree);
3773 static dw_die_ref modified_type_die (tree, int, bool, dw_die_ref);
3774 static dw_die_ref generic_parameter_die (tree, tree, bool, dw_die_ref);
3775 static dw_die_ref template_parameter_pack_die (tree, tree, dw_die_ref);
3776 static unsigned int dbx_reg_number (const_rtx);
3777 static void add_loc_descr_op_piece (dw_loc_descr_ref *, int);
3778 static dw_loc_descr_ref reg_loc_descriptor (rtx, enum var_init_status);
3779 static dw_loc_descr_ref one_reg_loc_descriptor (unsigned int,
3780 enum var_init_status);
3781 static dw_loc_descr_ref multiple_reg_loc_descriptor (rtx, rtx,
3782 enum var_init_status);
3783 static dw_loc_descr_ref based_loc_descr (rtx, poly_int64,
3784 enum var_init_status);
3785 static int is_based_loc (const_rtx);
3786 static bool resolve_one_addr (rtx *);
3787 static dw_loc_descr_ref concat_loc_descriptor (rtx, rtx,
3788 enum var_init_status);
3789 static dw_loc_descr_ref loc_descriptor (rtx, machine_mode mode,
3790 enum var_init_status);
3791 struct loc_descr_context;
3792 static void add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref);
3793 static void add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list);
3794 static dw_loc_list_ref loc_list_from_tree (tree, int,
3795 struct loc_descr_context *);
3796 static dw_loc_descr_ref loc_descriptor_from_tree (tree, int,
3797 struct loc_descr_context *);
3798 static tree field_type (const_tree);
3799 static unsigned int simple_type_align_in_bits (const_tree);
3800 static unsigned int simple_decl_align_in_bits (const_tree);
3801 static unsigned HOST_WIDE_INT simple_type_size_in_bits (const_tree);
3802 struct vlr_context;
3803 static dw_loc_descr_ref field_byte_offset (const_tree, struct vlr_context *,
3804 HOST_WIDE_INT *);
3805 static void add_AT_location_description (dw_die_ref, enum dwarf_attribute,
3806 dw_loc_list_ref);
3807 static void add_data_member_location_attribute (dw_die_ref, tree,
3808 struct vlr_context *);
3809 static bool add_const_value_attribute (dw_die_ref, rtx);
3810 static void insert_int (HOST_WIDE_INT, unsigned, unsigned char *);
3811 static void insert_wide_int (const wide_int &, unsigned char *, int);
3812 static void insert_float (const_rtx, unsigned char *);
3813 static rtx rtl_for_decl_location (tree);
3814 static bool add_location_or_const_value_attribute (dw_die_ref, tree, bool);
3815 static bool tree_add_const_value_attribute (dw_die_ref, tree);
3816 static bool tree_add_const_value_attribute_for_decl (dw_die_ref, tree);
3817 static void add_name_attribute (dw_die_ref, const char *);
3818 static void add_desc_attribute (dw_die_ref, tree);
3819 static void add_gnat_descriptive_type_attribute (dw_die_ref, tree, dw_die_ref);
3820 static void add_comp_dir_attribute (dw_die_ref);
3821 static void add_scalar_info (dw_die_ref, enum dwarf_attribute, tree, int,
3822 struct loc_descr_context *);
3823 static void add_bound_info (dw_die_ref, enum dwarf_attribute, tree,
3824 struct loc_descr_context *);
3825 static void add_subscript_info (dw_die_ref, tree, bool);
3826 static void add_byte_size_attribute (dw_die_ref, tree);
3827 static void add_alignment_attribute (dw_die_ref, tree);
3828 static inline void add_bit_offset_attribute (dw_die_ref, tree,
3829 struct vlr_context *);
3830 static void add_bit_size_attribute (dw_die_ref, tree);
3831 static void add_prototyped_attribute (dw_die_ref, tree);
3832 static void add_abstract_origin_attribute (dw_die_ref, tree);
3833 static void add_pure_or_virtual_attribute (dw_die_ref, tree);
3834 static void add_src_coords_attributes (dw_die_ref, tree);
3835 static void add_name_and_src_coords_attributes (dw_die_ref, tree, bool = false);
3836 static void add_discr_value (dw_die_ref, dw_discr_value *);
3837 static void add_discr_list (dw_die_ref, dw_discr_list_ref);
3838 static inline dw_discr_list_ref AT_discr_list (dw_attr_node *);
3839 static dw_die_ref scope_die_for (tree, dw_die_ref);
3840 static inline int local_scope_p (dw_die_ref);
3841 static inline int class_scope_p (dw_die_ref);
3842 static inline int class_or_namespace_scope_p (dw_die_ref);
3843 static void add_type_attribute (dw_die_ref, tree, int, bool, dw_die_ref);
3844 static void add_calling_convention_attribute (dw_die_ref, tree);
3845 static const char *type_tag (const_tree);
3846 static tree member_declared_type (const_tree);
3847 #if 0
3848 static const char *decl_start_label (tree);
3849 #endif
3850 static void gen_array_type_die (tree, dw_die_ref);
3851 static void gen_descr_array_type_die (tree, struct array_descr_info *, dw_die_ref);
3852 #if 0
3853 static void gen_entry_point_die (tree, dw_die_ref);
3854 #endif
3855 static dw_die_ref gen_enumeration_type_die (tree, dw_die_ref);
3856 static dw_die_ref gen_formal_parameter_die (tree, tree, bool, dw_die_ref);
3857 static dw_die_ref gen_formal_parameter_pack_die (tree, tree, dw_die_ref, tree*);
3858 static void gen_unspecified_parameters_die (tree, dw_die_ref);
3859 static void gen_formal_types_die (tree, dw_die_ref);
3860 static void gen_subprogram_die (tree, dw_die_ref);
3861 static void gen_variable_die (tree, tree, dw_die_ref);
3862 static void gen_const_die (tree, dw_die_ref);
3863 static void gen_label_die (tree, dw_die_ref);
3864 static void gen_lexical_block_die (tree, dw_die_ref);
3865 static void gen_inlined_subroutine_die (tree, dw_die_ref);
3866 static void gen_field_die (tree, struct vlr_context *, dw_die_ref);
3867 static void gen_ptr_to_mbr_type_die (tree, dw_die_ref);
3868 static dw_die_ref gen_compile_unit_die (const char *);
3869 static void gen_inheritance_die (tree, tree, tree, dw_die_ref);
3870 static void gen_member_die (tree, dw_die_ref);
3871 static void gen_struct_or_union_type_die (tree, dw_die_ref,
3872 enum debug_info_usage);
3873 static void gen_subroutine_type_die (tree, dw_die_ref);
3874 static void gen_typedef_die (tree, dw_die_ref);
3875 static void gen_type_die (tree, dw_die_ref);
3876 static void gen_block_die (tree, dw_die_ref);
3877 static void decls_for_scope (tree, dw_die_ref, bool = true);
3878 static bool is_naming_typedef_decl (const_tree);
3879 static inline dw_die_ref get_context_die (tree);
3880 static void gen_namespace_die (tree, dw_die_ref);
3881 static dw_die_ref gen_namelist_decl (tree, dw_die_ref, tree);
3882 static dw_die_ref gen_decl_die (tree, tree, struct vlr_context *, dw_die_ref);
3883 static dw_die_ref force_decl_die (tree);
3884 static dw_die_ref force_type_die (tree);
3885 static dw_die_ref setup_namespace_context (tree, dw_die_ref);
3886 static dw_die_ref declare_in_namespace (tree, dw_die_ref);
3887 static struct dwarf_file_data * lookup_filename (const char *);
3888 static void retry_incomplete_types (void);
3889 static void gen_type_die_for_member (tree, tree, dw_die_ref);
3890 static void gen_generic_params_dies (tree);
3891 static void gen_tagged_type_die (tree, dw_die_ref, enum debug_info_usage);
3892 static void gen_type_die_with_usage (tree, dw_die_ref, enum debug_info_usage);
3893 static void splice_child_die (dw_die_ref, dw_die_ref);
3894 static int file_info_cmp (const void *, const void *);
3895 static dw_loc_list_ref new_loc_list (dw_loc_descr_ref, const char *, var_loc_view,
3896 const char *, var_loc_view, const char *);
3897 static void output_loc_list (dw_loc_list_ref);
3898 static char *gen_internal_sym (const char *);
3899 static bool want_pubnames (void);
3900
3901 static void prune_unmark_dies (dw_die_ref);
3902 static void prune_unused_types_mark_generic_parms_dies (dw_die_ref);
3903 static void prune_unused_types_mark (dw_die_ref, int);
3904 static void prune_unused_types_walk (dw_die_ref);
3905 static void prune_unused_types_walk_attribs (dw_die_ref);
3906 static void prune_unused_types_prune (dw_die_ref);
3907 static void prune_unused_types (void);
3908 static int maybe_emit_file (struct dwarf_file_data *fd);
3909 static inline const char *AT_vms_delta1 (dw_attr_node *);
3910 static inline const char *AT_vms_delta2 (dw_attr_node *);
3911 static inline void add_AT_vms_delta (dw_die_ref, enum dwarf_attribute,
3912 const char *, const char *);
3913 static void append_entry_to_tmpl_value_parm_die_table (dw_die_ref, tree);
3914 static void gen_remaining_tmpl_value_param_die_attribute (void);
3915 static bool generic_type_p (tree);
3916 static void schedule_generic_params_dies_gen (tree t);
3917 static void gen_scheduled_generic_parms_dies (void);
3918 static void resolve_variable_values (void);
3919
3920 static const char *comp_dir_string (void);
3921
3922 static void hash_loc_operands (dw_loc_descr_ref, inchash::hash &);
3923
3924 /* enum for tracking thread-local variables whose address is really an offset
3925 relative to the TLS pointer, which will need link-time relocation, but will
3926 not need relocation by the DWARF consumer. */
3927
3928 enum dtprel_bool
3929 {
3930 dtprel_false = 0,
3931 dtprel_true = 1
3932 };
3933
3934 /* Return the operator to use for an address of a variable. For dtprel_true, we
3935 use DW_OP_const*. For regular variables, which need both link-time
3936 relocation and consumer-level relocation (e.g., to account for shared objects
3937 loaded at a random address), we use DW_OP_addr*. */
3938
3939 static inline enum dwarf_location_atom
3940 dw_addr_op (enum dtprel_bool dtprel)
3941 {
3942 if (dtprel == dtprel_true)
3943 return (dwarf_split_debug_info ? dwarf_OP (DW_OP_constx)
3944 : (DWARF2_ADDR_SIZE == 4 ? DW_OP_const4u : DW_OP_const8u));
3945 else
3946 return dwarf_split_debug_info ? dwarf_OP (DW_OP_addrx) : DW_OP_addr;
3947 }
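/* A couple of concrete cases of the selection above, for illustration only:
   with 8-byte addresses and no split debug info, dtprel_true yields
   DW_OP_const8u, while an ordinary variable address (dtprel_false) yields
   DW_OP_addr; with -gsplit-dwarf the same requests yield the indexed
   DW_OP_constx and DW_OP_addrx forms (or their GNU equivalents on
   pre-DWARF 5 setups, via dwarf_OP).  */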
3948
3949 /* Return a pointer to a newly allocated address location description. If
3950 dwarf_split_debug_info is true, then record the address with the appropriate
3951 relocation. */
3952 static inline dw_loc_descr_ref
3953 new_addr_loc_descr (rtx addr, enum dtprel_bool dtprel)
3954 {
3955 dw_loc_descr_ref ref = new_loc_descr (dw_addr_op (dtprel), 0, 0);
3956
3957 ref->dw_loc_oprnd1.val_class = dw_val_class_addr;
3958 ref->dw_loc_oprnd1.v.val_addr = addr;
3959 ref->dtprel = dtprel;
3960 if (dwarf_split_debug_info)
3961 ref->dw_loc_oprnd1.val_entry
3962 = add_addr_table_entry (addr,
3963 dtprel ? ate_kind_rtx_dtprel : ate_kind_rtx);
3964 else
3965 ref->dw_loc_oprnd1.val_entry = NULL;
3966
3967 return ref;
3968 }
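/* A minimal usage sketch (hypothetical caller; the symbol name is made up):

     rtx sym = gen_rtx_SYMBOL_REF (Pmode, "some_symbol");
     dw_loc_descr_ref descr = new_addr_loc_descr (sym, dtprel_false);

   This builds a one-operation location expression, DW_OP_addr (or DW_OP_addrx
   with split debug info), whose operand is the symbol's address.  */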
3969
3970 /* Section names used to hold DWARF debugging information. */
3971
3972 #ifndef DEBUG_INFO_SECTION
3973 #define DEBUG_INFO_SECTION ".debug_info"
3974 #endif
3975 #ifndef DEBUG_DWO_INFO_SECTION
3976 #define DEBUG_DWO_INFO_SECTION ".debug_info.dwo"
3977 #endif
3978 #ifndef DEBUG_LTO_INFO_SECTION
3979 #define DEBUG_LTO_INFO_SECTION ".gnu.debuglto_.debug_info"
3980 #endif
3981 #ifndef DEBUG_LTO_DWO_INFO_SECTION
3982 #define DEBUG_LTO_DWO_INFO_SECTION ".gnu.debuglto_.debug_info.dwo"
3983 #endif
3984 #ifndef DEBUG_ABBREV_SECTION
3985 #define DEBUG_ABBREV_SECTION ".debug_abbrev"
3986 #endif
3987 #ifndef DEBUG_LTO_ABBREV_SECTION
3988 #define DEBUG_LTO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev"
3989 #endif
3990 #ifndef DEBUG_DWO_ABBREV_SECTION
3991 #define DEBUG_DWO_ABBREV_SECTION ".debug_abbrev.dwo"
3992 #endif
3993 #ifndef DEBUG_LTO_DWO_ABBREV_SECTION
3994 #define DEBUG_LTO_DWO_ABBREV_SECTION ".gnu.debuglto_.debug_abbrev.dwo"
3995 #endif
3996 #ifndef DEBUG_ARANGES_SECTION
3997 #define DEBUG_ARANGES_SECTION ".debug_aranges"
3998 #endif
3999 #ifndef DEBUG_ADDR_SECTION
4000 #define DEBUG_ADDR_SECTION ".debug_addr"
4001 #endif
4002 #ifndef DEBUG_MACINFO_SECTION
4003 #define DEBUG_MACINFO_SECTION ".debug_macinfo"
4004 #endif
4005 #ifndef DEBUG_LTO_MACINFO_SECTION
4006 #define DEBUG_LTO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo"
4007 #endif
4008 #ifndef DEBUG_DWO_MACINFO_SECTION
4009 #define DEBUG_DWO_MACINFO_SECTION ".debug_macinfo.dwo"
4010 #endif
4011 #ifndef DEBUG_LTO_DWO_MACINFO_SECTION
4012 #define DEBUG_LTO_DWO_MACINFO_SECTION ".gnu.debuglto_.debug_macinfo.dwo"
4013 #endif
4014 #ifndef DEBUG_MACRO_SECTION
4015 #define DEBUG_MACRO_SECTION ".debug_macro"
4016 #endif
4017 #ifndef DEBUG_LTO_MACRO_SECTION
4018 #define DEBUG_LTO_MACRO_SECTION ".gnu.debuglto_.debug_macro"
4019 #endif
4020 #ifndef DEBUG_DWO_MACRO_SECTION
4021 #define DEBUG_DWO_MACRO_SECTION ".debug_macro.dwo"
4022 #endif
4023 #ifndef DEBUG_LTO_DWO_MACRO_SECTION
4024 #define DEBUG_LTO_DWO_MACRO_SECTION ".gnu.debuglto_.debug_macro.dwo"
4025 #endif
4026 #ifndef DEBUG_LINE_SECTION
4027 #define DEBUG_LINE_SECTION ".debug_line"
4028 #endif
4029 #ifndef DEBUG_LTO_LINE_SECTION
4030 #define DEBUG_LTO_LINE_SECTION ".gnu.debuglto_.debug_line"
4031 #endif
4032 #ifndef DEBUG_DWO_LINE_SECTION
4033 #define DEBUG_DWO_LINE_SECTION ".debug_line.dwo"
4034 #endif
4035 #ifndef DEBUG_LTO_DWO_LINE_SECTION
4036 #define DEBUG_LTO_DWO_LINE_SECTION ".gnu.debuglto_.debug_line.dwo"
4037 #endif
4038 #ifndef DEBUG_LOC_SECTION
4039 #define DEBUG_LOC_SECTION ".debug_loc"
4040 #endif
4041 #ifndef DEBUG_DWO_LOC_SECTION
4042 #define DEBUG_DWO_LOC_SECTION ".debug_loc.dwo"
4043 #endif
4044 #ifndef DEBUG_LOCLISTS_SECTION
4045 #define DEBUG_LOCLISTS_SECTION ".debug_loclists"
4046 #endif
4047 #ifndef DEBUG_DWO_LOCLISTS_SECTION
4048 #define DEBUG_DWO_LOCLISTS_SECTION ".debug_loclists.dwo"
4049 #endif
4050 #ifndef DEBUG_PUBNAMES_SECTION
4051 #define DEBUG_PUBNAMES_SECTION \
4052 ((debug_generate_pub_sections == 2) \
4053 ? ".debug_gnu_pubnames" : ".debug_pubnames")
4054 #endif
4055 #ifndef DEBUG_PUBTYPES_SECTION
4056 #define DEBUG_PUBTYPES_SECTION \
4057 ((debug_generate_pub_sections == 2) \
4058 ? ".debug_gnu_pubtypes" : ".debug_pubtypes")
4059 #endif
4060 #ifndef DEBUG_STR_OFFSETS_SECTION
4061 #define DEBUG_STR_OFFSETS_SECTION ".debug_str_offsets"
4062 #endif
4063 #ifndef DEBUG_DWO_STR_OFFSETS_SECTION
4064 #define DEBUG_DWO_STR_OFFSETS_SECTION ".debug_str_offsets.dwo"
4065 #endif
4066 #ifndef DEBUG_LTO_DWO_STR_OFFSETS_SECTION
4067 #define DEBUG_LTO_DWO_STR_OFFSETS_SECTION ".gnu.debuglto_.debug_str_offsets.dwo"
4068 #endif
4069 #ifndef DEBUG_STR_SECTION
4070 #define DEBUG_STR_SECTION ".debug_str"
4071 #endif
4072 #ifndef DEBUG_LTO_STR_SECTION
4073 #define DEBUG_LTO_STR_SECTION ".gnu.debuglto_.debug_str"
4074 #endif
4075 #ifndef DEBUG_STR_DWO_SECTION
4076 #define DEBUG_STR_DWO_SECTION ".debug_str.dwo"
4077 #endif
4078 #ifndef DEBUG_LTO_STR_DWO_SECTION
4079 #define DEBUG_LTO_STR_DWO_SECTION ".gnu.debuglto_.debug_str.dwo"
4080 #endif
4081 #ifndef DEBUG_RANGES_SECTION
4082 #define DEBUG_RANGES_SECTION ".debug_ranges"
4083 #endif
4084 #ifndef DEBUG_RNGLISTS_SECTION
4085 #define DEBUG_RNGLISTS_SECTION ".debug_rnglists"
4086 #endif
4087 #ifndef DEBUG_LINE_STR_SECTION
4088 #define DEBUG_LINE_STR_SECTION ".debug_line_str"
4089 #endif
4090 #ifndef DEBUG_LTO_LINE_STR_SECTION
4091 #define DEBUG_LTO_LINE_STR_SECTION ".gnu.debuglto_.debug_line_str"
4092 #endif
4093
4094 /* Standard ELF section names for compiled code and data. */
4095 #ifndef TEXT_SECTION_NAME
4096 #define TEXT_SECTION_NAME ".text"
4097 #endif
4098
4099 /* Section flags for .debug_str section. */
4100 #define DEBUG_STR_SECTION_FLAGS \
4101 (HAVE_GAS_SHF_MERGE && flag_merge_debug_strings \
4102 ? SECTION_DEBUG | SECTION_MERGE | SECTION_STRINGS | 1 \
4103 : SECTION_DEBUG)
4104
4105 /* Section flags for .debug_str.dwo section. */
4106 #define DEBUG_STR_DWO_SECTION_FLAGS (SECTION_DEBUG | SECTION_EXCLUDE)
4107
4108 /* Attribute used to refer to the macro section. */
4109 #define DEBUG_MACRO_ATTRIBUTE (dwarf_version >= 5 ? DW_AT_macros \
4110 : dwarf_strict ? DW_AT_macro_info : DW_AT_GNU_macros)
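/* For example, with -gdwarf-5 the attribute above resolves to DW_AT_macros;
   with -gdwarf-4 it resolves to DW_AT_GNU_macros, unless -gstrict-dwarf is
   also given, in which case the standard DW_AT_macro_info is used.  */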
4111
4112 /* Labels we insert at the beginning of sections so that we can refer
4113 to them instead of the section names themselves. */
4114
4115 #ifndef TEXT_SECTION_LABEL
4116 #define TEXT_SECTION_LABEL "Ltext"
4117 #endif
4118 #ifndef COLD_TEXT_SECTION_LABEL
4119 #define COLD_TEXT_SECTION_LABEL "Ltext_cold"
4120 #endif
4121 #ifndef DEBUG_LINE_SECTION_LABEL
4122 #define DEBUG_LINE_SECTION_LABEL "Ldebug_line"
4123 #endif
4124 #ifndef DEBUG_SKELETON_LINE_SECTION_LABEL
4125 #define DEBUG_SKELETON_LINE_SECTION_LABEL "Lskeleton_debug_line"
4126 #endif
4127 #ifndef DEBUG_INFO_SECTION_LABEL
4128 #define DEBUG_INFO_SECTION_LABEL "Ldebug_info"
4129 #endif
4130 #ifndef DEBUG_SKELETON_INFO_SECTION_LABEL
4131 #define DEBUG_SKELETON_INFO_SECTION_LABEL "Lskeleton_debug_info"
4132 #endif
4133 #ifndef DEBUG_ABBREV_SECTION_LABEL
4134 #define DEBUG_ABBREV_SECTION_LABEL "Ldebug_abbrev"
4135 #endif
4136 #ifndef DEBUG_SKELETON_ABBREV_SECTION_LABEL
4137 #define DEBUG_SKELETON_ABBREV_SECTION_LABEL "Lskeleton_debug_abbrev"
4138 #endif
4139 #ifndef DEBUG_ADDR_SECTION_LABEL
4140 #define DEBUG_ADDR_SECTION_LABEL "Ldebug_addr"
4141 #endif
4142 #ifndef DEBUG_LOC_SECTION_LABEL
4143 #define DEBUG_LOC_SECTION_LABEL "Ldebug_loc"
4144 #endif
4145 #ifndef DEBUG_RANGES_SECTION_LABEL
4146 #define DEBUG_RANGES_SECTION_LABEL "Ldebug_ranges"
4147 #endif
4148 #ifndef DEBUG_MACINFO_SECTION_LABEL
4149 #define DEBUG_MACINFO_SECTION_LABEL "Ldebug_macinfo"
4150 #endif
4151 #ifndef DEBUG_MACRO_SECTION_LABEL
4152 #define DEBUG_MACRO_SECTION_LABEL "Ldebug_macro"
4153 #endif
4154 #define SKELETON_COMP_DIE_ABBREV 1
4155 #define SKELETON_TYPE_DIE_ABBREV 2
4156
4157 /* Definitions of defaults for formats and names of various special
4158 (artificial) labels which may be generated within this file (when the -g
4159 option is used and DWARF2_DEBUGGING_INFO is in effect).
4160 If necessary, these may be overridden from within the tm.h file, but
4161 typically, overriding these defaults is unnecessary. */
4162
4163 static char text_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4164 static char text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4165 static char cold_text_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4166 static char cold_end_label[MAX_ARTIFICIAL_LABEL_BYTES];
4167 static char abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4168 static char debug_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4169 static char debug_skeleton_info_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4170 static char debug_skeleton_abbrev_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4171 static char debug_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4172 static char debug_addr_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4173 static char debug_skeleton_line_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4174 static char macinfo_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4175 static char loc_section_label[MAX_ARTIFICIAL_LABEL_BYTES];
4176 static char ranges_section_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4177 static char ranges_base_label[2 * MAX_ARTIFICIAL_LABEL_BYTES];
4178
4179 #ifndef TEXT_END_LABEL
4180 #define TEXT_END_LABEL "Letext"
4181 #endif
4182 #ifndef COLD_END_LABEL
4183 #define COLD_END_LABEL "Letext_cold"
4184 #endif
4185 #ifndef BLOCK_BEGIN_LABEL
4186 #define BLOCK_BEGIN_LABEL "LBB"
4187 #endif
4188 #ifndef BLOCK_INLINE_ENTRY_LABEL
4189 #define BLOCK_INLINE_ENTRY_LABEL "LBI"
4190 #endif
4191 #ifndef BLOCK_END_LABEL
4192 #define BLOCK_END_LABEL "LBE"
4193 #endif
4194 #ifndef LINE_CODE_LABEL
4195 #define LINE_CODE_LABEL "LM"
4196 #endif
4197
4198 \f
4199 /* Return the root of the DIE's built for the current compilation unit. */
4200 static dw_die_ref
4201 comp_unit_die (void)
4202 {
4203 if (!single_comp_unit_die)
4204 single_comp_unit_die = gen_compile_unit_die (NULL);
4205 return single_comp_unit_die;
4206 }
4207
4208 /* We allow a language front-end to designate a function that is to be
4209 called to "demangle" any name before it is put into a DIE. */
4210
4211 static const char *(*demangle_name_func) (const char *);
4212
4213 void
4214 dwarf2out_set_demangle_name_func (const char *(*func) (const char *))
4215 {
4216 demangle_name_func = func;
4217 }
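/* A front end would typically register its hook during initialization.
   A minimal sketch, with a hypothetical pass-through demangler:

     static const char *
     my_lang_demangle (const char *name)
     {
       return name;
     }

     dwarf2out_set_demangle_name_func (my_lang_demangle);  */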
4218
4219 /* Test if rtl node points to a pseudo register. */
4220
4221 static inline int
4222 is_pseudo_reg (const_rtx rtl)
4223 {
4224 return ((REG_P (rtl) && REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
4225 || (GET_CODE (rtl) == SUBREG
4226 && REGNO (SUBREG_REG (rtl)) >= FIRST_PSEUDO_REGISTER));
4227 }
4228
4229 /* Return a reference to a type, with its const and volatile qualifiers
4230 removed. */
4231
4232 static inline tree
4233 type_main_variant (tree type)
4234 {
4235 type = TYPE_MAIN_VARIANT (type);
4236
4237 /* ??? There really should be only one main variant among any group of
4238 variants of a given type (and all of the MAIN_VARIANT values for all
4239 members of the group should point to that one type) but sometimes the C
4240 front-end messes this up for array types, so we work around that bug
4241 here. */
4242 if (TREE_CODE (type) == ARRAY_TYPE)
4243 while (type != TYPE_MAIN_VARIANT (type))
4244 type = TYPE_MAIN_VARIANT (type);
4245
4246 return type;
4247 }
4248
4249 /* Return nonzero if the given type node represents a tagged type. */
4250
4251 static inline int
4252 is_tagged_type (const_tree type)
4253 {
4254 enum tree_code code = TREE_CODE (type);
4255
4256 return (code == RECORD_TYPE || code == UNION_TYPE
4257 || code == QUAL_UNION_TYPE || code == ENUMERAL_TYPE);
4258 }
4259
4260 /* Set label to debug_info_section_label + die_offset of a DIE reference. */
4261
4262 static void
4263 get_ref_die_offset_label (char *label, dw_die_ref ref)
4264 {
4265 sprintf (label, "%s+%ld", debug_info_section_label, ref->die_offset);
4266 }
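/* For instance, if debug_info_section_label were "Ldebug_info0" and the DIE
   sat at offset 0x4c, the sprintf above would produce the label text
   "Ldebug_info0+76" (the offset is printed in decimal).  */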
4267
4268 /* Return die_offset of a DIE reference to a base type. */
4269
4270 static unsigned long int
4271 get_base_type_offset (dw_die_ref ref)
4272 {
4273 if (ref->die_offset)
4274 return ref->die_offset;
4275 if (comp_unit_die ()->die_abbrev)
4276 {
4277 calc_base_type_die_sizes ();
4278 gcc_assert (ref->die_offset);
4279 }
4280 return ref->die_offset;
4281 }
4282
4283 /* Return die_offset of a DIE reference other than base type. */
4284
4285 static unsigned long int
4286 get_ref_die_offset (dw_die_ref ref)
4287 {
4288 gcc_assert (ref->die_offset);
4289 return ref->die_offset;
4290 }
4291
4292 /* Convert a DIE tag into its string name. */
4293
4294 static const char *
4295 dwarf_tag_name (unsigned int tag)
4296 {
4297 const char *name = get_DW_TAG_name (tag);
4298
4299 if (name != NULL)
4300 return name;
4301
4302 return "DW_TAG_<unknown>";
4303 }
4304
4305 /* Convert a DWARF attribute code into its string name. */
4306
4307 static const char *
4308 dwarf_attr_name (unsigned int attr)
4309 {
4310 const char *name;
4311
4312 switch (attr)
4313 {
4314 #if VMS_DEBUGGING_INFO
4315 case DW_AT_HP_prologue:
4316 return "DW_AT_HP_prologue";
4317 #else
4318 case DW_AT_MIPS_loop_unroll_factor:
4319 return "DW_AT_MIPS_loop_unroll_factor";
4320 #endif
4321
4322 #if VMS_DEBUGGING_INFO
4323 case DW_AT_HP_epilogue:
4324 return "DW_AT_HP_epilogue";
4325 #else
4326 case DW_AT_MIPS_stride:
4327 return "DW_AT_MIPS_stride";
4328 #endif
4329 }
4330
4331 name = get_DW_AT_name (attr);
4332
4333 if (name != NULL)
4334 return name;
4335
4336 return "DW_AT_<unknown>";
4337 }
4338
4339 /* Convert a DWARF value form code into its string name. */
4340
4341 static const char *
4342 dwarf_form_name (unsigned int form)
4343 {
4344 const char *name = get_DW_FORM_name (form);
4345
4346 if (name != NULL)
4347 return name;
4348
4349 return "DW_FORM_<unknown>";
4350 }
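/* These three helpers are used when printing DIEs and in diagnostics;
   e.g. dwarf_tag_name (DW_TAG_compile_unit) returns the string
   "DW_TAG_compile_unit", while an unrecognized code falls back to the
   "<unknown>" spellings above.  */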
4351 \f
4352 /* Determine the "ultimate origin" of a decl. The decl may be an inlined
4353 instance of an inlined instance of a decl which is local to an inline
4354 function, so we have to trace all of the way back through the origin chain
4355 to find out what sort of node actually served as the original seed for the
4356 given block. */
4357
4358 static tree
4359 decl_ultimate_origin (const_tree decl)
4360 {
4361 if (!CODE_CONTAINS_STRUCT (TREE_CODE (decl), TS_DECL_COMMON))
4362 return NULL_TREE;
4363
4364 /* DECL_ABSTRACT_ORIGIN can point to itself; ignore that if
4365 we're trying to output the abstract instance of this function. */
4366 if (DECL_ABSTRACT_P (decl) && DECL_ABSTRACT_ORIGIN (decl) == decl)
4367 return NULL_TREE;
4368
4369 /* Since the DECL_ABSTRACT_ORIGIN for a DECL is supposed to be the
4370 most distant ancestor, this should never happen. */
4371 gcc_assert (!DECL_FROM_INLINE (DECL_ORIGIN (decl)));
4372
4373 return DECL_ABSTRACT_ORIGIN (decl);
4374 }
4375
4376 /* Get the class to which DECL belongs, if any. In g++, the DECL_CONTEXT
4377 of a virtual function may refer to a base class, so we check the 'this'
4378 parameter. */
4379
4380 static tree
4381 decl_class_context (tree decl)
4382 {
4383 tree context = NULL_TREE;
4384
4385 if (TREE_CODE (decl) != FUNCTION_DECL || ! DECL_VINDEX (decl))
4386 context = DECL_CONTEXT (decl);
4387 else
4388 context = TYPE_MAIN_VARIANT
4389 (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
4390
4391 if (context && !TYPE_P (context))
4392 context = NULL_TREE;
4393
4394 return context;
4395 }
4396 \f
4397 /* Add an attribute/value pair to a DIE. */
4398
4399 static inline void
4400 add_dwarf_attr (dw_die_ref die, dw_attr_node *attr)
4401 {
4402 /* Maybe this should be an assert? */
4403 if (die == NULL)
4404 return;
4405
4406 if (flag_checking)
4407 {
4408 /* Check we do not add duplicate attrs. Can't use get_AT here
4409 because that recurses to the specification/abstract origin DIE. */
4410 dw_attr_node *a;
4411 unsigned ix;
4412 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
4413 gcc_assert (a->dw_attr != attr->dw_attr);
4414 }
4415
4416 vec_safe_reserve (die->die_attr, 1);
4417 vec_safe_push (die->die_attr, *attr);
4418 }
4419
4420 static inline enum dw_val_class
4421 AT_class (dw_attr_node *a)
4422 {
4423 return a->dw_attr_val.val_class;
4424 }
4425
4426 /* Return the index for any attribute that will be referenced with a
4427 DW_FORM_addrx/GNU_addr_index or DW_FORM_strx/GNU_str_index. String
4428 indices are stored in dw_attr_val.v.val_str for reference counting
4429 pruning. */
4430
4431 static inline unsigned int
4432 AT_index (dw_attr_node *a)
4433 {
4434 if (AT_class (a) == dw_val_class_str)
4435 return a->dw_attr_val.v.val_str->index;
4436 else if (a->dw_attr_val.val_entry != NULL)
4437 return a->dw_attr_val.val_entry->index;
4438 return NOT_INDEXED;
4439 }
4440
4441 /* Add a flag value attribute to a DIE. */
4442
4443 static inline void
4444 add_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind, unsigned int flag)
4445 {
4446 dw_attr_node attr;
4447
4448 attr.dw_attr = attr_kind;
4449 attr.dw_attr_val.val_class = dw_val_class_flag;
4450 attr.dw_attr_val.val_entry = NULL;
4451 attr.dw_attr_val.v.val_flag = flag;
4452 add_dwarf_attr (die, &attr);
4453 }
4454
4455 static inline unsigned
4456 AT_flag (dw_attr_node *a)
4457 {
4458 gcc_assert (a && AT_class (a) == dw_val_class_flag);
4459 return a->dw_attr_val.v.val_flag;
4460 }
4461
4462 /* Add a signed integer attribute value to a DIE. */
4463
4464 static inline void
4465 add_AT_int (dw_die_ref die, enum dwarf_attribute attr_kind, HOST_WIDE_INT int_val)
4466 {
4467 dw_attr_node attr;
4468
4469 attr.dw_attr = attr_kind;
4470 attr.dw_attr_val.val_class = dw_val_class_const;
4471 attr.dw_attr_val.val_entry = NULL;
4472 attr.dw_attr_val.v.val_int = int_val;
4473 add_dwarf_attr (die, &attr);
4474 }
4475
4476 static inline HOST_WIDE_INT
4477 AT_int (dw_attr_node *a)
4478 {
4479 gcc_assert (a && (AT_class (a) == dw_val_class_const
4480 || AT_class (a) == dw_val_class_const_implicit));
4481 return a->dw_attr_val.v.val_int;
4482 }
4483
4484 /* Add an unsigned integer attribute value to a DIE. */
4485
4486 static inline void
4487 add_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind,
4488 unsigned HOST_WIDE_INT unsigned_val)
4489 {
4490 dw_attr_node attr;
4491
4492 attr.dw_attr = attr_kind;
4493 attr.dw_attr_val.val_class = dw_val_class_unsigned_const;
4494 attr.dw_attr_val.val_entry = NULL;
4495 attr.dw_attr_val.v.val_unsigned = unsigned_val;
4496 add_dwarf_attr (die, &attr);
4497 }
4498
4499 static inline unsigned HOST_WIDE_INT
4500 AT_unsigned (dw_attr_node *a)
4501 {
4502 gcc_assert (a && (AT_class (a) == dw_val_class_unsigned_const
4503 || AT_class (a) == dw_val_class_unsigned_const_implicit));
4504 return a->dw_attr_val.v.val_unsigned;
4505 }
4506
4507 /* Add an unsigned wide integer attribute value to a DIE. */
4508
4509 static inline void
4510 add_AT_wide (dw_die_ref die, enum dwarf_attribute attr_kind,
4511 const wide_int& w)
4512 {
4513 dw_attr_node attr;
4514
4515 attr.dw_attr = attr_kind;
4516 attr.dw_attr_val.val_class = dw_val_class_wide_int;
4517 attr.dw_attr_val.val_entry = NULL;
4518 attr.dw_attr_val.v.val_wide = ggc_alloc<wide_int> ();
4519 *attr.dw_attr_val.v.val_wide = w;
4520 add_dwarf_attr (die, &attr);
4521 }
4522
4523 /* Add an unsigned double integer attribute value to a DIE. */
4524
4525 static inline void
4526 add_AT_double (dw_die_ref die, enum dwarf_attribute attr_kind,
4527 HOST_WIDE_INT high, unsigned HOST_WIDE_INT low)
4528 {
4529 dw_attr_node attr;
4530
4531 attr.dw_attr = attr_kind;
4532 attr.dw_attr_val.val_class = dw_val_class_const_double;
4533 attr.dw_attr_val.val_entry = NULL;
4534 attr.dw_attr_val.v.val_double.high = high;
4535 attr.dw_attr_val.v.val_double.low = low;
4536 add_dwarf_attr (die, &attr);
4537 }
4538
4539 /* Add a vector (array-of-bytes) attribute value to a DIE. */
4540
4541 static inline void
4542 add_AT_vec (dw_die_ref die, enum dwarf_attribute attr_kind,
4543 unsigned int length, unsigned int elt_size, unsigned char *array)
4544 {
4545 dw_attr_node attr;
4546
4547 attr.dw_attr = attr_kind;
4548 attr.dw_attr_val.val_class = dw_val_class_vec;
4549 attr.dw_attr_val.val_entry = NULL;
4550 attr.dw_attr_val.v.val_vec.length = length;
4551 attr.dw_attr_val.v.val_vec.elt_size = elt_size;
4552 attr.dw_attr_val.v.val_vec.array = array;
4553 add_dwarf_attr (die, &attr);
4554 }
4555
4556 /* Add an 8-byte data attribute value to a DIE. */
4557
4558 static inline void
4559 add_AT_data8 (dw_die_ref die, enum dwarf_attribute attr_kind,
4560 unsigned char data8[8])
4561 {
4562 dw_attr_node attr;
4563
4564 attr.dw_attr = attr_kind;
4565 attr.dw_attr_val.val_class = dw_val_class_data8;
4566 attr.dw_attr_val.val_entry = NULL;
4567 memcpy (attr.dw_attr_val.v.val_data8, data8, 8);
4568 add_dwarf_attr (die, &attr);
4569 }
4570
4571 /* Add DW_AT_low_pc and DW_AT_high_pc to a DIE. When using
4572 dwarf_split_debug_info, address attributes in dies destined for the
4573 final executable have force_direct set to avoid using indexed
4574 references. */
4575
4576 static inline void
4577 add_AT_low_high_pc (dw_die_ref die, const char *lbl_low, const char *lbl_high,
4578 bool force_direct)
4579 {
4580 dw_attr_node attr;
4581 char * lbl_id;
4582
4583 lbl_id = xstrdup (lbl_low);
4584 attr.dw_attr = DW_AT_low_pc;
4585 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4586 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4587 if (dwarf_split_debug_info && !force_direct)
4588 attr.dw_attr_val.val_entry
4589 = add_addr_table_entry (lbl_id, ate_kind_label);
4590 else
4591 attr.dw_attr_val.val_entry = NULL;
4592 add_dwarf_attr (die, &attr);
4593
4594 attr.dw_attr = DW_AT_high_pc;
4595 if (dwarf_version < 4)
4596 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
4597 else
4598 attr.dw_attr_val.val_class = dw_val_class_high_pc;
4599 lbl_id = xstrdup (lbl_high);
4600 attr.dw_attr_val.v.val_lbl_id = lbl_id;
4601 if (attr.dw_attr_val.val_class == dw_val_class_lbl_id
4602 && dwarf_split_debug_info && !force_direct)
4603 attr.dw_attr_val.val_entry
4604 = add_addr_table_entry (lbl_id, ate_kind_label);
4605 else
4606 attr.dw_attr_val.val_entry = NULL;
4607 add_dwarf_attr (die, &attr);
4608 }
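/* A usage sketch (the DIE and the label names are made up; real callers pass
   labels generated via ASM_GENERATE_INTERNAL_LABEL and friends):

     add_AT_low_high_pc (subr_die, "LFB42", "LFE42", false);

   For DWARF 4 and later, DW_AT_low_pc is an address and DW_AT_high_pc is
   emitted as dw_val_class_high_pc (an offset from low_pc); for older
   versions both attributes are plain label addresses.  */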
4609
4610 /* Hash and equality functions for debug_str_hash. */
4611
4612 hashval_t
4613 indirect_string_hasher::hash (indirect_string_node *x)
4614 {
4615 return htab_hash_string (x->str);
4616 }
4617
4618 bool
4619 indirect_string_hasher::equal (indirect_string_node *x1, const char *x2)
4620 {
4621 return strcmp (x1->str, x2) == 0;
4622 }
4623
4624 /* Add STR to the given string hash table. */
4625
4626 static struct indirect_string_node *
4627 find_AT_string_in_table (const char *str,
4628 hash_table<indirect_string_hasher> *table,
4629 enum insert_option insert = INSERT)
4630 {
4631 struct indirect_string_node *node;
4632
4633 indirect_string_node **slot
4634 = table->find_slot_with_hash (str, htab_hash_string (str), insert);
4635 if (*slot == NULL)
4636 {
4637 node = ggc_cleared_alloc<indirect_string_node> ();
4638 node->str = ggc_strdup (str);
4639 *slot = node;
4640 }
4641 else
4642 node = *slot;
4643
4644 node->refcount++;
4645 return node;
4646 }
4647
4648 /* Add STR to the indirect string hash table. */
4649
4650 static struct indirect_string_node *
4651 find_AT_string (const char *str, enum insert_option insert = INSERT)
4652 {
4653 if (! debug_str_hash)
4654 debug_str_hash = hash_table<indirect_string_hasher>::create_ggc (10);
4655
4656 return find_AT_string_in_table (str, debug_str_hash, insert);
4657 }
4658
4659 /* Add a string attribute value to a DIE. */
4660
4661 static inline void
4662 add_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind, const char *str)
4663 {
4664 dw_attr_node attr;
4665 struct indirect_string_node *node;
4666
4667 node = find_AT_string (str);
4668
4669 attr.dw_attr = attr_kind;
4670 attr.dw_attr_val.val_class = dw_val_class_str;
4671 attr.dw_attr_val.val_entry = NULL;
4672 attr.dw_attr_val.v.val_str = node;
4673 add_dwarf_attr (die, &attr);
4674 }
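/* A small sketch of how attribute values accumulate on a DIE (the DIE and
   the values are hypothetical):

     add_AT_string (die, DW_AT_name, "answer");
     add_AT_unsigned (die, DW_AT_byte_size, 4);
     add_AT_flag (die, DW_AT_external, 1);

   Each call pushes one dw_attr_node onto die->die_attr; whether the string
   ends up inline (DW_FORM_string) or in .debug_str is decided later by
   AT_string_form.  */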
4675
4676 static inline const char *
4677 AT_string (dw_attr_node *a)
4678 {
4679 gcc_assert (a && AT_class (a) == dw_val_class_str);
4680 return a->dw_attr_val.v.val_str->str;
4681 }
4682
4683 /* Call this function directly to bypass AT_string_form's logic to put
4684 the string inline in the die. */
4685
4686 static void
4687 set_indirect_string (struct indirect_string_node *node)
4688 {
4689 char label[MAX_ARTIFICIAL_LABEL_BYTES];
4690 /* If the string is already indirect, this is a no-op. */
4691 if (node->form == DW_FORM_strp
4692 || node->form == DW_FORM_line_strp
4693 || node->form == dwarf_FORM (DW_FORM_strx))
4694 {
4695 gcc_assert (node->label);
4696 return;
4697 }
4698 ASM_GENERATE_INTERNAL_LABEL (label, "LASF", dw2_string_counter);
4699 ++dw2_string_counter;
4700 node->label = xstrdup (label);
4701
4702 if (!dwarf_split_debug_info)
4703 {
4704 node->form = DW_FORM_strp;
4705 node->index = NOT_INDEXED;
4706 }
4707 else
4708 {
4709 node->form = dwarf_FORM (DW_FORM_strx);
4710 node->index = NO_INDEX_ASSIGNED;
4711 }
4712 }
4713
4714 /* A helper function for dwarf2out_finish, called to reset indirect
4715 string decisions done for early LTO dwarf output before fat object
4716 dwarf output. */
4717
4718 int
4719 reset_indirect_string (indirect_string_node **h, void *)
4720 {
4721 struct indirect_string_node *node = *h;
4722 if (node->form == DW_FORM_strp || node->form == dwarf_FORM (DW_FORM_strx))
4723 {
4724 free (node->label);
4725 node->label = NULL;
4726 node->form = (dwarf_form) 0;
4727 node->index = 0;
4728 }
4729 return 1;
4730 }
4731
4732 /* Find out whether a string should be output inline in DIE
4733 or out-of-line in .debug_str section. */
4734
4735 static enum dwarf_form
4736 find_string_form (struct indirect_string_node *node)
4737 {
4738 unsigned int len;
4739
4740 if (node->form)
4741 return node->form;
4742
4743 len = strlen (node->str) + 1;
4744
4745 /* If the string is shorter than or equal to the size of the reference, it is
4746 always better to put it inline. */
4747 if (len <= DWARF_OFFSET_SIZE || node->refcount == 0)
4748 return node->form = DW_FORM_string;
4749
4750 /* If we cannot expect the linker to merge strings in the .debug_str
4751 section, only put it into .debug_str if it is worthwhile even
4752 within this single module. */
4753 if (DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
4754 || ((debug_str_section->common.flags & SECTION_MERGE) == 0
4755 && (len - DWARF_OFFSET_SIZE) * node->refcount <= len))
4756 return node->form = DW_FORM_string;
4757
4758 set_indirect_string (node);
4759
4760 return node->form;
4761 }
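/* A worked example of the heuristic above, assuming 32-bit DWARF
   (DWARF_OFFSET_SIZE == 4): the string "abc" takes 4 bytes including its
   terminating NUL, so it can never beat a 4-byte reference and stays inline
   as DW_FORM_string.  A 20-byte string referenced from three DIEs saves
   (20 - 4) * 3 = 48 bytes of inline copies, which exceeds the 20-byte
   one-time cost of storing it in .debug_str, so it goes out of line even
   without linker string merging.  */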
4762
4763 /* Find out whether the string referenced from the attribute should be
4764 output inline in DIE or out-of-line in .debug_str section. */
4765
4766 static enum dwarf_form
4767 AT_string_form (dw_attr_node *a)
4768 {
4769 gcc_assert (a && AT_class (a) == dw_val_class_str);
4770 return find_string_form (a->dw_attr_val.v.val_str);
4771 }
4772
4773 /* Add a DIE reference attribute value to a DIE. */
4774
4775 static inline void
4776 add_AT_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind, dw_die_ref targ_die)
4777 {
4778 dw_attr_node attr;
4779 gcc_checking_assert (targ_die != NULL);
4780
4781 /* With LTO we can end up trying to reference something we didn't create
4782 a DIE for. Avoid crashing later on a NULL referenced DIE. */
4783 if (targ_die == NULL)
4784 return;
4785
4786 attr.dw_attr = attr_kind;
4787 attr.dw_attr_val.val_class = dw_val_class_die_ref;
4788 attr.dw_attr_val.val_entry = NULL;
4789 attr.dw_attr_val.v.val_die_ref.die = targ_die;
4790 attr.dw_attr_val.v.val_die_ref.external = 0;
4791 add_dwarf_attr (die, &attr);
4792 }
4793
4794 /* Change DIE reference REF to point to NEW_DIE instead. */
4795
4796 static inline void
4797 change_AT_die_ref (dw_attr_node *ref, dw_die_ref new_die)
4798 {
4799 gcc_assert (ref->dw_attr_val.val_class == dw_val_class_die_ref);
4800 ref->dw_attr_val.v.val_die_ref.die = new_die;
4801 ref->dw_attr_val.v.val_die_ref.external = 0;
4802 }
4803
4804 /* Add an AT_specification attribute to a DIE, and also make the back
4805 pointer from the specification to the definition. */
4806
4807 static inline void
4808 add_AT_specification (dw_die_ref die, dw_die_ref targ_die)
4809 {
4810 add_AT_die_ref (die, DW_AT_specification, targ_die);
4811 gcc_assert (!targ_die->die_definition);
4812 targ_die->die_definition = die;
4813 }
4814
4815 static inline dw_die_ref
4816 AT_ref (dw_attr_node *a)
4817 {
4818 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4819 return a->dw_attr_val.v.val_die_ref.die;
4820 }
4821
4822 static inline int
4823 AT_ref_external (dw_attr_node *a)
4824 {
4825 if (a && AT_class (a) == dw_val_class_die_ref)
4826 return a->dw_attr_val.v.val_die_ref.external;
4827
4828 return 0;
4829 }
4830
4831 static inline void
4832 set_AT_ref_external (dw_attr_node *a, int i)
4833 {
4834 gcc_assert (a && AT_class (a) == dw_val_class_die_ref);
4835 a->dw_attr_val.v.val_die_ref.external = i;
4836 }
4837
4838 /* Add a location description attribute value to a DIE. */
4839
4840 static inline void
4841 add_AT_loc (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_descr_ref loc)
4842 {
4843 dw_attr_node attr;
4844
4845 attr.dw_attr = attr_kind;
4846 attr.dw_attr_val.val_class = dw_val_class_loc;
4847 attr.dw_attr_val.val_entry = NULL;
4848 attr.dw_attr_val.v.val_loc = loc;
4849 add_dwarf_attr (die, &attr);
4850 }
4851
4852 static inline dw_loc_descr_ref
4853 AT_loc (dw_attr_node *a)
4854 {
4855 gcc_assert (a && AT_class (a) == dw_val_class_loc);
4856 return a->dw_attr_val.v.val_loc;
4857 }
4858
4859 static inline void
4860 add_AT_loc_list (dw_die_ref die, enum dwarf_attribute attr_kind, dw_loc_list_ref loc_list)
4861 {
4862 dw_attr_node attr;
4863
4864 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4865 return;
4866
4867 attr.dw_attr = attr_kind;
4868 attr.dw_attr_val.val_class = dw_val_class_loc_list;
4869 attr.dw_attr_val.val_entry = NULL;
4870 attr.dw_attr_val.v.val_loc_list = loc_list;
4871 add_dwarf_attr (die, &attr);
4872 have_location_lists = true;
4873 }
4874
4875 static inline dw_loc_list_ref
4876 AT_loc_list (dw_attr_node *a)
4877 {
4878 gcc_assert (a && AT_class (a) == dw_val_class_loc_list);
4879 return a->dw_attr_val.v.val_loc_list;
4880 }
4881
4882 /* Add a view list attribute to DIE. It must have a DW_AT_location
4883 attribute, because the view list complements the location list. */
4884
4885 static inline void
4886 add_AT_view_list (dw_die_ref die, enum dwarf_attribute attr_kind)
4887 {
4888 dw_attr_node attr;
4889
4890 if (XCOFF_DEBUGGING_INFO && !HAVE_XCOFF_DWARF_EXTRAS)
4891 return;
4892
4893 attr.dw_attr = attr_kind;
4894 attr.dw_attr_val.val_class = dw_val_class_view_list;
4895 attr.dw_attr_val.val_entry = NULL;
4896 attr.dw_attr_val.v.val_view_list = die;
4897 add_dwarf_attr (die, &attr);
4898 gcc_checking_assert (get_AT (die, DW_AT_location));
4899 gcc_assert (have_location_lists);
4900 }
4901
4902 /* Return a pointer to the location list referenced by the attribute.
4903 If the named attribute is a view list, look up the corresponding
4904 DW_AT_location attribute and return its location list. */
4905
4906 static inline dw_loc_list_ref *
4907 AT_loc_list_ptr (dw_attr_node *a)
4908 {
4909 gcc_assert (a);
4910 switch (AT_class (a))
4911 {
4912 case dw_val_class_loc_list:
4913 return &a->dw_attr_val.v.val_loc_list;
4914 case dw_val_class_view_list:
4915 {
4916 dw_attr_node *l;
4917 l = get_AT (a->dw_attr_val.v.val_view_list, DW_AT_location);
4918 if (!l)
4919 return NULL;
4920 gcc_checking_assert (l + 1 == a);
4921 return AT_loc_list_ptr (l);
4922 }
4923 default:
4924 gcc_unreachable ();
4925 }
4926 }
4927
4928 /* Return the location attribute value associated with a view list
4929 attribute value. */
4930
4931 static inline dw_val_node *
4932 view_list_to_loc_list_val_node (dw_val_node *val)
4933 {
4934 gcc_assert (val->val_class == dw_val_class_view_list);
4935 dw_attr_node *loc = get_AT (val->v.val_view_list, DW_AT_location);
4936 if (!loc)
4937 return NULL;
4938 gcc_checking_assert (&(loc + 1)->dw_attr_val == val);
4939 gcc_assert (AT_class (loc) == dw_val_class_loc_list);
4940 return &loc->dw_attr_val;
4941 }
4942
4943 struct addr_hasher : ggc_ptr_hash<addr_table_entry>
4944 {
4945 static hashval_t hash (addr_table_entry *);
4946 static bool equal (addr_table_entry *, addr_table_entry *);
4947 };
4948
4949 /* Table of entries into the .debug_addr section. */
4950
4951 static GTY (()) hash_table<addr_hasher> *addr_index_table;
4952
4953 /* Hash an address_table_entry. */
4954
4955 hashval_t
4956 addr_hasher::hash (addr_table_entry *a)
4957 {
4958 inchash::hash hstate;
4959 switch (a->kind)
4960 {
4961 case ate_kind_rtx:
4962 hstate.add_int (0);
4963 break;
4964 case ate_kind_rtx_dtprel:
4965 hstate.add_int (1);
4966 break;
4967 case ate_kind_label:
4968 return htab_hash_string (a->addr.label);
4969 default:
4970 gcc_unreachable ();
4971 }
4972 inchash::add_rtx (a->addr.rtl, hstate);
4973 return hstate.end ();
4974 }
4975
4976 /* Determine equality for two address_table_entries. */
4977
4978 bool
4979 addr_hasher::equal (addr_table_entry *a1, addr_table_entry *a2)
4980 {
4981 if (a1->kind != a2->kind)
4982 return 0;
4983 switch (a1->kind)
4984 {
4985 case ate_kind_rtx:
4986 case ate_kind_rtx_dtprel:
4987 return rtx_equal_p (a1->addr.rtl, a2->addr.rtl);
4988 case ate_kind_label:
4989 return strcmp (a1->addr.label, a2->addr.label) == 0;
4990 default:
4991 gcc_unreachable ();
4992 }
4993 }
4994
4995 /* Initialize an addr_table_entry. */
4996
4997 void
4998 init_addr_table_entry (addr_table_entry *e, enum ate_kind kind, void *addr)
4999 {
5000 e->kind = kind;
5001 switch (kind)
5002 {
5003 case ate_kind_rtx:
5004 case ate_kind_rtx_dtprel:
5005 e->addr.rtl = (rtx) addr;
5006 break;
5007 case ate_kind_label:
5008 e->addr.label = (char *) addr;
5009 break;
5010 }
5011 e->refcount = 0;
5012 e->index = NO_INDEX_ASSIGNED;
5013 }
5014
5015 /* Look up or create the address table entry for ADDR of kind KIND and
5016 bump its refcount. Defer setting an index until output time. */
5017
5018 static addr_table_entry *
5019 add_addr_table_entry (void *addr, enum ate_kind kind)
5020 {
5021 addr_table_entry *node;
5022 addr_table_entry finder;
5023
5024 gcc_assert (dwarf_split_debug_info);
5025 if (! addr_index_table)
5026 addr_index_table = hash_table<addr_hasher>::create_ggc (10);
5027 init_addr_table_entry (&finder, kind, addr);
5028 addr_table_entry **slot = addr_index_table->find_slot (&finder, INSERT);
5029
5030 if (*slot == HTAB_EMPTY_ENTRY)
5031 {
5032 node = ggc_cleared_alloc<addr_table_entry> ();
5033 init_addr_table_entry (node, kind, addr);
5034 *slot = node;
5035 }
5036 else
5037 node = *slot;
5038
5039 node->refcount++;
5040 return node;
5041 }
5042
5043 /* Remove an entry from the addr table by decrementing its refcount.
5044 Strictly, decrementing the refcount would be enough, but the
5045 assertion that the entry is actually in the table has found
5046 bugs. */
5047
5048 static void
5049 remove_addr_table_entry (addr_table_entry *entry)
5050 {
5051 gcc_assert (dwarf_split_debug_info && addr_index_table);
5052 /* After an index is assigned, the table is frozen. */
5053 gcc_assert (entry->refcount > 0 && entry->index == NO_INDEX_ASSIGNED);
5054 entry->refcount--;
5055 }
5056
5057 /* Given a location list, remove all addresses it refers to from the
5058 address_table. */
5059
5060 static void
5061 remove_loc_list_addr_table_entries (dw_loc_descr_ref descr)
5062 {
5063 for (; descr; descr = descr->dw_loc_next)
5064 if (descr->dw_loc_oprnd1.val_entry != NULL)
5065 {
5066 gcc_assert (descr->dw_loc_oprnd1.val_entry->index == NO_INDEX_ASSIGNED);
5067 remove_addr_table_entry (descr->dw_loc_oprnd1.val_entry);
5068 }
5069 }
5070
5071 /* A helper function for dwarf2out_finish called through
5072 htab_traverse. Assign an addr_table_entry its index. All entries
5073 must be collected into the table when this function is called,
5074 because the indexing code relies on htab_traverse to traverse nodes
5075 in the same order for each run. */
5076
5077 int
5078 index_addr_table_entry (addr_table_entry **h, unsigned int *index)
5079 {
5080 addr_table_entry *node = *h;
5081
5082 /* Don't index unreferenced nodes. */
5083 if (node->refcount == 0)
5084 return 1;
5085
5086 gcc_assert (node->index == NO_INDEX_ASSIGNED);
5087 node->index = *index;
5088 *index += 1;
5089
5090 return 1;
5091 }
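
/* For illustration, a sketch (assumed, not quoted from the output code later
   in this file): at output time the whole table is walked with something
   along the lines of

     unsigned int index = 0;
     addr_index_table->traverse_noresize
       <unsigned int *, index_addr_table_entry> (&index);

   so every run visits the slots in the same order and assigns consecutive
   indices to the entries that are still referenced.  */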
5092
5093 /* Add an address constant attribute value to a DIE. When using
5094 dwarf_split_debug_info, address attributes in dies destined for the
5095 final executable should be direct references--setting the parameter
5096 force_direct ensures this behavior. */
5097
5098 static inline void
5099 add_AT_addr (dw_die_ref die, enum dwarf_attribute attr_kind, rtx addr,
5100 bool force_direct)
5101 {
5102 dw_attr_node attr;
5103
5104 attr.dw_attr = attr_kind;
5105 attr.dw_attr_val.val_class = dw_val_class_addr;
5106 attr.dw_attr_val.v.val_addr = addr;
5107 if (dwarf_split_debug_info && !force_direct)
5108 attr.dw_attr_val.val_entry = add_addr_table_entry (addr, ate_kind_rtx);
5109 else
5110 attr.dw_attr_val.val_entry = NULL;
5111 add_dwarf_attr (die, &attr);
5112 }
5113
5114 /* Get the RTX from an address DIE attribute. */
5115
5116 static inline rtx
5117 AT_addr (dw_attr_node *a)
5118 {
5119 gcc_assert (a && AT_class (a) == dw_val_class_addr);
5120 return a->dw_attr_val.v.val_addr;
5121 }
5122
5123 /* Add a file attribute value to a DIE. */
5124
5125 static inline void
5126 add_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind,
5127 struct dwarf_file_data *fd)
5128 {
5129 dw_attr_node attr;
5130
5131 attr.dw_attr = attr_kind;
5132 attr.dw_attr_val.val_class = dw_val_class_file;
5133 attr.dw_attr_val.val_entry = NULL;
5134 attr.dw_attr_val.v.val_file = fd;
5135 add_dwarf_attr (die, &attr);
5136 }
5137
5138 /* Get the dwarf_file_data from a file DIE attribute. */
5139
5140 static inline struct dwarf_file_data *
5141 AT_file (dw_attr_node *a)
5142 {
5143 gcc_assert (a && (AT_class (a) == dw_val_class_file
5144 || AT_class (a) == dw_val_class_file_implicit));
5145 return a->dw_attr_val.v.val_file;
5146 }
5147
5148 /* Add a vms delta attribute value to a DIE. */
5149
5150 static inline void
5151 add_AT_vms_delta (dw_die_ref die, enum dwarf_attribute attr_kind,
5152 const char *lbl1, const char *lbl2)
5153 {
5154 dw_attr_node attr;
5155
5156 attr.dw_attr = attr_kind;
5157 attr.dw_attr_val.val_class = dw_val_class_vms_delta;
5158 attr.dw_attr_val.val_entry = NULL;
5159 attr.dw_attr_val.v.val_vms_delta.lbl1 = xstrdup (lbl1);
5160 attr.dw_attr_val.v.val_vms_delta.lbl2 = xstrdup (lbl2);
5161 add_dwarf_attr (die, &attr);
5162 }
5163
5164 /* Add a symbolic view identifier attribute value to a DIE. */
5165
5166 static inline void
5167 add_AT_symview (dw_die_ref die, enum dwarf_attribute attr_kind,
5168 const char *view_label)
5169 {
5170 dw_attr_node attr;
5171
5172 attr.dw_attr = attr_kind;
5173 attr.dw_attr_val.val_class = dw_val_class_symview;
5174 attr.dw_attr_val.val_entry = NULL;
5175 attr.dw_attr_val.v.val_symbolic_view = xstrdup (view_label);
5176 add_dwarf_attr (die, &attr);
5177 }
5178
5179 /* Add a label identifier attribute value to a DIE. */
5180
5181 static inline void
5182 add_AT_lbl_id (dw_die_ref die, enum dwarf_attribute attr_kind,
5183 const char *lbl_id)
5184 {
5185 dw_attr_node attr;
5186
5187 attr.dw_attr = attr_kind;
5188 attr.dw_attr_val.val_class = dw_val_class_lbl_id;
5189 attr.dw_attr_val.val_entry = NULL;
5190 attr.dw_attr_val.v.val_lbl_id = xstrdup (lbl_id);
5191 if (dwarf_split_debug_info)
5192 attr.dw_attr_val.val_entry
5193 = add_addr_table_entry (attr.dw_attr_val.v.val_lbl_id,
5194 ate_kind_label);
5195 add_dwarf_attr (die, &attr);
5196 }
5197
5198 /* Add a section offset attribute value to a DIE, an offset into the
5199 debug_line section. */
5200
5201 static inline void
5202 add_AT_lineptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5203 const char *label)
5204 {
5205 dw_attr_node attr;
5206
5207 attr.dw_attr = attr_kind;
5208 attr.dw_attr_val.val_class = dw_val_class_lineptr;
5209 attr.dw_attr_val.val_entry = NULL;
5210 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5211 add_dwarf_attr (die, &attr);
5212 }
5213
5214 /* Add a section offset attribute value to a DIE, an offset into the
5215 debug_macinfo section. */
5216
5217 static inline void
5218 add_AT_macptr (dw_die_ref die, enum dwarf_attribute attr_kind,
5219 const char *label)
5220 {
5221 dw_attr_node attr;
5222
5223 attr.dw_attr = attr_kind;
5224 attr.dw_attr_val.val_class = dw_val_class_macptr;
5225 attr.dw_attr_val.val_entry = NULL;
5226 attr.dw_attr_val.v.val_lbl_id = xstrdup (label);
5227 add_dwarf_attr (die, &attr);
5228 }
5229
5230 /* Add a range_list attribute value to a DIE. When using
5231 dwarf_split_debug_info, address attributes in dies destined for the
5232 final executable should be direct references--setting the parameter
5233 force_direct ensures this behavior. */
5234
5235 #define UNRELOCATED_OFFSET ((addr_table_entry *) 1)
5236 #define RELOCATED_OFFSET (NULL)
5237
5238 static void
5239 add_AT_range_list (dw_die_ref die, enum dwarf_attribute attr_kind,
5240 long unsigned int offset, bool force_direct)
5241 {
5242 dw_attr_node attr;
5243
5244 attr.dw_attr = attr_kind;
5245 attr.dw_attr_val.val_class = dw_val_class_range_list;
5246 /* For the range_list attribute, use val_entry to store whether the
5247 offset should follow split-debug-info or normal semantics. This
5248 value is read in output_range_list_offset. */
5249 if (dwarf_split_debug_info && !force_direct)
5250 attr.dw_attr_val.val_entry = UNRELOCATED_OFFSET;
5251 else
5252 attr.dw_attr_val.val_entry = RELOCATED_OFFSET;
5253 attr.dw_attr_val.v.val_offset = offset;
5254 add_dwarf_attr (die, &attr);
5255 }
5256
5257 /* Return the start label of a delta attribute. */
5258
5259 static inline const char *
5260 AT_vms_delta1 (dw_attr_node *a)
5261 {
5262 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5263 return a->dw_attr_val.v.val_vms_delta.lbl1;
5264 }
5265
5266 /* Return the end label of a delta attribute. */
5267
5268 static inline const char *
5269 AT_vms_delta2 (dw_attr_node *a)
5270 {
5271 gcc_assert (a && (AT_class (a) == dw_val_class_vms_delta));
5272 return a->dw_attr_val.v.val_vms_delta.lbl2;
5273 }
5274
5275 static inline const char *
5276 AT_lbl (dw_attr_node *a)
5277 {
5278 gcc_assert (a && (AT_class (a) == dw_val_class_lbl_id
5279 || AT_class (a) == dw_val_class_lineptr
5280 || AT_class (a) == dw_val_class_macptr
5281 || AT_class (a) == dw_val_class_loclistsptr
5282 || AT_class (a) == dw_val_class_high_pc));
5283 return a->dw_attr_val.v.val_lbl_id;
5284 }
5285
5286 /* Get the attribute of type attr_kind. */
5287
5288 static dw_attr_node *
5289 get_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5290 {
5291 dw_attr_node *a;
5292 unsigned ix;
5293 dw_die_ref spec = NULL;
5294
5295 if (! die)
5296 return NULL;
5297
5298 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5299 if (a->dw_attr == attr_kind)
5300 return a;
5301 else if (a->dw_attr == DW_AT_specification
5302 || a->dw_attr == DW_AT_abstract_origin)
5303 spec = AT_ref (a);
5304
5305 if (spec)
5306 return get_AT (spec, attr_kind);
5307
5308 return NULL;
5309 }
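
/* For illustration (hypothetical DIEs): for a C++ member function defined
   outside its class, the DIEs typically look like

     DW_TAG_class_type "C"
       DW_TAG_subprogram "f"     <- in-class declaration, carries DW_AT_name
     DW_TAG_subprogram           <- definition, DW_AT_specification -> declaration

   get_AT on the definition DIE for DW_AT_name finds nothing locally, records
   the DW_AT_specification link while scanning, and then retries the lookup on
   the declaration DIE, so specification and abstract-origin links are
   followed transparently.  */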
5310
5311 /* Returns the parent of the declaration of DIE. */
5312
5313 static dw_die_ref
5314 get_die_parent (dw_die_ref die)
5315 {
5316 dw_die_ref t;
5317
5318 if (!die)
5319 return NULL;
5320
5321 if ((t = get_AT_ref (die, DW_AT_abstract_origin))
5322 || (t = get_AT_ref (die, DW_AT_specification)))
5323 die = t;
5324
5325 return die->die_parent;
5326 }
5327
5328 /* Return the "low pc" attribute value, typically associated with a subprogram
5329 DIE. Return null if the "low pc" attribute is either not present, or if it
5330 cannot be represented as an assembler label identifier. */
5331
5332 static inline const char *
5333 get_AT_low_pc (dw_die_ref die)
5334 {
5335 dw_attr_node *a = get_AT (die, DW_AT_low_pc);
5336
5337 return a ? AT_lbl (a) : NULL;
5338 }
5339
5340 /* Return the value of the string attribute designated by ATTR_KIND, or
5341 NULL if it is not present. */
5342
5343 static inline const char *
5344 get_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind)
5345 {
5346 dw_attr_node *a = get_AT (die, attr_kind);
5347
5348 return a ? AT_string (a) : NULL;
5349 }
5350
5351 /* Return the value of the flag attribute designated by ATTR_KIND, or 0
5352 if it is not present. */
5353
5354 static inline int
5355 get_AT_flag (dw_die_ref die, enum dwarf_attribute attr_kind)
5356 {
5357 dw_attr_node *a = get_AT (die, attr_kind);
5358
5359 return a ? AT_flag (a) : 0;
5360 }
5361
5362 /* Return the value of the unsigned attribute designated by ATTR_KIND, or 0
5363 if it is not present. */
5364
5365 static inline unsigned
5366 get_AT_unsigned (dw_die_ref die, enum dwarf_attribute attr_kind)
5367 {
5368 dw_attr_node *a = get_AT (die, attr_kind);
5369
5370 return a ? AT_unsigned (a) : 0;
5371 }
5372
5373 static inline dw_die_ref
5374 get_AT_ref (dw_die_ref die, enum dwarf_attribute attr_kind)
5375 {
5376 dw_attr_node *a = get_AT (die, attr_kind);
5377
5378 return a ? AT_ref (a) : NULL;
5379 }
5380
5381 static inline struct dwarf_file_data *
5382 get_AT_file (dw_die_ref die, enum dwarf_attribute attr_kind)
5383 {
5384 dw_attr_node *a = get_AT (die, attr_kind);
5385
5386 return a ? AT_file (a) : NULL;
5387 }
5388
5389 /* Return TRUE if the language is C. */
5390
5391 static inline bool
5392 is_c (void)
5393 {
5394 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5395
5396 return (lang == DW_LANG_C || lang == DW_LANG_C89 || lang == DW_LANG_C99
5397 || lang == DW_LANG_C11 || lang == DW_LANG_ObjC);
5400 }
5401
5402 /* Return TRUE if the language is C++. */
5403
5404 static inline bool
5405 is_cxx (void)
5406 {
5407 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5408
5409 return (lang == DW_LANG_C_plus_plus || lang == DW_LANG_ObjC_plus_plus
5410 || lang == DW_LANG_C_plus_plus_11 || lang == DW_LANG_C_plus_plus_14);
5411 }
5412
5413 /* Return TRUE if DECL was created by the C++ frontend. */
5414
5415 static bool
5416 is_cxx (const_tree decl)
5417 {
5418 if (in_lto_p)
5419 {
5420 const_tree context = get_ultimate_context (decl);
5421 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5422 return strncmp (TRANSLATION_UNIT_LANGUAGE (context), "GNU C++", 7) == 0;
5423 }
5424 return is_cxx ();
5425 }
5426
5427 /* Return TRUE if the language is Fortran. */
5428
5429 static inline bool
5430 is_fortran (void)
5431 {
5432 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5433
5434 return (lang == DW_LANG_Fortran77
5435 || lang == DW_LANG_Fortran90
5436 || lang == DW_LANG_Fortran95
5437 || lang == DW_LANG_Fortran03
5438 || lang == DW_LANG_Fortran08);
5439 }
5440
5441 static inline bool
5442 is_fortran (const_tree decl)
5443 {
5444 if (in_lto_p)
5445 {
5446 const_tree context = get_ultimate_context (decl);
5447 if (context && TRANSLATION_UNIT_LANGUAGE (context))
5448 return (strncmp (TRANSLATION_UNIT_LANGUAGE (context),
5449 "GNU Fortran", 11) == 0
5450 || strcmp (TRANSLATION_UNIT_LANGUAGE (context),
5451 "GNU F77") == 0);
5452 }
5453 return is_fortran ();
5454 }
5455
5456 /* Return TRUE if the language is Ada. */
5457
5458 static inline bool
5459 is_ada (void)
5460 {
5461 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5462
5463 return lang == DW_LANG_Ada95 || lang == DW_LANG_Ada83;
5464 }
5465
5466 /* Return TRUE if the language is D. */
5467
5468 static inline bool
5469 is_dlang (void)
5470 {
5471 unsigned int lang = get_AT_unsigned (comp_unit_die (), DW_AT_language);
5472
5473 return lang == DW_LANG_D;
5474 }
5475
5476 /* Remove the specified attribute if present. Return TRUE if removal
5477 was successful. */
5478
5479 static bool
5480 remove_AT (dw_die_ref die, enum dwarf_attribute attr_kind)
5481 {
5482 dw_attr_node *a;
5483 unsigned ix;
5484
5485 if (! die)
5486 return false;
5487
5488 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
5489 if (a->dw_attr == attr_kind)
5490 {
5491 if (AT_class (a) == dw_val_class_str)
5492 if (a->dw_attr_val.v.val_str->refcount)
5493 a->dw_attr_val.v.val_str->refcount--;
5494
5495 /* vec::ordered_remove should help reduce the number of abbrevs
5496 that are needed. */
5497 die->die_attr->ordered_remove (ix);
5498 return true;
5499 }
5500 return false;
5501 }
5502
5503 /* Remove CHILD from its parent. PREV must have the property that
5504 PREV->DIE_SIB == CHILD. Does not alter CHILD. */
5505
5506 static void
5507 remove_child_with_prev (dw_die_ref child, dw_die_ref prev)
5508 {
5509 gcc_assert (child->die_parent == prev->die_parent);
5510 gcc_assert (prev->die_sib == child);
5511 if (prev == child)
5512 {
5513 gcc_assert (child->die_parent->die_child == child);
5514 prev = NULL;
5515 }
5516 else
5517 prev->die_sib = child->die_sib;
5518 if (child->die_parent->die_child == child)
5519 child->die_parent->die_child = prev;
5520 child->die_sib = NULL;
5521 }
5522
5523 /* Replace OLD_CHILD with NEW_CHILD. PREV must have the property that
5524 PREV->DIE_SIB == OLD_CHILD. Does not alter OLD_CHILD. */
5525
5526 static void
5527 replace_child (dw_die_ref old_child, dw_die_ref new_child, dw_die_ref prev)
5528 {
5529 dw_die_ref parent = old_child->die_parent;
5530
5531 gcc_assert (parent == prev->die_parent);
5532 gcc_assert (prev->die_sib == old_child);
5533
5534 new_child->die_parent = parent;
5535 if (prev == old_child)
5536 {
5537 gcc_assert (parent->die_child == old_child);
5538 new_child->die_sib = new_child;
5539 }
5540 else
5541 {
5542 prev->die_sib = new_child;
5543 new_child->die_sib = old_child->die_sib;
5544 }
5545 if (old_child->die_parent->die_child == old_child)
5546 old_child->die_parent->die_child = new_child;
5547 old_child->die_sib = NULL;
5548 }
5549
5550 /* Move all children from OLD_PARENT to NEW_PARENT. */
5551
5552 static void
5553 move_all_children (dw_die_ref old_parent, dw_die_ref new_parent)
5554 {
5555 dw_die_ref c;
5556 new_parent->die_child = old_parent->die_child;
5557 old_parent->die_child = NULL;
5558 FOR_EACH_CHILD (new_parent, c, c->die_parent = new_parent);
5559 }
5560
5561 /* Remove child DIE whose die_tag is TAG. Do nothing if no child
5562 matches TAG. */
5563
5564 static void
5565 remove_child_TAG (dw_die_ref die, enum dwarf_tag tag)
5566 {
5567 dw_die_ref c;
5568
5569 c = die->die_child;
5570 if (c) do {
5571 dw_die_ref prev = c;
5572 c = c->die_sib;
5573 while (c->die_tag == tag)
5574 {
5575 remove_child_with_prev (c, prev);
5576 c->die_parent = NULL;
5577 /* Might have removed every child. */
5578 if (die->die_child == NULL)
5579 return;
5580 c = prev->die_sib;
5581 }
5582 } while (c != die->die_child);
5583 }
5584
5585 /* Add a CHILD_DIE as the last child of DIE. */
5586
5587 static void
5588 add_child_die (dw_die_ref die, dw_die_ref child_die)
5589 {
5590 /* FIXME this should probably be an assert. */
5591 if (! die || ! child_die)
5592 return;
5593 gcc_assert (die != child_die);
5594
5595 child_die->die_parent = die;
5596 if (die->die_child)
5597 {
5598 child_die->die_sib = die->die_child->die_sib;
5599 die->die_child->die_sib = child_die;
5600 }
5601 else
5602 child_die->die_sib = child_die;
5603 die->die_child = child_die;
5604 }
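
/* For illustration (hypothetical DIEs A, B, C): after adding children A, B
   and C to a parent P with add_child_die, the links are

     P->die_child = C      (the most recently added, i.e. last, child)
     C->die_sib   = A      (the last child's sibling link closes the cycle)
     A->die_sib   = B
     B->die_sib   = C

   so the children form a cyclic, singly-linked list whose head pointer names
   the *last* child.  FOR_EACH_CHILD therefore starts at P->die_child->die_sib
   (the first child) and stops after visiting P->die_child, and the routines
   above that take a PREV argument rely on this layout.  */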
5605
5606 /* Like add_child_die, but put CHILD_DIE after AFTER_DIE. */
5607
5608 static void
5609 add_child_die_after (dw_die_ref die, dw_die_ref child_die,
5610 dw_die_ref after_die)
5611 {
5612 gcc_assert (die
5613 && child_die
5614 && after_die
5615 && die->die_child
5616 && die != child_die);
5617
5618 child_die->die_parent = die;
5619 child_die->die_sib = after_die->die_sib;
5620 after_die->die_sib = child_die;
5621 if (die->die_child == after_die)
5622 die->die_child = child_die;
5623 }
5624
5625 /* Unassociate CHILD from its parent, and make its parent be
5626 NEW_PARENT. */
5627
5628 static void
5629 reparent_child (dw_die_ref child, dw_die_ref new_parent)
5630 {
5631 for (dw_die_ref p = child->die_parent->die_child; ; p = p->die_sib)
5632 if (p->die_sib == child)
5633 {
5634 remove_child_with_prev (child, p);
5635 break;
5636 }
5637 add_child_die (new_parent, child);
5638 }
5639
5640 /* Move CHILD, which must be a child of PARENT or the DIE for which PARENT
5641 is the specification, to the end of PARENT's list of children.
5642 This is done by removing and re-adding it. */
5643
5644 static void
5645 splice_child_die (dw_die_ref parent, dw_die_ref child)
5646 {
5647 /* We want the declaration DIE from inside the class, not the
5648 specification DIE at toplevel. */
5649 if (child->die_parent != parent)
5650 {
5651 dw_die_ref tmp = get_AT_ref (child, DW_AT_specification);
5652
5653 if (tmp)
5654 child = tmp;
5655 }
5656
5657 gcc_assert (child->die_parent == parent
5658 || (child->die_parent
5659 == get_AT_ref (parent, DW_AT_specification)));
5660
5661 reparent_child (child, parent);
5662 }
5663
5664 /* Create and return a new die with TAG_VALUE as tag. */
5665
5666 static inline dw_die_ref
5667 new_die_raw (enum dwarf_tag tag_value)
5668 {
5669 dw_die_ref die = ggc_cleared_alloc<die_node> ();
5670 die->die_tag = tag_value;
5671 return die;
5672 }
5673
5674 /* Create and return a new die with a parent of PARENT_DIE. If
5675 PARENT_DIE is NULL, the new DIE is placed in limbo and an
5676 associated tree T must be supplied to determine parenthood
5677 later. */
5678
5679 static inline dw_die_ref
5680 new_die (enum dwarf_tag tag_value, dw_die_ref parent_die, tree t)
5681 {
5682 dw_die_ref die = new_die_raw (tag_value);
5683
5684 if (parent_die != NULL)
5685 add_child_die (parent_die, die);
5686 else
5687 {
5688 limbo_die_node *limbo_node;
5689
5690 /* No DIEs created after early dwarf should end up in limbo,
5691 because the limbo list should not persist past LTO
5692 streaming. */
5693 if (tag_value != DW_TAG_compile_unit
5694 /* These are allowed because they're generated while
5695 breaking out COMDAT units late. */
5696 && tag_value != DW_TAG_type_unit
5697 && tag_value != DW_TAG_skeleton_unit
5698 && !early_dwarf
5699 /* Allow nested functions to live in limbo because they will
5700 only temporarily live there, as decls_for_scope will fix
5701 them up. */
5702 && (TREE_CODE (t) != FUNCTION_DECL
5703 || !decl_function_context (t))
5704 /* Same as nested functions above but for types. Types that
5705 are local to a function will be fixed in
5706 decls_for_scope. */
5707 && (!RECORD_OR_UNION_TYPE_P (t)
5708 || !TYPE_CONTEXT (t)
5709 || TREE_CODE (TYPE_CONTEXT (t)) != FUNCTION_DECL)
5710 /* FIXME debug-early: Allow late limbo DIE creation for LTO,
5711 especially in the ltrans stage, but once we implement LTO
5712 dwarf streaming, we should remove this exception. */
5713 && !in_lto_p)
5714 {
5715 fprintf (stderr, "symbol ended up in limbo too late:");
5716 debug_generic_stmt (t);
5717 gcc_unreachable ();
5718 }
5719
5720 limbo_node = ggc_cleared_alloc<limbo_die_node> ();
5721 limbo_node->die = die;
5722 limbo_node->created_for = t;
5723 limbo_node->next = limbo_die_list;
5724 limbo_die_list = limbo_node;
5725 }
5726
5727 return die;
5728 }
5729
5730 /* Return the DIE associated with the given type specifier. */
5731
5732 static inline dw_die_ref
5733 lookup_type_die (tree type)
5734 {
5735 dw_die_ref die = TYPE_SYMTAB_DIE (type);
5736 if (die && die->removed)
5737 {
5738 TYPE_SYMTAB_DIE (type) = NULL;
5739 return NULL;
5740 }
5741 return die;
5742 }
5743
5744 /* Given a TYPE_DIE representing the type TYPE, if TYPE is an
5745 anonymous type named by the typedef TYPE_DIE, return the DIE of the
5746 anonymous type instead of the one of the naming typedef. */
5747
5748 static inline dw_die_ref
5749 strip_naming_typedef (tree type, dw_die_ref type_die)
5750 {
5751 if (type
5752 && TREE_CODE (type) == RECORD_TYPE
5753 && type_die
5754 && type_die->die_tag == DW_TAG_typedef
5755 && is_naming_typedef_decl (TYPE_NAME (type)))
5756 type_die = get_AT_ref (type_die, DW_AT_type);
5757 return type_die;
5758 }
5759
5760 /* Like lookup_type_die, but if type is an anonymous type named by a
5761 typedef[1], return the DIE of the anonymous type instead of the one of
5762 the naming typedef. This is because in gen_typedef_die, we did
5763 equate the anonymous struct named by the typedef with the DIE of
5764 the naming typedef. So by default, lookup_type_die on an anonymous
5765 struct yields the DIE of the naming typedef.
5766
5767 [1]: Read the comment of is_naming_typedef_decl to learn about what
5768 a naming typedef is. */
5769
5770 static inline dw_die_ref
5771 lookup_type_die_strip_naming_typedef (tree type)
5772 {
5773 dw_die_ref die = lookup_type_die (type);
5774 return strip_naming_typedef (type, die);
5775 }
5776
5777 /* Equate a DIE to a given type specifier. */
5778
5779 static inline void
5780 equate_type_number_to_die (tree type, dw_die_ref type_die)
5781 {
5782 TYPE_SYMTAB_DIE (type) = type_die;
5783 }
5784
5785 static dw_die_ref maybe_create_die_with_external_ref (tree);
5786 struct GTY(()) sym_off_pair
5787 {
5788 const char * GTY((skip)) sym;
5789 unsigned HOST_WIDE_INT off;
5790 };
5791 static GTY(()) hash_map<tree, sym_off_pair> *external_die_map;
5792
5793 /* Returns a hash value for X (which really is a die_struct). */
5794
5795 inline hashval_t
5796 decl_die_hasher::hash (die_node *x)
5797 {
5798 return (hashval_t) x->decl_id;
5799 }
5800
5801 /* Return nonzero if decl_id of die_struct X is the same as UID of decl *Y. */
5802
5803 inline bool
5804 decl_die_hasher::equal (die_node *x, tree y)
5805 {
5806 return (x->decl_id == DECL_UID (y));
5807 }
5808
5809 /* Return the DIE associated with a given declaration. */
5810
5811 static inline dw_die_ref
5812 lookup_decl_die (tree decl)
5813 {
5814 dw_die_ref *die = decl_die_table->find_slot_with_hash (decl, DECL_UID (decl),
5815 NO_INSERT);
5816 if (!die)
5817 {
5818 if (in_lto_p)
5819 return maybe_create_die_with_external_ref (decl);
5820 return NULL;
5821 }
5822 if ((*die)->removed)
5823 {
5824 decl_die_table->clear_slot (die);
5825 return NULL;
5826 }
5827 return *die;
5828 }
5829
5830
5831 /* Return the DIE associated with BLOCK. */
5832
5833 static inline dw_die_ref
5834 lookup_block_die (tree block)
5835 {
5836 dw_die_ref die = BLOCK_DIE (block);
5837 if (!die && in_lto_p)
5838 return maybe_create_die_with_external_ref (block);
5839 return die;
5840 }
5841
5842 /* Associate DIE with BLOCK. */
5843
5844 static inline void
5845 equate_block_to_die (tree block, dw_die_ref die)
5846 {
5847 BLOCK_DIE (block) = die;
5848 }
5849 #undef BLOCK_DIE
5850
5851
5852 /* For DECL, which might have early dwarf output, query a SYMBOL + OFFSET
5853 style reference. Return true if we found one referring to a DIE for
5854 DECL, otherwise return false. */
5855
5856 static bool
5857 dwarf2out_die_ref_for_decl (tree decl, const char **sym,
5858 unsigned HOST_WIDE_INT *off)
5859 {
5860 dw_die_ref die;
5861
5862 if (in_lto_p)
5863 {
5864 /* During WPA stage and incremental linking we use a hash-map
5865 to store the decl <-> label + offset map. */
5866 if (!external_die_map)
5867 return false;
5868 sym_off_pair *desc = external_die_map->get (decl);
5869 if (!desc)
5870 return false;
5871 *sym = desc->sym;
5872 *off = desc->off;
5873 return true;
5874 }
5875
5876 if (TREE_CODE (decl) == BLOCK)
5877 die = lookup_block_die (decl);
5878 else
5879 die = lookup_decl_die (decl);
5880 if (!die)
5881 return false;
5882
5883 /* Similar to get_ref_die_offset_label, but using the "correct"
5884 label. */
5885 *off = die->die_offset;
5886 while (die->die_parent)
5887 die = die->die_parent;
5888 /* For the containing CU DIE we compute a die_symbol in
5889 compute_comp_unit_symbol. */
5890 gcc_assert (die->die_tag == DW_TAG_compile_unit
5891 && die->die_id.die_symbol != NULL);
5892 *sym = die->die_id.die_symbol;
5893 return true;
5894 }
5895
5896 /* Add a reference of kind ATTR_KIND to a DIE at SYMBOL + OFFSET to DIE. */
5897
5898 static void
5899 add_AT_external_die_ref (dw_die_ref die, enum dwarf_attribute attr_kind,
5900 const char *symbol, HOST_WIDE_INT offset)
5901 {
5902 /* Create a fake DIE that contains the reference. Don't use
5903 new_die because we don't want to end up in the limbo list. */
5904 /* ??? We probably want to share these, thus put a ref to the DIE
5905 we create here to the external_die_map entry. */
5906 dw_die_ref ref = new_die_raw (die->die_tag);
5907 ref->die_id.die_symbol = symbol;
5908 ref->die_offset = offset;
5909 ref->with_offset = 1;
5910 add_AT_die_ref (die, attr_kind, ref);
5911 }
5912
5913 /* Create a DIE for DECL if required and add a reference to a DIE
5914 at SYMBOL + OFFSET which contains attributes dumped early. */
5915
5916 static void
5917 dwarf2out_register_external_die (tree decl, const char *sym,
5918 unsigned HOST_WIDE_INT off)
5919 {
5920 if (debug_info_level == DINFO_LEVEL_NONE)
5921 return;
5922
5923 if (!external_die_map)
5924 external_die_map = hash_map<tree, sym_off_pair>::create_ggc (1000);
5925 gcc_checking_assert (!external_die_map->get (decl));
5926 sym_off_pair p = { IDENTIFIER_POINTER (get_identifier (sym)), off };
5927 external_die_map->put (decl, p);
5928 }
5929
5930 /* If we have a registered external DIE for DECL return a new DIE for
5931 the concrete instance with an appropriate abstract origin. */
5932
5933 static dw_die_ref
5934 maybe_create_die_with_external_ref (tree decl)
5935 {
5936 if (!external_die_map)
5937 return NULL;
5938 sym_off_pair *desc = external_die_map->get (decl);
5939 if (!desc)
5940 return NULL;
5941
5942 const char *sym = desc->sym;
5943 unsigned HOST_WIDE_INT off = desc->off;
5944
5945 in_lto_p = false;
5946 dw_die_ref die = (TREE_CODE (decl) == BLOCK
5947 ? lookup_block_die (decl) : lookup_decl_die (decl));
5948 gcc_assert (!die);
5949 in_lto_p = true;
5950
5951 tree ctx;
5952 dw_die_ref parent = NULL;
5953 /* Need to look up a DIE for the decl's context - the containing
5954 function or translation unit. */
5955 if (TREE_CODE (decl) == BLOCK)
5956 {
5957 ctx = BLOCK_SUPERCONTEXT (decl);
5958 /* ??? We do not output DIEs for all scopes thus skip as
5959 many DIEs as needed. */
5960 while (TREE_CODE (ctx) == BLOCK
5961 && !lookup_block_die (ctx))
5962 ctx = BLOCK_SUPERCONTEXT (ctx);
5963 }
5964 else
5965 ctx = DECL_CONTEXT (decl);
5966 /* Peel types in the context stack. */
5967 while (ctx && TYPE_P (ctx))
5968 ctx = TYPE_CONTEXT (ctx);
5969 /* Likewise namespaces in case we do not want to emit DIEs for them. */
5970 if (debug_info_level <= DINFO_LEVEL_TERSE)
5971 while (ctx && TREE_CODE (ctx) == NAMESPACE_DECL)
5972 ctx = DECL_CONTEXT (ctx);
5973 if (ctx)
5974 {
5975 if (TREE_CODE (ctx) == BLOCK)
5976 parent = lookup_block_die (ctx);
5977 else if (TREE_CODE (ctx) == TRANSLATION_UNIT_DECL
5978 /* Keep the 1:1 association during WPA. */
5979 && !flag_wpa
5980 && flag_incremental_link != INCREMENTAL_LINK_LTO)
5981 /* Otherwise all late annotations go to the main CU which
5982 imports the original CUs. */
5983 parent = comp_unit_die ();
5984 else if (TREE_CODE (ctx) == FUNCTION_DECL
5985 && TREE_CODE (decl) != FUNCTION_DECL
5986 && TREE_CODE (decl) != PARM_DECL
5987 && TREE_CODE (decl) != RESULT_DECL
5988 && TREE_CODE (decl) != BLOCK)
5989 /* Leave determining the parent of function-local entities to when
5990 we process scope variables. */
5991 ;
5992 else
5993 parent = lookup_decl_die (ctx);
5994 }
5995 else
5996 /* In some cases the FEs fail to set DECL_CONTEXT properly.
5997 Handle this case gracefully by globalizing stuff. */
5998 parent = comp_unit_die ();
5999 /* Create a DIE "stub". */
6000 switch (TREE_CODE (decl))
6001 {
6002 case TRANSLATION_UNIT_DECL:
6003 {
6004 die = comp_unit_die ();
6005 /* We re-target all CU decls to the LTRANS CU DIE, so no need
6006 to create a DIE for the original CUs. */
6007 return die;
6008 }
6009 case NAMESPACE_DECL:
6010 if (is_fortran (decl))
6011 die = new_die (DW_TAG_module, parent, decl);
6012 else
6013 die = new_die (DW_TAG_namespace, parent, decl);
6014 break;
6015 case FUNCTION_DECL:
6016 die = new_die (DW_TAG_subprogram, parent, decl);
6017 break;
6018 case VAR_DECL:
6019 die = new_die (DW_TAG_variable, parent, decl);
6020 break;
6021 case RESULT_DECL:
6022 die = new_die (DW_TAG_variable, parent, decl);
6023 break;
6024 case PARM_DECL:
6025 die = new_die (DW_TAG_formal_parameter, parent, decl);
6026 break;
6027 case CONST_DECL:
6028 die = new_die (DW_TAG_constant, parent, decl);
6029 break;
6030 case LABEL_DECL:
6031 die = new_die (DW_TAG_label, parent, decl);
6032 break;
6033 case BLOCK:
6034 die = new_die (DW_TAG_lexical_block, parent, decl);
6035 break;
6036 default:
6037 gcc_unreachable ();
6038 }
6039 if (TREE_CODE (decl) == BLOCK)
6040 equate_block_to_die (decl, die);
6041 else
6042 equate_decl_number_to_die (decl, die);
6043
6044 add_desc_attribute (die, decl);
6045
6046 /* Add a reference to the DIE providing early debug at $sym + off. */
6047 add_AT_external_die_ref (die, DW_AT_abstract_origin, sym, off);
6048
6049 return die;
6050 }
6051
6052 /* Returns a hash value for X (which really is a var_loc_list). */
6053
6054 inline hashval_t
6055 decl_loc_hasher::hash (var_loc_list *x)
6056 {
6057 return (hashval_t) x->decl_id;
6058 }
6059
6060 /* Return nonzero if decl_id of var_loc_list X is the same as
6061 UID of decl *Y. */
6062
6063 inline bool
6064 decl_loc_hasher::equal (var_loc_list *x, const_tree y)
6065 {
6066 return (x->decl_id == DECL_UID (y));
6067 }
6068
6069 /* Return the var_loc list associated with a given declaration. */
6070
6071 static inline var_loc_list *
6072 lookup_decl_loc (const_tree decl)
6073 {
6074 if (!decl_loc_table)
6075 return NULL;
6076 return decl_loc_table->find_with_hash (decl, DECL_UID (decl));
6077 }
6078
6079 /* Returns a hash value for X (which really is a cached_dw_loc_list). */
6080
6081 inline hashval_t
6082 dw_loc_list_hasher::hash (cached_dw_loc_list *x)
6083 {
6084 return (hashval_t) x->decl_id;
6085 }
6086
6087 /* Return nonzero if decl_id of cached_dw_loc_list X is the same as
6088 UID of decl *Y. */
6089
6090 inline bool
6091 dw_loc_list_hasher::equal (cached_dw_loc_list *x, const_tree y)
6092 {
6093 return (x->decl_id == DECL_UID (y));
6094 }
6095
6096 /* Equate a DIE to a particular declaration. */
6097
6098 static void
6099 equate_decl_number_to_die (tree decl, dw_die_ref decl_die)
6100 {
6101 unsigned int decl_id = DECL_UID (decl);
6102
6103 *decl_die_table->find_slot_with_hash (decl, decl_id, INSERT) = decl_die;
6104 decl_die->decl_id = decl_id;
6105 }
6106
6107 /* Return how many bits the PIECE EXPR_LIST covers. */
6108
6109 static HOST_WIDE_INT
6110 decl_piece_bitsize (rtx piece)
6111 {
6112 int ret = (int) GET_MODE (piece);
6113 if (ret)
6114 return ret;
6115 gcc_assert (GET_CODE (XEXP (piece, 0)) == CONCAT
6116 && CONST_INT_P (XEXP (XEXP (piece, 0), 0)));
6117 return INTVAL (XEXP (XEXP (piece, 0), 0));
6118 }
6119
6120 /* Return pointer to the location of location note in PIECE EXPR_LIST. */
6121
6122 static rtx *
6123 decl_piece_varloc_ptr (rtx piece)
6124 {
6125 if ((int) GET_MODE (piece))
6126 return &XEXP (piece, 0);
6127 else
6128 return &XEXP (XEXP (piece, 0), 1);
6129 }
6130
6131 /* Create an EXPR_LIST for location note LOC_NOTE covering BITSIZE bits.
6132 NEXT is the chain of following piece nodes. */
6133
6134 static rtx_expr_list *
6135 decl_piece_node (rtx loc_note, HOST_WIDE_INT bitsize, rtx next)
6136 {
6137 if (bitsize > 0 && bitsize <= (int) MAX_MACHINE_MODE)
6138 return alloc_EXPR_LIST (bitsize, loc_note, next);
6139 else
6140 return alloc_EXPR_LIST (0, gen_rtx_CONCAT (VOIDmode,
6141 GEN_INT (bitsize),
6142 loc_note), next);
6143 }
6144
6145 /* Return rtx that should be stored into loc field for
6146 LOC_NOTE and BITPOS/BITSIZE. */
6147
6148 static rtx
6149 construct_piece_list (rtx loc_note, HOST_WIDE_INT bitpos,
6150 HOST_WIDE_INT bitsize)
6151 {
6152 if (bitsize != -1)
6153 {
6154 loc_note = decl_piece_node (loc_note, bitsize, NULL_RTX);
6155 if (bitpos != 0)
6156 loc_note = decl_piece_node (NULL_RTX, bitpos, loc_note);
6157 }
6158 return loc_note;
6159 }
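
/* For illustration (hypothetical values): a location note that covers bits
   32..63 of a variable, i.e. BITPOS == 32 and BITSIZE == 32, comes out of
   construct_piece_list as two chained piece nodes:

     EXPR_LIST, bit count 32, location NULL_RTX       <- padding for bits 0..31
       -> EXPR_LIST, bit count 32, location LOC_NOTE  <- the piece that changed

   A BITSIZE of -1 means the note describes the whole variable, so LOC_NOTE is
   returned unwrapped.  As decl_piece_node shows, a bit count small enough to
   fit is smuggled into the EXPR_LIST's machine-mode field; larger counts are
   kept as GEN_INT (bitsize) inside a CONCAT together with the location.  */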
6160
6161 /* This function either modifies the location piece list *DEST in
6162 place (if SRC and INNER are NULL), or copies the location piece
6163 list *SRC to *DEST while modifying it. The piece at BITPOS is
6164 changed to contain LOC_NOTE; any pieces overlapping it are removed
6165 (or, when copying, simply not copied) and, if needed, some padding
6166 is added around it. When modifying in place, DEST should point to
6167 the EXPR_LIST where earlier pieces cover PIECE_BITPOS bits; when
6168 copying, SRC points to the start of the whole list and INNER points
6169 to the EXPR_LIST where earlier pieces cover PIECE_BITPOS bits. */
6170
6171 static void
6172 adjust_piece_list (rtx *dest, rtx *src, rtx *inner,
6173 HOST_WIDE_INT bitpos, HOST_WIDE_INT piece_bitpos,
6174 HOST_WIDE_INT bitsize, rtx loc_note)
6175 {
6176 HOST_WIDE_INT diff;
6177 bool copy = inner != NULL;
6178
6179 if (copy)
6180 {
6181 /* First copy all nodes preceding the current bitpos. */
6182 while (src != inner)
6183 {
6184 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6185 decl_piece_bitsize (*src), NULL_RTX);
6186 dest = &XEXP (*dest, 1);
6187 src = &XEXP (*src, 1);
6188 }
6189 }
6190 /* Add padding if needed. */
6191 if (bitpos != piece_bitpos)
6192 {
6193 *dest = decl_piece_node (NULL_RTX, bitpos - piece_bitpos,
6194 copy ? NULL_RTX : *dest);
6195 dest = &XEXP (*dest, 1);
6196 }
6197 else if (*dest && decl_piece_bitsize (*dest) == bitsize)
6198 {
6199 gcc_assert (!copy);
6200 /* A piece with the correct bitpos and bitsize already exists;
6201 just update its location and return. */
6202 *decl_piece_varloc_ptr (*dest) = loc_note;
6203 return;
6204 }
6205 /* Add the piece that changed. */
6206 *dest = decl_piece_node (loc_note, bitsize, copy ? NULL_RTX : *dest);
6207 dest = &XEXP (*dest, 1);
6208 /* Skip over pieces that overlap it. */
6209 diff = bitpos - piece_bitpos + bitsize;
6210 if (!copy)
6211 src = dest;
6212 while (diff > 0 && *src)
6213 {
6214 rtx piece = *src;
6215 diff -= decl_piece_bitsize (piece);
6216 if (copy)
6217 src = &XEXP (piece, 1);
6218 else
6219 {
6220 *src = XEXP (piece, 1);
6221 free_EXPR_LIST_node (piece);
6222 }
6223 }
6224 /* Add padding if needed. */
6225 if (diff < 0 && *src)
6226 {
6227 if (!copy)
6228 dest = src;
6229 *dest = decl_piece_node (NULL_RTX, -diff, copy ? NULL_RTX : *dest);
6230 dest = &XEXP (*dest, 1);
6231 }
6232 if (!copy)
6233 return;
6234 /* Finally copy all nodes following it. */
6235 while (*src)
6236 {
6237 *dest = decl_piece_node (*decl_piece_varloc_ptr (*src),
6238 decl_piece_bitsize (*src), NULL_RTX);
6239 dest = &XEXP (*dest, 1);
6240 src = &XEXP (*src, 1);
6241 }
6242 }
6243
6244 /* Add a variable location node to the linked list for DECL. */
6245
6246 static struct var_loc_node *
6247 add_var_loc_to_decl (tree decl, rtx loc_note, const char *label, var_loc_view view)
6248 {
6249 unsigned int decl_id;
6250 var_loc_list *temp;
6251 struct var_loc_node *loc = NULL;
6252 HOST_WIDE_INT bitsize = -1, bitpos = -1;
6253
6254 if (VAR_P (decl) && DECL_HAS_DEBUG_EXPR_P (decl))
6255 {
6256 tree realdecl = DECL_DEBUG_EXPR (decl);
6257 if (handled_component_p (realdecl)
6258 || (TREE_CODE (realdecl) == MEM_REF
6259 && TREE_CODE (TREE_OPERAND (realdecl, 0)) == ADDR_EXPR))
6260 {
6261 bool reverse;
6262 tree innerdecl = get_ref_base_and_extent_hwi (realdecl, &bitpos,
6263 &bitsize, &reverse);
6264 if (!innerdecl
6265 || !DECL_P (innerdecl)
6266 || DECL_IGNORED_P (innerdecl)
6267 || TREE_STATIC (innerdecl)
6268 || bitsize == 0
6269 || bitpos + bitsize > 256)
6270 return NULL;
6271 decl = innerdecl;
6272 }
6273 }
6274
6275 decl_id = DECL_UID (decl);
6276 var_loc_list **slot
6277 = decl_loc_table->find_slot_with_hash (decl, decl_id, INSERT);
6278 if (*slot == NULL)
6279 {
6280 temp = ggc_cleared_alloc<var_loc_list> ();
6281 temp->decl_id = decl_id;
6282 *slot = temp;
6283 }
6284 else
6285 temp = *slot;
6286
6287 /* For PARM_DECLs try to keep around the original incoming value,
6288 even if that means we'll emit a zero-range .debug_loc entry. */
6289 if (temp->last
6290 && temp->first == temp->last
6291 && TREE_CODE (decl) == PARM_DECL
6292 && NOTE_P (temp->first->loc)
6293 && NOTE_VAR_LOCATION_DECL (temp->first->loc) == decl
6294 && DECL_INCOMING_RTL (decl)
6295 && NOTE_VAR_LOCATION_LOC (temp->first->loc)
6296 && GET_CODE (NOTE_VAR_LOCATION_LOC (temp->first->loc))
6297 == GET_CODE (DECL_INCOMING_RTL (decl))
6298 && prev_real_insn (as_a<rtx_insn *> (temp->first->loc)) == NULL_RTX
6299 && (bitsize != -1
6300 || !rtx_equal_p (NOTE_VAR_LOCATION_LOC (temp->first->loc),
6301 NOTE_VAR_LOCATION_LOC (loc_note))
6302 || (NOTE_VAR_LOCATION_STATUS (temp->first->loc)
6303 != NOTE_VAR_LOCATION_STATUS (loc_note))))
6304 {
6305 loc = ggc_cleared_alloc<var_loc_node> ();
6306 temp->first->next = loc;
6307 temp->last = loc;
6308 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6309 }
6310 else if (temp->last)
6311 {
6312 struct var_loc_node *last = temp->last, *unused = NULL;
6313 rtx *piece_loc = NULL, last_loc_note;
6314 HOST_WIDE_INT piece_bitpos = 0;
6315 if (last->next)
6316 {
6317 last = last->next;
6318 gcc_assert (last->next == NULL);
6319 }
6320 if (bitsize != -1 && GET_CODE (last->loc) == EXPR_LIST)
6321 {
6322 piece_loc = &last->loc;
6323 do
6324 {
6325 HOST_WIDE_INT cur_bitsize = decl_piece_bitsize (*piece_loc);
6326 if (piece_bitpos + cur_bitsize > bitpos)
6327 break;
6328 piece_bitpos += cur_bitsize;
6329 piece_loc = &XEXP (*piece_loc, 1);
6330 }
6331 while (*piece_loc);
6332 }
6333 /* TEMP->LAST here points either to the last-but-one or to the
6334 last element of the chained list, while LAST points to the
6335 last element. */
6336 if (label && strcmp (last->label, label) == 0 && last->view == view)
6337 {
6338 /* For SRA-optimized variables, if there weren't any real
6339 insns since the last note, just modify the last node. */
6340 if (piece_loc != NULL)
6341 {
6342 adjust_piece_list (piece_loc, NULL, NULL,
6343 bitpos, piece_bitpos, bitsize, loc_note);
6344 return NULL;
6345 }
6346 /* If the last note doesn't cover any instructions, remove it. */
6347 if (temp->last != last)
6348 {
6349 temp->last->next = NULL;
6350 unused = last;
6351 last = temp->last;
6352 gcc_assert (strcmp (last->label, label) != 0 || last->view != view);
6353 }
6354 else
6355 {
6356 gcc_assert (temp->first == temp->last
6357 || (temp->first->next == temp->last
6358 && TREE_CODE (decl) == PARM_DECL));
6359 memset (temp->last, '\0', sizeof (*temp->last));
6360 temp->last->loc = construct_piece_list (loc_note, bitpos, bitsize);
6361 return temp->last;
6362 }
6363 }
6364 if (bitsize == -1 && NOTE_P (last->loc))
6365 last_loc_note = last->loc;
6366 else if (piece_loc != NULL
6367 && *piece_loc != NULL_RTX
6368 && piece_bitpos == bitpos
6369 && decl_piece_bitsize (*piece_loc) == bitsize)
6370 last_loc_note = *decl_piece_varloc_ptr (*piece_loc);
6371 else
6372 last_loc_note = NULL_RTX;
6373 /* If the current location is the same as the end of the list,
6374 and either both or neither of the locations is uninitialized,
6375 we have nothing to do. */
6376 if (last_loc_note == NULL_RTX
6377 || (!rtx_equal_p (NOTE_VAR_LOCATION_LOC (last_loc_note),
6378 NOTE_VAR_LOCATION_LOC (loc_note)))
6379 || ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6380 != NOTE_VAR_LOCATION_STATUS (loc_note))
6381 && ((NOTE_VAR_LOCATION_STATUS (last_loc_note)
6382 == VAR_INIT_STATUS_UNINITIALIZED)
6383 || (NOTE_VAR_LOCATION_STATUS (loc_note)
6384 == VAR_INIT_STATUS_UNINITIALIZED))))
6385 {
6386 /* Add LOC to the end of list and update LAST. If the last
6387 element of the list has been removed above, reuse its
6388 memory for the new node, otherwise allocate a new one. */
6389 if (unused)
6390 {
6391 loc = unused;
6392 memset (loc, '\0', sizeof (*loc));
6393 }
6394 else
6395 loc = ggc_cleared_alloc<var_loc_node> ();
6396 if (bitsize == -1 || piece_loc == NULL)
6397 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6398 else
6399 adjust_piece_list (&loc->loc, &last->loc, piece_loc,
6400 bitpos, piece_bitpos, bitsize, loc_note);
6401 last->next = loc;
6402 /* Ensure TEMP->LAST will point either to the new last but one
6403 element of the chain, or to the last element in it. */
6404 if (last != temp->last)
6405 temp->last = last;
6406 }
6407 else if (unused)
6408 ggc_free (unused);
6409 }
6410 else
6411 {
6412 loc = ggc_cleared_alloc<var_loc_node> ();
6413 temp->first = loc;
6414 temp->last = loc;
6415 loc->loc = construct_piece_list (loc_note, bitpos, bitsize);
6416 }
6417 return loc;
6418 }
6419 \f
6420 /* Keep track of the number of spaces used to indent the
6421 output of the debugging routines that print the structure of
6422 the DIE internal representation. */
6423 static int print_indent;
6424
6425 /* Indent the line the number of spaces given by print_indent. */
6426
6427 static inline void
6428 print_spaces (FILE *outfile)
6429 {
6430 fprintf (outfile, "%*s", print_indent, "");
6431 }
6432
6433 /* Print a type signature in hex. */
6434
6435 static inline void
6436 print_signature (FILE *outfile, char *sig)
6437 {
6438 int i;
6439
6440 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
6441 fprintf (outfile, "%02x", sig[i] & 0xff);
6442 }
6443
6444 static inline void
6445 print_discr_value (FILE *outfile, dw_discr_value *discr_value)
6446 {
6447 if (discr_value->pos)
6448 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, discr_value->v.uval);
6449 else
6450 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, discr_value->v.sval);
6451 }
6452
6453 static void print_loc_descr (dw_loc_descr_ref, FILE *);
6454
6455 /* Print the value associated with the VAL DWARF value node to OUTFILE. If
6456 RECURSE, output location descriptor operations. */
6457
6458 static void
6459 print_dw_val (dw_val_node *val, bool recurse, FILE *outfile)
6460 {
6461 switch (val->val_class)
6462 {
6463 case dw_val_class_addr:
6464 fprintf (outfile, "address");
6465 break;
6466 case dw_val_class_offset:
6467 fprintf (outfile, "offset");
6468 break;
6469 case dw_val_class_loc:
6470 fprintf (outfile, "location descriptor");
6471 if (val->v.val_loc == NULL)
6472 fprintf (outfile, " -> <null>\n");
6473 else if (recurse)
6474 {
6475 fprintf (outfile, ":\n");
6476 print_indent += 4;
6477 print_loc_descr (val->v.val_loc, outfile);
6478 print_indent -= 4;
6479 }
6480 else
6481 {
6482 if (flag_dump_noaddr || flag_dump_unnumbered)
6483 fprintf (outfile, " #\n");
6484 else
6485 fprintf (outfile, " (%p)\n", (void *) val->v.val_loc);
6486 }
6487 break;
6488 case dw_val_class_loc_list:
6489 fprintf (outfile, "location list -> label:%s",
6490 val->v.val_loc_list->ll_symbol);
6491 break;
6492 case dw_val_class_view_list:
6493 val = view_list_to_loc_list_val_node (val);
6494 fprintf (outfile, "location list with views -> labels:%s and %s",
6495 val->v.val_loc_list->ll_symbol,
6496 val->v.val_loc_list->vl_symbol);
6497 break;
6498 case dw_val_class_range_list:
6499 fprintf (outfile, "range list");
6500 break;
6501 case dw_val_class_const:
6502 case dw_val_class_const_implicit:
6503 fprintf (outfile, HOST_WIDE_INT_PRINT_DEC, val->v.val_int);
6504 break;
6505 case dw_val_class_unsigned_const:
6506 case dw_val_class_unsigned_const_implicit:
6507 fprintf (outfile, HOST_WIDE_INT_PRINT_UNSIGNED, val->v.val_unsigned);
6508 break;
6509 case dw_val_class_const_double:
6510 fprintf (outfile, "constant (" HOST_WIDE_INT_PRINT_DEC","\
6511 HOST_WIDE_INT_PRINT_UNSIGNED")",
6512 val->v.val_double.high,
6513 val->v.val_double.low);
6514 break;
6515 case dw_val_class_wide_int:
6516 {
6517 int i = val->v.val_wide->get_len ();
6518 fprintf (outfile, "constant (");
6519 gcc_assert (i > 0);
6520 if (val->v.val_wide->elt (i - 1) == 0)
6521 fprintf (outfile, "0x");
6522 fprintf (outfile, HOST_WIDE_INT_PRINT_HEX,
6523 val->v.val_wide->elt (--i));
6524 while (--i >= 0)
6525 fprintf (outfile, HOST_WIDE_INT_PRINT_PADDED_HEX,
6526 val->v.val_wide->elt (i));
6527 fprintf (outfile, ")");
6528 break;
6529 }
6530 case dw_val_class_vec:
6531 fprintf (outfile, "floating-point or vector constant");
6532 break;
6533 case dw_val_class_flag:
6534 fprintf (outfile, "%u", val->v.val_flag);
6535 break;
6536 case dw_val_class_die_ref:
6537 if (val->v.val_die_ref.die != NULL)
6538 {
6539 dw_die_ref die = val->v.val_die_ref.die;
6540
6541 if (die->comdat_type_p)
6542 {
6543 fprintf (outfile, "die -> signature: ");
6544 print_signature (outfile,
6545 die->die_id.die_type_node->signature);
6546 }
6547 else if (die->die_id.die_symbol)
6548 {
6549 fprintf (outfile, "die -> label: %s", die->die_id.die_symbol);
6550 if (die->with_offset)
6551 fprintf (outfile, " + %ld", die->die_offset);
6552 }
6553 else
6554 fprintf (outfile, "die -> %ld", die->die_offset);
6555 if (flag_dump_noaddr || flag_dump_unnumbered)
6556 fprintf (outfile, " #");
6557 else
6558 fprintf (outfile, " (%p)", (void *) die);
6559 }
6560 else
6561 fprintf (outfile, "die -> <null>");
6562 break;
6563 case dw_val_class_vms_delta:
6564 fprintf (outfile, "delta: @slotcount(%s-%s)",
6565 val->v.val_vms_delta.lbl2, val->v.val_vms_delta.lbl1);
6566 break;
6567 case dw_val_class_symview:
6568 fprintf (outfile, "view: %s", val->v.val_symbolic_view);
6569 break;
6570 case dw_val_class_lbl_id:
6571 case dw_val_class_lineptr:
6572 case dw_val_class_macptr:
6573 case dw_val_class_loclistsptr:
6574 case dw_val_class_high_pc:
6575 fprintf (outfile, "label: %s", val->v.val_lbl_id);
6576 break;
6577 case dw_val_class_str:
6578 if (val->v.val_str->str != NULL)
6579 fprintf (outfile, "\"%s\"", val->v.val_str->str);
6580 else
6581 fprintf (outfile, "<null>");
6582 break;
6583 case dw_val_class_file:
6584 case dw_val_class_file_implicit:
6585 fprintf (outfile, "\"%s\" (%d)", val->v.val_file->filename,
6586 val->v.val_file->emitted_number);
6587 break;
6588 case dw_val_class_data8:
6589 {
6590 int i;
6591
6592 for (i = 0; i < 8; i++)
6593 fprintf (outfile, "%02x", val->v.val_data8[i]);
6594 break;
6595 }
6596 case dw_val_class_discr_value:
6597 print_discr_value (outfile, &val->v.val_discr_value);
6598 break;
6599 case dw_val_class_discr_list:
6600 for (dw_discr_list_ref node = val->v.val_discr_list;
6601 node != NULL;
6602 node = node->dw_discr_next)
6603 {
6604 if (node->dw_discr_range)
6605 {
6606 print_discr_value (outfile, &node->dw_discr_lower_bound);
6607 fprintf (outfile, " .. ");
6608 print_discr_value (outfile, &node->dw_discr_upper_bound);
6609 }
6610 else
6611 print_discr_value (outfile, &node->dw_discr_lower_bound);
6612
6613 if (node->dw_discr_next != NULL)
6614 fprintf (outfile, " | ");
6615 }
break;
6616 default:
6617 break;
6618 }
6619 }
6620
6621 /* Likewise, for a DIE attribute. */
6622
6623 static void
6624 print_attribute (dw_attr_node *a, bool recurse, FILE *outfile)
6625 {
6626 print_dw_val (&a->dw_attr_val, recurse, outfile);
6627 }
6628
6629
6630 /* Print the list of operands in the LOC location description to OUTFILE. This
6631 routine is a debugging aid only. */
6632
6633 static void
6634 print_loc_descr (dw_loc_descr_ref loc, FILE *outfile)
6635 {
6636 dw_loc_descr_ref l = loc;
6637
6638 if (loc == NULL)
6639 {
6640 print_spaces (outfile);
6641 fprintf (outfile, "<null>\n");
6642 return;
6643 }
6644
6645 for (l = loc; l != NULL; l = l->dw_loc_next)
6646 {
6647 print_spaces (outfile);
6648 if (flag_dump_noaddr || flag_dump_unnumbered)
6649 fprintf (outfile, "#");
6650 else
6651 fprintf (outfile, "(%p)", (void *) l);
6652 fprintf (outfile, " %s",
6653 dwarf_stack_op_name (l->dw_loc_opc));
6654 if (l->dw_loc_oprnd1.val_class != dw_val_class_none)
6655 {
6656 fprintf (outfile, " ");
6657 print_dw_val (&l->dw_loc_oprnd1, false, outfile);
6658 }
6659 if (l->dw_loc_oprnd2.val_class != dw_val_class_none)
6660 {
6661 fprintf (outfile, ", ");
6662 print_dw_val (&l->dw_loc_oprnd2, false, outfile);
6663 }
6664 fprintf (outfile, "\n");
6665 }
6666 }
6667
6668 /* Print the information associated with a given DIE, and its children.
6669 This routine is a debugging aid only. */
6670
6671 static void
6672 print_die (dw_die_ref die, FILE *outfile)
6673 {
6674 dw_attr_node *a;
6675 dw_die_ref c;
6676 unsigned ix;
6677
6678 print_spaces (outfile);
6679 fprintf (outfile, "DIE %4ld: %s ",
6680 die->die_offset, dwarf_tag_name (die->die_tag));
6681 if (flag_dump_noaddr || flag_dump_unnumbered)
6682 fprintf (outfile, "#\n");
6683 else
6684 fprintf (outfile, "(%p)\n", (void*) die);
6685 print_spaces (outfile);
6686 fprintf (outfile, " abbrev id: %lu", die->die_abbrev);
6687 fprintf (outfile, " offset: %ld", die->die_offset);
6688 fprintf (outfile, " mark: %d\n", die->die_mark);
6689
6690 if (die->comdat_type_p)
6691 {
6692 print_spaces (outfile);
6693 fprintf (outfile, " signature: ");
6694 print_signature (outfile, die->die_id.die_type_node->signature);
6695 fprintf (outfile, "\n");
6696 }
6697
6698 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6699 {
6700 print_spaces (outfile);
6701 fprintf (outfile, " %s: ", dwarf_attr_name (a->dw_attr));
6702
6703 print_attribute (a, true, outfile);
6704 fprintf (outfile, "\n");
6705 }
6706
6707 if (die->die_child != NULL)
6708 {
6709 print_indent += 4;
6710 FOR_EACH_CHILD (die, c, print_die (c, outfile));
6711 print_indent -= 4;
6712 }
6713 if (print_indent == 0)
6714 fprintf (outfile, "\n");
6715 }
6716
6717 /* Print the list of operations in the LOC location description. */
6718
6719 DEBUG_FUNCTION void
6720 debug_dwarf_loc_descr (dw_loc_descr_ref loc)
6721 {
6722 print_loc_descr (loc, stderr);
6723 }
6724
6725 /* Print the information collected for a given DIE. */
6726
6727 DEBUG_FUNCTION void
6728 debug_dwarf_die (dw_die_ref die)
6729 {
6730 print_die (die, stderr);
6731 }
6732
6733 DEBUG_FUNCTION void
6734 debug (die_struct &ref)
6735 {
6736 print_die (&ref, stderr);
6737 }
6738
6739 DEBUG_FUNCTION void
6740 debug (die_struct *ptr)
6741 {
6742 if (ptr)
6743 debug (*ptr);
6744 else
6745 fprintf (stderr, "<nil>\n");
6746 }
6747
6748
6749 /* Print all DWARF information collected for the compilation unit.
6750 This routine is a debugging aid only. */
6751
6752 DEBUG_FUNCTION void
6753 debug_dwarf (void)
6754 {
6755 print_indent = 0;
6756 print_die (comp_unit_die (), stderr);
6757 }
6758
6759 /* Verify the DIE tree structure. */
6760
6761 DEBUG_FUNCTION void
6762 verify_die (dw_die_ref die)
6763 {
6764 gcc_assert (!die->die_mark);
6765 if (die->die_parent == NULL
6766 && die->die_sib == NULL)
6767 return;
6768 /* Verify the die_sib list is cyclic. */
6769 dw_die_ref x = die;
6770 do
6771 {
6772 x->die_mark = 1;
6773 x = x->die_sib;
6774 }
6775 while (x && !x->die_mark);
6776 gcc_assert (x == die);
6777 x = die;
6778 do
6779 {
6780 /* Verify all dies have the same parent. */
6781 gcc_assert (x->die_parent == die->die_parent);
6782 if (x->die_child)
6783 {
6784 /* Verify the child has the proper parent and recurse. */
6785 gcc_assert (x->die_child->die_parent == x);
6786 verify_die (x->die_child);
6787 }
6788 x->die_mark = 0;
6789 x = x->die_sib;
6790 }
6791 while (x && x->die_mark);
6792 }
6793
6794 /* Sanity checks on DIEs. */
6795
6796 static void
6797 check_die (dw_die_ref die)
6798 {
6799 unsigned ix;
6800 dw_attr_node *a;
6801 bool inline_found = false;
6802 int n_location = 0, n_low_pc = 0, n_high_pc = 0, n_artificial = 0;
6803 int n_decl_line = 0, n_decl_column = 0, n_decl_file = 0;
6804 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6805 {
6806 switch (a->dw_attr)
6807 {
6808 case DW_AT_inline:
6809 if (a->dw_attr_val.v.val_unsigned)
6810 inline_found = true;
6811 break;
6812 case DW_AT_location:
6813 ++n_location;
6814 break;
6815 case DW_AT_low_pc:
6816 ++n_low_pc;
6817 break;
6818 case DW_AT_high_pc:
6819 ++n_high_pc;
6820 break;
6821 case DW_AT_artificial:
6822 ++n_artificial;
6823 break;
6824 case DW_AT_decl_column:
6825 ++n_decl_column;
6826 break;
6827 case DW_AT_decl_line:
6828 ++n_decl_line;
6829 break;
6830 case DW_AT_decl_file:
6831 ++n_decl_file;
6832 break;
6833 default:
6834 break;
6835 }
6836 }
6837 if (n_location > 1 || n_low_pc > 1 || n_high_pc > 1 || n_artificial > 1
6838 || n_decl_column > 1 || n_decl_line > 1 || n_decl_file > 1)
6839 {
6840 fprintf (stderr, "Duplicate attributes in DIE:\n");
6841 debug_dwarf_die (die);
6842 gcc_unreachable ();
6843 }
6844 if (inline_found)
6845 {
6846 /* A debugging information entry that is a member of an abstract
6847 instance tree [that has DW_AT_inline] should not contain any
6848 attributes which describe aspects of the subroutine which vary
6849 between distinct inlined expansions or distinct out-of-line
6850 expansions. */
6851 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6852 gcc_assert (a->dw_attr != DW_AT_low_pc
6853 && a->dw_attr != DW_AT_high_pc
6854 && a->dw_attr != DW_AT_location
6855 && a->dw_attr != DW_AT_frame_base
6856 && a->dw_attr != DW_AT_call_all_calls
6857 && a->dw_attr != DW_AT_GNU_all_call_sites);
6858 }
6859 }
6860 \f
6861 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
6862 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
6863 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO), ctx)
6864
6865 /* Calculate the checksum of a location expression. */
6866
6867 static inline void
6868 loc_checksum (dw_loc_descr_ref loc, struct md5_ctx *ctx)
6869 {
6870 int tem;
6871 inchash::hash hstate;
6872 hashval_t hash;
6873
6874 tem = (loc->dtprel << 8) | ((unsigned int) loc->dw_loc_opc);
6875 CHECKSUM (tem);
6876 hash_loc_operands (loc, hstate);
6877 hash = hstate.end();
6878 CHECKSUM (hash);
6879 }
6880
6881 /* Calculate the checksum of an attribute. */
6882
6883 static void
6884 attr_checksum (dw_attr_node *at, struct md5_ctx *ctx, int *mark)
6885 {
6886 dw_loc_descr_ref loc;
6887 rtx r;
6888
6889 CHECKSUM (at->dw_attr);
6890
6891 /* We don't care that this was compiled with a different compiler
6892 snapshot; if the output is the same, that's what matters. */
6893 if (at->dw_attr == DW_AT_producer)
6894 return;
6895
6896 switch (AT_class (at))
6897 {
6898 case dw_val_class_const:
6899 case dw_val_class_const_implicit:
6900 CHECKSUM (at->dw_attr_val.v.val_int);
6901 break;
6902 case dw_val_class_unsigned_const:
6903 case dw_val_class_unsigned_const_implicit:
6904 CHECKSUM (at->dw_attr_val.v.val_unsigned);
6905 break;
6906 case dw_val_class_const_double:
6907 CHECKSUM (at->dw_attr_val.v.val_double);
6908 break;
6909 case dw_val_class_wide_int:
6910 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
6911 get_full_len (*at->dw_attr_val.v.val_wide)
6912 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
6913 break;
6914 case dw_val_class_vec:
6915 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
6916 (at->dw_attr_val.v.val_vec.length
6917 * at->dw_attr_val.v.val_vec.elt_size));
6918 break;
6919 case dw_val_class_flag:
6920 CHECKSUM (at->dw_attr_val.v.val_flag);
6921 break;
6922 case dw_val_class_str:
6923 CHECKSUM_STRING (AT_string (at));
6924 break;
6925
6926 case dw_val_class_addr:
6927 r = AT_addr (at);
6928 gcc_assert (GET_CODE (r) == SYMBOL_REF);
6929 CHECKSUM_STRING (XSTR (r, 0));
6930 break;
6931
6932 case dw_val_class_offset:
6933 CHECKSUM (at->dw_attr_val.v.val_offset);
6934 break;
6935
6936 case dw_val_class_loc:
6937 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
6938 loc_checksum (loc, ctx);
6939 break;
6940
6941 case dw_val_class_die_ref:
6942 die_checksum (AT_ref (at), ctx, mark);
6943 break;
6944
6945 case dw_val_class_fde_ref:
6946 case dw_val_class_vms_delta:
6947 case dw_val_class_symview:
6948 case dw_val_class_lbl_id:
6949 case dw_val_class_lineptr:
6950 case dw_val_class_macptr:
6951 case dw_val_class_loclistsptr:
6952 case dw_val_class_high_pc:
6953 break;
6954
6955 case dw_val_class_file:
6956 case dw_val_class_file_implicit:
6957 CHECKSUM_STRING (AT_file (at)->filename);
6958 break;
6959
6960 case dw_val_class_data8:
6961 CHECKSUM (at->dw_attr_val.v.val_data8);
6962 break;
6963
6964 default:
6965 break;
6966 }
6967 }
6968
6969 /* Calculate the checksum of a DIE. */
6970
6971 static void
6972 die_checksum (dw_die_ref die, struct md5_ctx *ctx, int *mark)
6973 {
6974 dw_die_ref c;
6975 dw_attr_node *a;
6976 unsigned ix;
6977
6978 /* To avoid infinite recursion. */
6979 if (die->die_mark)
6980 {
6981 CHECKSUM (die->die_mark);
6982 return;
6983 }
6984 die->die_mark = ++(*mark);
6985
6986 CHECKSUM (die->die_tag);
6987
6988 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
6989 attr_checksum (a, ctx, mark);
6990
6991 FOR_EACH_CHILD (die, c, die_checksum (c, ctx, mark));
6992 }
6993
6994 #undef CHECKSUM
6995 #undef CHECKSUM_BLOCK
6996 #undef CHECKSUM_STRING
6997
6998 /* For DWARF-4 types, include the trailing NULL when checksumming strings. */
6999 #define CHECKSUM(FOO) md5_process_bytes (&(FOO), sizeof (FOO), ctx)
7000 #define CHECKSUM_BLOCK(FOO, SIZE) md5_process_bytes ((FOO), (SIZE), ctx)
7001 #define CHECKSUM_STRING(FOO) md5_process_bytes ((FOO), strlen (FOO) + 1, ctx)
7002 #define CHECKSUM_SLEB128(FOO) checksum_sleb128 ((FOO), ctx)
7003 #define CHECKSUM_ULEB128(FOO) checksum_uleb128 ((FOO), ctx)
7004 #define CHECKSUM_ATTR(FOO) \
7005 if (FOO) attr_checksum_ordered (die->die_tag, (FOO), ctx, mark)
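
/* Note: CHECKSUM_ATTR expands to a statement that uses the DIE and MARK
   variables of the enclosing function (die_checksum_ordered below), and it
   does nothing when the attribute pointer is NULL.  */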
7006
7007 /* Calculate the checksum of a number in signed LEB128 format. */
7008
7009 static void
7010 checksum_sleb128 (HOST_WIDE_INT value, struct md5_ctx *ctx)
7011 {
7012 unsigned char byte;
7013 bool more;
7014
7015 while (1)
7016 {
7017 byte = (value & 0x7f);
7018 value >>= 7;
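/* The encoding is complete once the remaining value is pure sign extension
   of the byte just produced: all zero bits with the sign bit (0x40) clear,
   or all one bits with the sign bit set.  For example, -2 encodes as the
   single byte 0x7e.  */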
7019 more = !((value == 0 && (byte & 0x40) == 0)
7020 || (value == -1 && (byte & 0x40) != 0));
7021 if (more)
7022 byte |= 0x80;
7023 CHECKSUM (byte);
7024 if (!more)
7025 break;
7026 }
7027 }
7028
7029 /* Calculate the checksum of a number in unsigned LEB128 format. */
7030
7031 static void
7032 checksum_uleb128 (unsigned HOST_WIDE_INT value, struct md5_ctx *ctx)
7033 {
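/* Emit seven bits at a time, setting the high bit on every byte except the
   last.  For example, 129 is checksummed as the two bytes 0x81 0x01.  */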
7034 while (1)
7035 {
7036 unsigned char byte = (value & 0x7f);
7037 value >>= 7;
7038 if (value != 0)
7039 /* More bytes to follow. */
7040 byte |= 0x80;
7041 CHECKSUM (byte);
7042 if (value == 0)
7043 break;
7044 }
7045 }
7046
7047 /* Checksum the context of the DIE. This adds the names of any
7048 surrounding namespaces or structures to the checksum. */
7049
7050 static void
7051 checksum_die_context (dw_die_ref die, struct md5_ctx *ctx)
7052 {
7053 const char *name;
7054 dw_die_ref spec;
7055 int tag = die->die_tag;
7056
7057 if (tag != DW_TAG_namespace
7058 && tag != DW_TAG_structure_type
7059 && tag != DW_TAG_class_type)
7060 return;
7061
7062 name = get_AT_string (die, DW_AT_name);
7063
7064 spec = get_AT_ref (die, DW_AT_specification);
7065 if (spec != NULL)
7066 die = spec;
7067
7068 if (die->die_parent != NULL)
7069 checksum_die_context (die->die_parent, ctx);
7070
7071 CHECKSUM_ULEB128 ('C');
7072 CHECKSUM_ULEB128 (tag);
7073 if (name != NULL)
7074 CHECKSUM_STRING (name);
7075 }
7076
7077 /* Calculate the checksum of a location expression. */
7078
7079 static inline void
7080 loc_checksum_ordered (dw_loc_descr_ref loc, struct md5_ctx *ctx)
7081 {
7082 /* Special case for lone DW_OP_plus_uconst: checksum as if the location
7083 were emitted as a DW_FORM_sdata instead of a location expression. */
7084 if (loc->dw_loc_opc == DW_OP_plus_uconst && loc->dw_loc_next == NULL)
7085 {
7086 CHECKSUM_ULEB128 (DW_FORM_sdata);
7087 CHECKSUM_SLEB128 ((HOST_WIDE_INT) loc->dw_loc_oprnd1.v.val_unsigned);
7088 return;
7089 }
7090
7091 /* Otherwise, just checksum the raw location expression. */
7092 while (loc != NULL)
7093 {
7094 inchash::hash hstate;
7095 hashval_t hash;
7096
7097 CHECKSUM_ULEB128 (loc->dtprel);
7098 CHECKSUM_ULEB128 (loc->dw_loc_opc);
7099 hash_loc_operands (loc, hstate);
7100 hash = hstate.end ();
7101 CHECKSUM (hash);
7102 loc = loc->dw_loc_next;
7103 }
7104 }
7105
7106 /* Calculate the checksum of an attribute. */
7107
7108 static void
7109 attr_checksum_ordered (enum dwarf_tag tag, dw_attr_node *at,
7110 struct md5_ctx *ctx, int *mark)
7111 {
7112 dw_loc_descr_ref loc;
7113 rtx r;
7114
7115 if (AT_class (at) == dw_val_class_die_ref)
7116 {
7117 dw_die_ref target_die = AT_ref (at);
7118
7119 /* For pointer and reference types, we checksum only the (qualified)
7120 name of the target type (if there is a name). For friend entries,
7121 we checksum only the (qualified) name of the target type or function.
7122 This allows the checksum to remain the same whether the target type
7123 is complete or not. */
7124 if ((at->dw_attr == DW_AT_type
7125 && (tag == DW_TAG_pointer_type
7126 || tag == DW_TAG_reference_type
7127 || tag == DW_TAG_rvalue_reference_type
7128 || tag == DW_TAG_ptr_to_member_type))
7129 || (at->dw_attr == DW_AT_friend
7130 && tag == DW_TAG_friend))
7131 {
7132 dw_attr_node *name_attr = get_AT (target_die, DW_AT_name);
7133
7134 if (name_attr != NULL)
7135 {
7136 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7137
7138 if (decl == NULL)
7139 decl = target_die;
7140 CHECKSUM_ULEB128 ('N');
7141 CHECKSUM_ULEB128 (at->dw_attr);
7142 if (decl->die_parent != NULL)
7143 checksum_die_context (decl->die_parent, ctx);
7144 CHECKSUM_ULEB128 ('E');
7145 CHECKSUM_STRING (AT_string (name_attr));
7146 return;
7147 }
7148 }
7149
7150 /* For all other references to another DIE, we check to see if the
7151 target DIE has already been visited. If it has, we emit a
7152 backward reference; if not, we descend recursively. */
7153 if (target_die->die_mark > 0)
7154 {
7155 CHECKSUM_ULEB128 ('R');
7156 CHECKSUM_ULEB128 (at->dw_attr);
7157 CHECKSUM_ULEB128 (target_die->die_mark);
7158 }
7159 else
7160 {
7161 dw_die_ref decl = get_AT_ref (target_die, DW_AT_specification);
7162
7163 if (decl == NULL)
7164 decl = target_die;
7165 target_die->die_mark = ++(*mark);
7166 CHECKSUM_ULEB128 ('T');
7167 CHECKSUM_ULEB128 (at->dw_attr);
7168 if (decl->die_parent != NULL)
7169 checksum_die_context (decl->die_parent, ctx);
7170 die_checksum_ordered (target_die, ctx, mark);
7171 }
7172 return;
7173 }
7174
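/* Checksum an ordinary attribute: the marker 'A', the attribute code, then a
   form code followed by the value.  The single-letter markers used by these
   ordered-checksum routines ('A', 'C', 'D', 'E', 'N', 'R', 'S', 'T') follow
   the type-signature computation algorithm described in the DWARF 4
   standard.  */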
7175 CHECKSUM_ULEB128 ('A');
7176 CHECKSUM_ULEB128 (at->dw_attr);
7177
7178 switch (AT_class (at))
7179 {
7180 case dw_val_class_const:
7181 case dw_val_class_const_implicit:
7182 CHECKSUM_ULEB128 (DW_FORM_sdata);
7183 CHECKSUM_SLEB128 (at->dw_attr_val.v.val_int);
7184 break;
7185
7186 case dw_val_class_unsigned_const:
7187 case dw_val_class_unsigned_const_implicit:
7188 CHECKSUM_ULEB128 (DW_FORM_sdata);
7189 CHECKSUM_SLEB128 ((int) at->dw_attr_val.v.val_unsigned);
7190 break;
7191
7192 case dw_val_class_const_double:
7193 CHECKSUM_ULEB128 (DW_FORM_block);
7194 CHECKSUM_ULEB128 (sizeof (at->dw_attr_val.v.val_double));
7195 CHECKSUM (at->dw_attr_val.v.val_double);
7196 break;
7197
7198 case dw_val_class_wide_int:
7199 CHECKSUM_ULEB128 (DW_FORM_block);
7200 CHECKSUM_ULEB128 (get_full_len (*at->dw_attr_val.v.val_wide)
7201 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7202 CHECKSUM_BLOCK (at->dw_attr_val.v.val_wide->get_val (),
7203 get_full_len (*at->dw_attr_val.v.val_wide)
7204 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
7205 break;
7206
7207 case dw_val_class_vec:
7208 CHECKSUM_ULEB128 (DW_FORM_block);
7209 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_vec.length
7210 * at->dw_attr_val.v.val_vec.elt_size);
7211 CHECKSUM_BLOCK (at->dw_attr_val.v.val_vec.array,
7212 (at->dw_attr_val.v.val_vec.length
7213 * at->dw_attr_val.v.val_vec.elt_size));
7214 break;
7215
7216 case dw_val_class_flag:
7217 CHECKSUM_ULEB128 (DW_FORM_flag);
7218 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_flag ? 1 : 0);
7219 break;
7220
7221 case dw_val_class_str:
7222 CHECKSUM_ULEB128 (DW_FORM_string);
7223 CHECKSUM_STRING (AT_string (at));
7224 break;
7225
7226 case dw_val_class_addr:
7227 r = AT_addr (at);
7228 gcc_assert (GET_CODE (r) == SYMBOL_REF);
7229 CHECKSUM_ULEB128 (DW_FORM_string);
7230 CHECKSUM_STRING (XSTR (r, 0));
7231 break;
7232
7233 case dw_val_class_offset:
7234 CHECKSUM_ULEB128 (DW_FORM_sdata);
7235 CHECKSUM_ULEB128 (at->dw_attr_val.v.val_offset);
7236 break;
7237
7238 case dw_val_class_loc:
7239 for (loc = AT_loc (at); loc; loc = loc->dw_loc_next)
7240 loc_checksum_ordered (loc, ctx);
7241 break;
7242
7243 case dw_val_class_fde_ref:
7244 case dw_val_class_symview:
7245 case dw_val_class_lbl_id:
7246 case dw_val_class_lineptr:
7247 case dw_val_class_macptr:
7248 case dw_val_class_loclistsptr:
7249 case dw_val_class_high_pc:
7250 break;
7251
7252 case dw_val_class_file:
7253 case dw_val_class_file_implicit:
7254 CHECKSUM_ULEB128 (DW_FORM_string);
7255 CHECKSUM_STRING (AT_file (at)->filename);
7256 break;
7257
7258 case dw_val_class_data8:
7259 CHECKSUM (at->dw_attr_val.v.val_data8);
7260 break;
7261
7262 default:
7263 break;
7264 }
7265 }
7266
7267 struct checksum_attributes
7268 {
7269 dw_attr_node *at_name;
7270 dw_attr_node *at_type;
7271 dw_attr_node *at_friend;
7272 dw_attr_node *at_accessibility;
7273 dw_attr_node *at_address_class;
7274 dw_attr_node *at_alignment;
7275 dw_attr_node *at_allocated;
7276 dw_attr_node *at_artificial;
7277 dw_attr_node *at_associated;
7278 dw_attr_node *at_binary_scale;
7279 dw_attr_node *at_bit_offset;
7280 dw_attr_node *at_bit_size;
7281 dw_attr_node *at_bit_stride;
7282 dw_attr_node *at_byte_size;
7283 dw_attr_node *at_byte_stride;
7284 dw_attr_node *at_const_value;
7285 dw_attr_node *at_containing_type;
7286 dw_attr_node *at_count;
7287 dw_attr_node *at_data_location;
7288 dw_attr_node *at_data_member_location;
7289 dw_attr_node *at_decimal_scale;
7290 dw_attr_node *at_decimal_sign;
7291 dw_attr_node *at_default_value;
7292 dw_attr_node *at_digit_count;
7293 dw_attr_node *at_discr;
7294 dw_attr_node *at_discr_list;
7295 dw_attr_node *at_discr_value;
7296 dw_attr_node *at_encoding;
7297 dw_attr_node *at_endianity;
7298 dw_attr_node *at_explicit;
7299 dw_attr_node *at_is_optional;
7300 dw_attr_node *at_location;
7301 dw_attr_node *at_lower_bound;
7302 dw_attr_node *at_mutable;
7303 dw_attr_node *at_ordering;
7304 dw_attr_node *at_picture_string;
7305 dw_attr_node *at_prototyped;
7306 dw_attr_node *at_small;
7307 dw_attr_node *at_segment;
7308 dw_attr_node *at_string_length;
7309 dw_attr_node *at_string_length_bit_size;
7310 dw_attr_node *at_string_length_byte_size;
7311 dw_attr_node *at_threads_scaled;
7312 dw_attr_node *at_upper_bound;
7313 dw_attr_node *at_use_location;
7314 dw_attr_node *at_use_UTF8;
7315 dw_attr_node *at_variable_parameter;
7316 dw_attr_node *at_virtuality;
7317 dw_attr_node *at_visibility;
7318 dw_attr_node *at_vtable_elem_location;
7319 };
7320
7321 /* Collect the attributes that we will want to use for the checksum. */
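/* The attributes are only recorded here; die_checksum_ordered then checksums
   them in a fixed, canonical order, so the resulting signature does not
   depend on the order in which the attributes appear on the DIE.  */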
7322
7323 static void
7324 collect_checksum_attributes (struct checksum_attributes *attrs, dw_die_ref die)
7325 {
7326 dw_attr_node *a;
7327 unsigned ix;
7328
7329 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7330 {
7331 switch (a->dw_attr)
7332 {
7333 case DW_AT_name:
7334 attrs->at_name = a;
7335 break;
7336 case DW_AT_type:
7337 attrs->at_type = a;
7338 break;
7339 case DW_AT_friend:
7340 attrs->at_friend = a;
7341 break;
7342 case DW_AT_accessibility:
7343 attrs->at_accessibility = a;
7344 break;
7345 case DW_AT_address_class:
7346 attrs->at_address_class = a;
7347 break;
7348 case DW_AT_alignment:
7349 attrs->at_alignment = a;
7350 break;
7351 case DW_AT_allocated:
7352 attrs->at_allocated = a;
7353 break;
7354 case DW_AT_artificial:
7355 attrs->at_artificial = a;
7356 break;
7357 case DW_AT_associated:
7358 attrs->at_associated = a;
7359 break;
7360 case DW_AT_binary_scale:
7361 attrs->at_binary_scale = a;
7362 break;
7363 case DW_AT_bit_offset:
7364 attrs->at_bit_offset = a;
7365 break;
7366 case DW_AT_bit_size:
7367 attrs->at_bit_size = a;
7368 break;
7369 case DW_AT_bit_stride:
7370 attrs->at_bit_stride = a;
7371 break;
7372 case DW_AT_byte_size:
7373 attrs->at_byte_size = a;
7374 break;
7375 case DW_AT_byte_stride:
7376 attrs->at_byte_stride = a;
7377 break;
7378 case DW_AT_const_value:
7379 attrs->at_const_value = a;
7380 break;
7381 case DW_AT_containing_type:
7382 attrs->at_containing_type = a;
7383 break;
7384 case DW_AT_count:
7385 attrs->at_count = a;
7386 break;
7387 case DW_AT_data_location:
7388 attrs->at_data_location = a;
7389 break;
7390 case DW_AT_data_member_location:
7391 attrs->at_data_member_location = a;
7392 break;
7393 case DW_AT_decimal_scale:
7394 attrs->at_decimal_scale = a;
7395 break;
7396 case DW_AT_decimal_sign:
7397 attrs->at_decimal_sign = a;
7398 break;
7399 case DW_AT_default_value:
7400 attrs->at_default_value = a;
7401 break;
7402 case DW_AT_digit_count:
7403 attrs->at_digit_count = a;
7404 break;
7405 case DW_AT_discr:
7406 attrs->at_discr = a;
7407 break;
7408 case DW_AT_discr_list:
7409 attrs->at_discr_list = a;
7410 break;
7411 case DW_AT_discr_value:
7412 attrs->at_discr_value = a;
7413 break;
7414 case DW_AT_encoding:
7415 attrs->at_encoding = a;
7416 break;
7417 case DW_AT_endianity:
7418 attrs->at_endianity = a;
7419 break;
7420 case DW_AT_explicit:
7421 attrs->at_explicit = a;
7422 break;
7423 case DW_AT_is_optional:
7424 attrs->at_is_optional = a;
7425 break;
7426 case DW_AT_location:
7427 attrs->at_location = a;
7428 break;
7429 case DW_AT_lower_bound:
7430 attrs->at_lower_bound = a;
7431 break;
7432 case DW_AT_mutable:
7433 attrs->at_mutable = a;
7434 break;
7435 case DW_AT_ordering:
7436 attrs->at_ordering = a;
7437 break;
7438 case DW_AT_picture_string:
7439 attrs->at_picture_string = a;
7440 break;
7441 case DW_AT_prototyped:
7442 attrs->at_prototyped = a;
7443 break;
7444 case DW_AT_small:
7445 attrs->at_small = a;
7446 break;
7447 case DW_AT_segment:
7448 attrs->at_segment = a;
7449 break;
7450 case DW_AT_string_length:
7451 attrs->at_string_length = a;
7452 break;
7453 case DW_AT_string_length_bit_size:
7454 attrs->at_string_length_bit_size = a;
7455 break;
7456 case DW_AT_string_length_byte_size:
7457 attrs->at_string_length_byte_size = a;
7458 break;
7459 case DW_AT_threads_scaled:
7460 attrs->at_threads_scaled = a;
7461 break;
7462 case DW_AT_upper_bound:
7463 attrs->at_upper_bound = a;
7464 break;
7465 case DW_AT_use_location:
7466 attrs->at_use_location = a;
7467 break;
7468 case DW_AT_use_UTF8:
7469 attrs->at_use_UTF8 = a;
7470 break;
7471 case DW_AT_variable_parameter:
7472 attrs->at_variable_parameter = a;
7473 break;
7474 case DW_AT_virtuality:
7475 attrs->at_virtuality = a;
7476 break;
7477 case DW_AT_visibility:
7478 attrs->at_visibility = a;
7479 break;
7480 case DW_AT_vtable_elem_location:
7481 attrs->at_vtable_elem_location = a;
7482 break;
7483 default:
7484 break;
7485 }
7486 }
7487 }
7488
7489 /* Calculate the checksum of a DIE, using an ordered subset of attributes. */
7490
7491 static void
7492 die_checksum_ordered (dw_die_ref die, struct md5_ctx *ctx, int *mark)
7493 {
7494 dw_die_ref c;
7495 dw_die_ref decl;
7496 struct checksum_attributes attrs;
7497
7498 CHECKSUM_ULEB128 ('D');
7499 CHECKSUM_ULEB128 (die->die_tag);
7500
7501 memset (&attrs, 0, sizeof (attrs));
7502
7503 decl = get_AT_ref (die, DW_AT_specification);
7504 if (decl != NULL)
7505 collect_checksum_attributes (&attrs, decl);
7506 collect_checksum_attributes (&attrs, die);
7507
7508 CHECKSUM_ATTR (attrs.at_name);
7509 CHECKSUM_ATTR (attrs.at_accessibility);
7510 CHECKSUM_ATTR (attrs.at_address_class);
7511 CHECKSUM_ATTR (attrs.at_allocated);
7512 CHECKSUM_ATTR (attrs.at_artificial);
7513 CHECKSUM_ATTR (attrs.at_associated);
7514 CHECKSUM_ATTR (attrs.at_binary_scale);
7515 CHECKSUM_ATTR (attrs.at_bit_offset);
7516 CHECKSUM_ATTR (attrs.at_bit_size);
7517 CHECKSUM_ATTR (attrs.at_bit_stride);
7518 CHECKSUM_ATTR (attrs.at_byte_size);
7519 CHECKSUM_ATTR (attrs.at_byte_stride);
7520 CHECKSUM_ATTR (attrs.at_const_value);
7521 CHECKSUM_ATTR (attrs.at_containing_type);
7522 CHECKSUM_ATTR (attrs.at_count);
7523 CHECKSUM_ATTR (attrs.at_data_location);
7524 CHECKSUM_ATTR (attrs.at_data_member_location);
7525 CHECKSUM_ATTR (attrs.at_decimal_scale);
7526 CHECKSUM_ATTR (attrs.at_decimal_sign);
7527 CHECKSUM_ATTR (attrs.at_default_value);
7528 CHECKSUM_ATTR (attrs.at_digit_count);
7529 CHECKSUM_ATTR (attrs.at_discr);
7530 CHECKSUM_ATTR (attrs.at_discr_list);
7531 CHECKSUM_ATTR (attrs.at_discr_value);
7532 CHECKSUM_ATTR (attrs.at_encoding);
7533 CHECKSUM_ATTR (attrs.at_endianity);
7534 CHECKSUM_ATTR (attrs.at_explicit);
7535 CHECKSUM_ATTR (attrs.at_is_optional);
7536 CHECKSUM_ATTR (attrs.at_location);
7537 CHECKSUM_ATTR (attrs.at_lower_bound);
7538 CHECKSUM_ATTR (attrs.at_mutable);
7539 CHECKSUM_ATTR (attrs.at_ordering);
7540 CHECKSUM_ATTR (attrs.at_picture_string);
7541 CHECKSUM_ATTR (attrs.at_prototyped);
7542 CHECKSUM_ATTR (attrs.at_small);
7543 CHECKSUM_ATTR (attrs.at_segment);
7544 CHECKSUM_ATTR (attrs.at_string_length);
7545 CHECKSUM_ATTR (attrs.at_string_length_bit_size);
7546 CHECKSUM_ATTR (attrs.at_string_length_byte_size);
7547 CHECKSUM_ATTR (attrs.at_threads_scaled);
7548 CHECKSUM_ATTR (attrs.at_upper_bound);
7549 CHECKSUM_ATTR (attrs.at_use_location);
7550 CHECKSUM_ATTR (attrs.at_use_UTF8);
7551 CHECKSUM_ATTR (attrs.at_variable_parameter);
7552 CHECKSUM_ATTR (attrs.at_virtuality);
7553 CHECKSUM_ATTR (attrs.at_visibility);
7554 CHECKSUM_ATTR (attrs.at_vtable_elem_location);
7555 CHECKSUM_ATTR (attrs.at_type);
7556 CHECKSUM_ATTR (attrs.at_friend);
7557 CHECKSUM_ATTR (attrs.at_alignment);
7558
7559 /* Checksum the child DIEs. */
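/* The children are kept on a circular sibling list and DIE->die_child points
   to the last child, so stepping to c->die_sib first visits the children in
   order; the loop ends once it wraps back around to the last child.  */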
7560 c = die->die_child;
7561 if (c) do {
7562 dw_attr_node *name_attr;
7563
7564 c = c->die_sib;
7565 name_attr = get_AT (c, DW_AT_name);
7566 if (is_template_instantiation (c))
7567 {
7568 /* Ignore instantiations of member type and function templates. */
7569 }
7570 else if (name_attr != NULL
7571 && (is_type_die (c) || c->die_tag == DW_TAG_subprogram))
7572 {
7573 /* Use a shallow checksum for named nested types and member
7574 functions. */
7575 CHECKSUM_ULEB128 ('S');
7576 CHECKSUM_ULEB128 (c->die_tag);
7577 CHECKSUM_STRING (AT_string (name_attr));
7578 }
7579 else
7580 {
7581 /* Use a deep checksum for other children. */
7582 /* Mark this DIE so it gets processed when unmarking. */
7583 if (c->die_mark == 0)
7584 c->die_mark = -1;
7585 die_checksum_ordered (c, ctx, mark);
7586 }
7587 } while (c != die->die_child);
7588
7589 CHECKSUM_ULEB128 (0);
7590 }
7591
7592 /* Add a type name and tag to a hash. */
7593 static void
7594 die_odr_checksum (int tag, const char *name, md5_ctx *ctx)
7595 {
7596 CHECKSUM_ULEB128 (tag);
7597 CHECKSUM_STRING (name);
7598 }
7599
7600 #undef CHECKSUM
7601 #undef CHECKSUM_STRING
7602 #undef CHECKSUM_ATTR
7603 #undef CHECKSUM_SLEB128
7604 #undef CHECKSUM_ULEB128
7605
7606 /* Generate the type signature for DIE. This is computed by generating an
7607 MD5 checksum over the DIE's tag, its relevant attributes, and its
7608 children. Attributes that are references to other DIEs are processed
7609 by recursion, using the MARK field to prevent infinite recursion.
7610 If the DIE is nested inside a namespace or another type, we also
7611 need to include that context in the signature. The lower 64 bits
7612 of the resulting MD5 checksum comprise the signature. */
7613
7614 static void
7615 generate_type_signature (dw_die_ref die, comdat_type_node *type_node)
7616 {
7617 int mark;
7618 const char *name;
7619 unsigned char checksum[16];
7620 struct md5_ctx ctx;
7621 dw_die_ref decl;
7622 dw_die_ref parent;
7623
7624 name = get_AT_string (die, DW_AT_name);
7625 decl = get_AT_ref (die, DW_AT_specification);
7626 parent = get_die_parent (die);
7627
7628 /* First, compute a signature for just the type name (and its surrounding
7629 context, if any). This is stored in the type unit DIE for link-time
7630 ODR (one-definition rule) checking. */
7631
7632 if (is_cxx () && name != NULL)
7633 {
7634 md5_init_ctx (&ctx);
7635
7636 /* Checksum the names of surrounding namespaces and structures. */
7637 if (parent != NULL)
7638 checksum_die_context (parent, &ctx);
7639
7640 /* Checksum the current DIE. */
7641 die_odr_checksum (die->die_tag, name, &ctx);
7642 md5_finish_ctx (&ctx, checksum);
7643
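/* Use the low-order 64 bits (the last eight bytes) of the digest, mirroring
   how the full type signature is taken from the tail of the checksum
   below.  */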
7644 add_AT_data8 (type_node->root_die, DW_AT_GNU_odr_signature, &checksum[8]);
7645 }
7646
7647 /* Next, compute the complete type signature. */
7648
7649 md5_init_ctx (&ctx);
7650 mark = 1;
7651 die->die_mark = mark;
7652
7653 /* Checksum the names of surrounding namespaces and structures. */
7654 if (parent != NULL)
7655 checksum_die_context (parent, &ctx);
7656
7657 /* Checksum the DIE and its children. */
7658 die_checksum_ordered (die, &ctx, &mark);
7659 unmark_all_dies (die);
7660 md5_finish_ctx (&ctx, checksum);
7661
7662 /* Store the signature in the type node and link the type DIE and the
7663 type node together. */
7664 memcpy (type_node->signature, &checksum[16 - DWARF_TYPE_SIGNATURE_SIZE],
7665 DWARF_TYPE_SIGNATURE_SIZE);
7666 die->comdat_type_p = true;
7667 die->die_id.die_type_node = type_node;
7668 type_node->type_die = die;
7669
7670 /* If the DIE is a specification, link its declaration to the type node
7671 as well. */
7672 if (decl != NULL)
7673 {
7674 decl->comdat_type_p = true;
7675 decl->die_id.die_type_node = type_node;
7676 }
7677 }
7678
7679 /* Do the location expressions look the same? */
7680 static inline int
7681 same_loc_p (dw_loc_descr_ref loc1, dw_loc_descr_ref loc2, int *mark)
7682 {
7683 return loc1->dw_loc_opc == loc2->dw_loc_opc
7684 && same_dw_val_p (&loc1->dw_loc_oprnd1, &loc2->dw_loc_oprnd1, mark)
7685 && same_dw_val_p (&loc1->dw_loc_oprnd2, &loc2->dw_loc_oprnd2, mark);
7686 }
7687
7688 /* Do the values look the same? */
7689 static int
7690 same_dw_val_p (const dw_val_node *v1, const dw_val_node *v2, int *mark)
7691 {
7692 dw_loc_descr_ref loc1, loc2;
7693 rtx r1, r2;
7694
7695 if (v1->val_class != v2->val_class)
7696 return 0;
7697
7698 switch (v1->val_class)
7699 {
7700 case dw_val_class_const:
7701 case dw_val_class_const_implicit:
7702 return v1->v.val_int == v2->v.val_int;
7703 case dw_val_class_unsigned_const:
7704 case dw_val_class_unsigned_const_implicit:
7705 return v1->v.val_unsigned == v2->v.val_unsigned;
7706 case dw_val_class_const_double:
7707 return v1->v.val_double.high == v2->v.val_double.high
7708 && v1->v.val_double.low == v2->v.val_double.low;
7709 case dw_val_class_wide_int:
7710 return *v1->v.val_wide == *v2->v.val_wide;
7711 case dw_val_class_vec:
7712 if (v1->v.val_vec.length != v2->v.val_vec.length
7713 || v1->v.val_vec.elt_size != v2->v.val_vec.elt_size)
7714 return 0;
7715 if (memcmp (v1->v.val_vec.array, v2->v.val_vec.array,
7716 v1->v.val_vec.length * v1->v.val_vec.elt_size))
7717 return 0;
7718 return 1;
7719 case dw_val_class_flag:
7720 return v1->v.val_flag == v2->v.val_flag;
7721 case dw_val_class_str:
7722 return !strcmp (v1->v.val_str->str, v2->v.val_str->str);
7723
7724 case dw_val_class_addr:
7725 r1 = v1->v.val_addr;
7726 r2 = v2->v.val_addr;
7727 if (GET_CODE (r1) != GET_CODE (r2))
7728 return 0;
7729 return rtx_equal_p (r1, r2);
7730
7731 case dw_val_class_offset:
7732 return v1->v.val_offset == v2->v.val_offset;
7733
7734 case dw_val_class_loc:
7735 for (loc1 = v1->v.val_loc, loc2 = v2->v.val_loc;
7736 loc1 && loc2;
7737 loc1 = loc1->dw_loc_next, loc2 = loc2->dw_loc_next)
7738 if (!same_loc_p (loc1, loc2, mark))
7739 return 0;
7740 return !loc1 && !loc2;
7741
7742 case dw_val_class_die_ref:
7743 return same_die_p (v1->v.val_die_ref.die, v2->v.val_die_ref.die, mark);
7744
7745 case dw_val_class_symview:
7746 return strcmp (v1->v.val_symbolic_view, v2->v.val_symbolic_view) == 0;
7747
7748 case dw_val_class_fde_ref:
7749 case dw_val_class_vms_delta:
7750 case dw_val_class_lbl_id:
7751 case dw_val_class_lineptr:
7752 case dw_val_class_macptr:
7753 case dw_val_class_loclistsptr:
7754 case dw_val_class_high_pc:
7755 return 1;
7756
7757 case dw_val_class_file:
7758 case dw_val_class_file_implicit:
7759 return v1->v.val_file == v2->v.val_file;
7760
7761 case dw_val_class_data8:
7762 return !memcmp (v1->v.val_data8, v2->v.val_data8, 8);
7763
7764 default:
7765 return 1;
7766 }
7767 }
7768
7769 /* Do the attributes look the same? */
7770
7771 static int
7772 same_attr_p (dw_attr_node *at1, dw_attr_node *at2, int *mark)
7773 {
7774 if (at1->dw_attr != at2->dw_attr)
7775 return 0;
7776
7777 /* We don't care that this was compiled with a different compiler
7778 snapshot; if the output is the same, that's what matters. */
7779 if (at1->dw_attr == DW_AT_producer)
7780 return 1;
7781
7782 return same_dw_val_p (&at1->dw_attr_val, &at2->dw_attr_val, mark);
7783 }
7784
7785 /* Do the DIEs look the same? */
7786
7787 static int
7788 same_die_p (dw_die_ref die1, dw_die_ref die2, int *mark)
7789 {
7790 dw_die_ref c1, c2;
7791 dw_attr_node *a1;
7792 unsigned ix;
7793
7794 /* To avoid infinite recursion. */
7795 if (die1->die_mark)
7796 return die1->die_mark == die2->die_mark;
7797 die1->die_mark = die2->die_mark = ++(*mark);
7798
7799 if (die1->die_tag != die2->die_tag)
7800 return 0;
7801
7802 if (vec_safe_length (die1->die_attr) != vec_safe_length (die2->die_attr))
7803 return 0;
7804
7805 FOR_EACH_VEC_SAFE_ELT (die1->die_attr, ix, a1)
7806 if (!same_attr_p (a1, &(*die2->die_attr)[ix], mark))
7807 return 0;
7808
7809 c1 = die1->die_child;
7810 c2 = die2->die_child;
7811 if (! c1)
7812 {
7813 if (c2)
7814 return 0;
7815 }
7816 else
7817 for (;;)
7818 {
7819 if (!same_die_p (c1, c2, mark))
7820 return 0;
7821 c1 = c1->die_sib;
7822 c2 = c2->die_sib;
7823 if (c1 == die1->die_child)
7824 {
7825 if (c2 == die2->die_child)
7826 break;
7827 else
7828 return 0;
7829 }
7830 }
7831
7832 return 1;
7833 }
7834
7835 /* Calculate the MD5 checksum of the compilation unit DIE UNIT_DIE and its
7836 children, and set die_symbol. */
7837
7838 static void
7839 compute_comp_unit_symbol (dw_die_ref unit_die)
7840 {
7841 const char *die_name = get_AT_string (unit_die, DW_AT_name);
7842 const char *base = die_name ? lbasename (die_name) : "anonymous";
7843 char *name = XALLOCAVEC (char, strlen (base) + 64);
7844 char *p;
7845 int i, mark;
7846 unsigned char checksum[16];
7847 struct md5_ctx ctx;
7848
7849 /* Compute the checksum of the DIE, then append part of it as hex digits to
7850 the base filename of the unit. */
7851
7852 md5_init_ctx (&ctx);
7853 mark = 0;
7854 die_checksum (unit_die, &ctx, &mark);
7855 unmark_all_dies (unit_die);
7856 md5_finish_ctx (&ctx, checksum);
7857
7858 /* When we do this for comp_unit_die () we have a DW_AT_name that might
7859 not start with a letter but with anything valid for filenames and
7860 clean_symbol_name doesn't fix that up. Prepend 'g' if the first
7861 character is not a letter. */
7862 sprintf (name, "%s%s.", ISALPHA (*base) ? "" : "g", base);
7863 clean_symbol_name (name);
7864
7865 p = name + strlen (name);
7866 for (i = 0; i < 4; i++)
7867 {
7868 sprintf (p, "%.2x", checksum[i]);
7869 p += 2;
7870 }
7871
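/* NAME is now the cleaned base name followed by eight hex digits; for a unit
   named "foo.c" this is something like "foo_c_" plus the first four checksum
   bytes in hex (the exact digits depend on the MD5 sum).  */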
7872 unit_die->die_id.die_symbol = xstrdup (name);
7873 }
7874
7875 /* Returns nonzero if DIE represents a type, in the sense of TYPE_P. */
7876
7877 static int
7878 is_type_die (dw_die_ref die)
7879 {
7880 switch (die->die_tag)
7881 {
7882 case DW_TAG_array_type:
7883 case DW_TAG_class_type:
7884 case DW_TAG_interface_type:
7885 case DW_TAG_enumeration_type:
7886 case DW_TAG_pointer_type:
7887 case DW_TAG_reference_type:
7888 case DW_TAG_rvalue_reference_type:
7889 case DW_TAG_string_type:
7890 case DW_TAG_structure_type:
7891 case DW_TAG_subroutine_type:
7892 case DW_TAG_union_type:
7893 case DW_TAG_ptr_to_member_type:
7894 case DW_TAG_set_type:
7895 case DW_TAG_subrange_type:
7896 case DW_TAG_base_type:
7897 case DW_TAG_const_type:
7898 case DW_TAG_file_type:
7899 case DW_TAG_packed_type:
7900 case DW_TAG_volatile_type:
7901 case DW_TAG_typedef:
7902 return 1;
7903 default:
7904 return 0;
7905 }
7906 }
7907
7908 /* Returns true iff C is a compile-unit DIE. */
7909
7910 static inline bool
7911 is_cu_die (dw_die_ref c)
7912 {
7913 return c && (c->die_tag == DW_TAG_compile_unit
7914 || c->die_tag == DW_TAG_skeleton_unit);
7915 }
7916
7917 /* Returns true iff C is a unit DIE of some sort. */
7918
7919 static inline bool
7920 is_unit_die (dw_die_ref c)
7921 {
7922 return c && (c->die_tag == DW_TAG_compile_unit
7923 || c->die_tag == DW_TAG_partial_unit
7924 || c->die_tag == DW_TAG_type_unit
7925 || c->die_tag == DW_TAG_skeleton_unit);
7926 }
7927
7928 /* Returns true iff C is a namespace DIE. */
7929
7930 static inline bool
7931 is_namespace_die (dw_die_ref c)
7932 {
7933 return c && c->die_tag == DW_TAG_namespace;
7934 }
7935
7936 /* Return non-zero if this DIE is a template parameter. */
7937
7938 static inline bool
7939 is_template_parameter (dw_die_ref die)
7940 {
7941 switch (die->die_tag)
7942 {
7943 case DW_TAG_template_type_param:
7944 case DW_TAG_template_value_param:
7945 case DW_TAG_GNU_template_template_param:
7946 case DW_TAG_GNU_template_parameter_pack:
7947 return true;
7948 default:
7949 return false;
7950 }
7951 }
7952
7953 /* Return non-zero if this DIE represents a template instantiation. */
7954
7955 static inline bool
7956 is_template_instantiation (dw_die_ref die)
7957 {
7958 dw_die_ref c;
7959
7960 if (!is_type_die (die) && die->die_tag != DW_TAG_subprogram)
7961 return false;
7962 FOR_EACH_CHILD (die, c, if (is_template_parameter (c)) return true);
7963 return false;
7964 }
7965
7966 static char *
7967 gen_internal_sym (const char *prefix)
7968 {
7969 char buf[MAX_ARTIFICIAL_LABEL_BYTES];
7970
7971 ASM_GENERATE_INTERNAL_LABEL (buf, prefix, label_num++);
7972 return xstrdup (buf);
7973 }
7974
7975 /* Return non-zero if this DIE is a declaration. */
7976
7977 static int
7978 is_declaration_die (dw_die_ref die)
7979 {
7980 dw_attr_node *a;
7981 unsigned ix;
7982
7983 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
7984 if (a->dw_attr == DW_AT_declaration)
7985 return 1;
7986
7987 return 0;
7988 }
7989
7990 /* Return non-zero if this DIE is nested inside a subprogram. */
7991
7992 static int
7993 is_nested_in_subprogram (dw_die_ref die)
7994 {
7995 dw_die_ref decl = get_AT_ref (die, DW_AT_specification);
7996
7997 if (decl == NULL)
7998 decl = die;
7999 return local_scope_p (decl);
8000 }
8001
8002 /* Return non-zero if this DIE contains a defining declaration of a
8003 subprogram. */
8004
8005 static int
8006 contains_subprogram_definition (dw_die_ref die)
8007 {
8008 dw_die_ref c;
8009
8010 if (die->die_tag == DW_TAG_subprogram && ! is_declaration_die (die))
8011 return 1;
8012 FOR_EACH_CHILD (die, c, if (contains_subprogram_definition (c)) return 1);
8013 return 0;
8014 }
8015
8016 /* Return non-zero if this is a type DIE that should be moved to a
8017 COMDAT .debug_types section or .debug_info section with DW_UT_*type
8018 unit type. */
8019
8020 static int
8021 should_move_die_to_comdat (dw_die_ref die)
8022 {
8023 switch (die->die_tag)
8024 {
8025 case DW_TAG_class_type:
8026 case DW_TAG_structure_type:
8027 case DW_TAG_enumeration_type:
8028 case DW_TAG_union_type:
8029 /* Don't move declarations, inlined instances, types nested in a
8030 subprogram, or types that contain subprogram definitions. */
8031 if (is_declaration_die (die)
8032 || get_AT (die, DW_AT_abstract_origin)
8033 || is_nested_in_subprogram (die)
8034 || contains_subprogram_definition (die))
8035 return 0;
8036 return 1;
8037 case DW_TAG_array_type:
8038 case DW_TAG_interface_type:
8039 case DW_TAG_pointer_type:
8040 case DW_TAG_reference_type:
8041 case DW_TAG_rvalue_reference_type:
8042 case DW_TAG_string_type:
8043 case DW_TAG_subroutine_type:
8044 case DW_TAG_ptr_to_member_type:
8045 case DW_TAG_set_type:
8046 case DW_TAG_subrange_type:
8047 case DW_TAG_base_type:
8048 case DW_TAG_const_type:
8049 case DW_TAG_file_type:
8050 case DW_TAG_packed_type:
8051 case DW_TAG_volatile_type:
8052 case DW_TAG_typedef:
8053 default:
8054 return 0;
8055 }
8056 }
8057
8058 /* Make a clone of DIE. */
8059
8060 static dw_die_ref
8061 clone_die (dw_die_ref die)
8062 {
8063 dw_die_ref clone = new_die_raw (die->die_tag);
8064 dw_attr_node *a;
8065 unsigned ix;
8066
8067 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8068 add_dwarf_attr (clone, a);
8069
8070 return clone;
8071 }
8072
8073 /* Make a clone of the tree rooted at DIE. */
8074
8075 static dw_die_ref
8076 clone_tree (dw_die_ref die)
8077 {
8078 dw_die_ref c;
8079 dw_die_ref clone = clone_die (die);
8080
8081 FOR_EACH_CHILD (die, c, add_child_die (clone, clone_tree (c)));
8082
8083 return clone;
8084 }
8085
8086 /* Make a clone of DIE as a declaration. */
8087
8088 static dw_die_ref
8089 clone_as_declaration (dw_die_ref die)
8090 {
8091 dw_die_ref clone;
8092 dw_die_ref decl;
8093 dw_attr_node *a;
8094 unsigned ix;
8095
8096 /* If the DIE is already a declaration, just clone it. */
8097 if (is_declaration_die (die))
8098 return clone_die (die);
8099
8100 /* If the DIE is a specification, just clone its declaration DIE. */
8101 decl = get_AT_ref (die, DW_AT_specification);
8102 if (decl != NULL)
8103 {
8104 clone = clone_die (decl);
8105 if (die->comdat_type_p)
8106 add_AT_die_ref (clone, DW_AT_signature, die);
8107 return clone;
8108 }
8109
8110 clone = new_die_raw (die->die_tag);
8111
8112 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8113 {
8114 /* We don't want to copy over all attributes.
8115 For example, we don't want DW_AT_byte_size, because otherwise we would no
8116 longer have a declaration and GDB would treat it as a definition. */
8117
8118 switch (a->dw_attr)
8119 {
8120 case DW_AT_abstract_origin:
8121 case DW_AT_artificial:
8122 case DW_AT_containing_type:
8123 case DW_AT_external:
8124 case DW_AT_name:
8125 case DW_AT_type:
8126 case DW_AT_virtuality:
8127 case DW_AT_linkage_name:
8128 case DW_AT_MIPS_linkage_name:
8129 add_dwarf_attr (clone, a);
8130 break;
8131 case DW_AT_byte_size:
8132 case DW_AT_alignment:
8133 default:
8134 break;
8135 }
8136 }
8137
8138 if (die->comdat_type_p)
8139 add_AT_die_ref (clone, DW_AT_signature, die);
8140
8141 add_AT_flag (clone, DW_AT_declaration, 1);
8142 return clone;
8143 }
8144
8145
8146 /* Structure to map a DIE in one CU to its copy in a comdat type unit. */
8147
8148 struct decl_table_entry
8149 {
8150 dw_die_ref orig;
8151 dw_die_ref copy;
8152 };
8153
8154 /* Helpers to manipulate hash table of copied declarations. */
8155
8156 /* Hashtable helpers. */
8157
8158 struct decl_table_entry_hasher : free_ptr_hash <decl_table_entry>
8159 {
8160 typedef die_struct *compare_type;
8161 static inline hashval_t hash (const decl_table_entry *);
8162 static inline bool equal (const decl_table_entry *, const die_struct *);
8163 };
8164
8165 inline hashval_t
8166 decl_table_entry_hasher::hash (const decl_table_entry *entry)
8167 {
8168 return htab_hash_pointer (entry->orig);
8169 }
8170
8171 inline bool
8172 decl_table_entry_hasher::equal (const decl_table_entry *entry1,
8173 const die_struct *entry2)
8174 {
8175 return entry1->orig == entry2;
8176 }
8177
8178 typedef hash_table<decl_table_entry_hasher> decl_hash_type;
8179
8180 /* Copy DIE and its ancestors, up to, but not including, the compile unit
8181 or type unit entry, to a new tree. Adds the new tree to UNIT and returns
8182 a pointer to the copy of DIE. If DECL_TABLE is provided, it is used
8183 to check if the ancestor has already been copied into UNIT. */
8184
8185 static dw_die_ref
8186 copy_ancestor_tree (dw_die_ref unit, dw_die_ref die,
8187 decl_hash_type *decl_table)
8188 {
8189 dw_die_ref parent = die->die_parent;
8190 dw_die_ref new_parent = unit;
8191 dw_die_ref copy;
8192 decl_table_entry **slot = NULL;
8193 struct decl_table_entry *entry = NULL;
8194
8195 /* If DIE refers to a stub, unfold it so we get the appropriate
8196 DIE registered as orig in decl_table. */
8197 if (dw_die_ref c = get_AT_ref (die, DW_AT_signature))
8198 die = c;
8199
8200 if (decl_table)
8201 {
8202 /* Check if the entry has already been copied to UNIT. */
8203 slot = decl_table->find_slot_with_hash (die, htab_hash_pointer (die),
8204 INSERT);
8205 if (*slot != HTAB_EMPTY_ENTRY)
8206 {
8207 entry = *slot;
8208 return entry->copy;
8209 }
8210
8211 /* Record in DECL_TABLE that DIE has been copied to UNIT. */
8212 entry = XCNEW (struct decl_table_entry);
8213 entry->orig = die;
8214 entry->copy = NULL;
8215 *slot = entry;
8216 }
8217
8218 if (parent != NULL)
8219 {
8220 dw_die_ref spec = get_AT_ref (parent, DW_AT_specification);
8221 if (spec != NULL)
8222 parent = spec;
8223 if (!is_unit_die (parent))
8224 new_parent = copy_ancestor_tree (unit, parent, decl_table);
8225 }
8226
8227 copy = clone_as_declaration (die);
8228 add_child_die (new_parent, copy);
8229
8230 if (decl_table)
8231 {
8232 /* Record the pointer to the copy. */
8233 entry->copy = copy;
8234 }
8235
8236 return copy;
8237 }

8238 /* Copy the declaration context to the new type unit DIE. This includes
8239 any surrounding namespace or type declarations. If the DIE has an
8240 AT_specification attribute, it also includes attributes and children
8241 attached to the specification, and returns a pointer to the original
8242 parent of the declaration DIE. Returns NULL otherwise. */
8243
8244 static dw_die_ref
8245 copy_declaration_context (dw_die_ref unit, dw_die_ref die)
8246 {
8247 dw_die_ref decl;
8248 dw_die_ref new_decl;
8249 dw_die_ref orig_parent = NULL;
8250
8251 decl = get_AT_ref (die, DW_AT_specification);
8252 if (decl == NULL)
8253 decl = die;
8254 else
8255 {
8256 unsigned ix;
8257 dw_die_ref c;
8258 dw_attr_node *a;
8259
8260 /* The original DIE will be changed to a declaration, and must
8261 be moved to be a child of the original declaration DIE. */
8262 orig_parent = decl->die_parent;
8263
8264 /* Copy the type node pointer from the new DIE to the original
8265 declaration DIE so we can forward references later. */
8266 decl->comdat_type_p = true;
8267 decl->die_id.die_type_node = die->die_id.die_type_node;
8268
8269 remove_AT (die, DW_AT_specification);
8270
8271 FOR_EACH_VEC_SAFE_ELT (decl->die_attr, ix, a)
8272 {
8273 if (a->dw_attr != DW_AT_name
8274 && a->dw_attr != DW_AT_declaration
8275 && a->dw_attr != DW_AT_external)
8276 add_dwarf_attr (die, a);
8277 }
8278
8279 FOR_EACH_CHILD (decl, c, add_child_die (die, clone_tree (c)));
8280 }
8281
8282 if (decl->die_parent != NULL
8283 && !is_unit_die (decl->die_parent))
8284 {
8285 new_decl = copy_ancestor_tree (unit, decl, NULL);
8286 if (new_decl != NULL)
8287 {
8288 remove_AT (new_decl, DW_AT_signature);
8289 add_AT_specification (die, new_decl);
8290 }
8291 }
8292
8293 return orig_parent;
8294 }
8295
8296 /* Generate the skeleton ancestor tree for the given NODE, then clone
8297 the DIE and add the clone into the tree. */
8298
8299 static void
8300 generate_skeleton_ancestor_tree (skeleton_chain_node *node)
8301 {
8302 if (node->new_die != NULL)
8303 return;
8304
8305 node->new_die = clone_as_declaration (node->old_die);
8306
8307 if (node->parent != NULL)
8308 {
8309 generate_skeleton_ancestor_tree (node->parent);
8310 add_child_die (node->parent->new_die, node->new_die);
8311 }
8312 }
8313
8314 /* Generate a skeleton tree of DIEs containing any declarations that are
8315 found in the original tree. We traverse the tree looking for declaration
8316 DIEs, and construct the skeleton from the bottom up whenever we find one. */
8317
8318 static void
8319 generate_skeleton_bottom_up (skeleton_chain_node *parent)
8320 {
8321 skeleton_chain_node node;
8322 dw_die_ref c;
8323 dw_die_ref first;
8324 dw_die_ref prev = NULL;
8325 dw_die_ref next = NULL;
8326
8327 node.parent = parent;
8328
8329 first = c = parent->old_die->die_child;
8330 if (c)
8331 next = c->die_sib;
8332 if (c) do {
8333 if (prev == NULL || prev->die_sib == c)
8334 prev = c;
8335 c = next;
8336 next = (c == first ? NULL : c->die_sib);
8337 node.old_die = c;
8338 node.new_die = NULL;
8339 if (is_declaration_die (c))
8340 {
8341 if (is_template_instantiation (c))
8342 {
8343 /* Instantiated templates do not need to be cloned into the
8344 type unit. Just move the DIE and its children back to
8345 the skeleton tree (in the main CU). */
8346 remove_child_with_prev (c, prev);
8347 add_child_die (parent->new_die, c);
8348 c = prev;
8349 }
8350 else if (c->comdat_type_p)
8351 {
8352 /* This is the skeleton of a type broken out by an earlier
8353 break_out_comdat_types call. Clone the existing DIE, but keep the children
8354 under the original (which is in the main CU). */
8355 dw_die_ref clone = clone_die (c);
8356
8357 replace_child (c, clone, prev);
8358 generate_skeleton_ancestor_tree (parent);
8359 add_child_die (parent->new_die, c);
8360 c = clone;
8361 continue;
8362 }
8363 else
8364 {
8365 /* Clone the existing DIE, move the original to the skeleton
8366 tree (which is in the main CU), and put the clone, with
8367 all the original's children, where the original came from
8368 (which is about to be moved to the type unit). */
8369 dw_die_ref clone = clone_die (c);
8370 move_all_children (c, clone);
8371
8372 /* If the original has a DW_AT_object_pointer attribute,
8373 it would now point to a child DIE just moved to the
8374 cloned tree, so we need to remove that attribute from
8375 the original. */
8376 remove_AT (c, DW_AT_object_pointer);
8377
8378 replace_child (c, clone, prev);
8379 generate_skeleton_ancestor_tree (parent);
8380 add_child_die (parent->new_die, c);
8381 node.old_die = clone;
8382 node.new_die = c;
8383 c = clone;
8384 }
8385 }
8386 generate_skeleton_bottom_up (&node);
8387 } while (next != NULL);
8388 }
8389
8390 /* Wrapper function for generate_skeleton_bottom_up. */
8391
8392 static dw_die_ref
8393 generate_skeleton (dw_die_ref die)
8394 {
8395 skeleton_chain_node node;
8396
8397 node.old_die = die;
8398 node.new_die = NULL;
8399 node.parent = NULL;
8400
8401 /* If this type definition is nested inside another type,
8402 and is not an instantiation of a template, always leave
8403 at least a declaration in its place. */
8404 if (die->die_parent != NULL
8405 && is_type_die (die->die_parent)
8406 && !is_template_instantiation (die))
8407 node.new_die = clone_as_declaration (die);
8408
8409 generate_skeleton_bottom_up (&node);
8410 return node.new_die;
8411 }
8412
8413 /* Remove the CHILD DIE from its parent, possibly replacing it with a cloned
8414 declaration. The original DIE is moved to a new compile unit so that
8415 existing references to it follow it to the new location. If any of the
8416 original DIE's descendants is a declaration, we need to replace the
8417 original DIE with a skeleton tree and move the declarations back into the
8418 skeleton tree. */
8419
8420 static dw_die_ref
8421 remove_child_or_replace_with_skeleton (dw_die_ref unit, dw_die_ref child,
8422 dw_die_ref prev)
8423 {
8424 dw_die_ref skeleton, orig_parent;
8425
8426 /* Copy the declaration context to the type unit DIE. If the returned
8427 ORIG_PARENT is not NULL, the skeleton needs to be added as a child of
8428 that DIE. */
8429 orig_parent = copy_declaration_context (unit, child);
8430
8431 skeleton = generate_skeleton (child);
8432 if (skeleton == NULL)
8433 remove_child_with_prev (child, prev);
8434 else
8435 {
8436 skeleton->comdat_type_p = true;
8437 skeleton->die_id.die_type_node = child->die_id.die_type_node;
8438
8439 /* If the original DIE was a specification, we need to put
8440 the skeleton under the parent DIE of the declaration.
8441 This leaves the original declaration in the tree, but
8442 it will be pruned later since there are no longer any
8443 references to it. */
8444 if (orig_parent != NULL)
8445 {
8446 remove_child_with_prev (child, prev);
8447 add_child_die (orig_parent, skeleton);
8448 }
8449 else
8450 replace_child (child, skeleton, prev);
8451 }
8452
8453 return skeleton;
8454 }
8455
8456 static void
8457 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8458 comdat_type_node *type_node,
8459 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs);
8460
8461 /* Helper for copy_dwarf_procs_ref_in_dies. Make a copy of the DIE DWARF
8462 procedure, put it under TYPE_NODE and return the copy. Continue looking for
8463 DWARF procedure references in the DW_AT_location attribute. */
8464
8465 static dw_die_ref
8466 copy_dwarf_procedure (dw_die_ref die,
8467 comdat_type_node *type_node,
8468 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8469 {
8470 gcc_assert (die->die_tag == DW_TAG_dwarf_procedure);
8471
8472 /* DWARF procedures are not supposed to have children... */
8473 gcc_assert (die->die_child == NULL);
8474
8475 /* ... and they are supposed to have only one attribute: DW_AT_location. */
8476 gcc_assert (vec_safe_length (die->die_attr) == 1
8477 && ((*die->die_attr)[0].dw_attr == DW_AT_location));
8478
8479 /* Do not copy the same DWARF procedure more than once. */
8480 bool existed;
8481 dw_die_ref &die_copy = copied_dwarf_procs.get_or_insert (die, &existed);
8482 if (existed)
8483 return die_copy;
8484
8485 die_copy = clone_die (die);
8486 add_child_die (type_node->root_die, die_copy);
8487 copy_dwarf_procs_ref_in_attrs (die_copy, type_node, copied_dwarf_procs);
8488 return die_copy;
8489 }
8490
8491 /* Helper for copy_dwarf_procs_ref_in_dies. Look for references to DWARF
8492 procedures in DIE's attributes. */
8493
8494 static void
8495 copy_dwarf_procs_ref_in_attrs (dw_die_ref die,
8496 comdat_type_node *type_node,
8497 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8498 {
8499 dw_attr_node *a;
8500 unsigned i;
8501
8502 FOR_EACH_VEC_SAFE_ELT (die->die_attr, i, a)
8503 {
8504 dw_loc_descr_ref loc;
8505
8506 if (a->dw_attr_val.val_class != dw_val_class_loc)
8507 continue;
8508
8509 for (loc = a->dw_attr_val.v.val_loc; loc != NULL; loc = loc->dw_loc_next)
8510 {
8511 switch (loc->dw_loc_opc)
8512 {
8513 case DW_OP_call2:
8514 case DW_OP_call4:
8515 case DW_OP_call_ref:
8516 gcc_assert (loc->dw_loc_oprnd1.val_class
8517 == dw_val_class_die_ref);
8518 loc->dw_loc_oprnd1.v.val_die_ref.die
8519 = copy_dwarf_procedure (loc->dw_loc_oprnd1.v.val_die_ref.die,
8520 type_node,
8521 copied_dwarf_procs);
8522
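/* FALLTHRU */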
8523 default:
8524 break;
8525 }
8526 }
8527 }
8528 }
8529
8530 /* Copy DWARF procedures that are referenced by the DIE tree to TREE_NODE and
8531 rewrite references to point to the copies.
8532
8533 References are looked for in DIE's attributes and recursively in all its
8534 children attributes that are location descriptions. COPIED_DWARF_PROCS is a
8535 mapping from old DWARF procedures to their copies. It is used to avoid
8536 copying the same DWARF procedure twice under TYPE_NODE. */
8537
8538 static void
8539 copy_dwarf_procs_ref_in_dies (dw_die_ref die,
8540 comdat_type_node *type_node,
8541 hash_map<dw_die_ref, dw_die_ref> &copied_dwarf_procs)
8542 {
8543 dw_die_ref c;
8544
8545 copy_dwarf_procs_ref_in_attrs (die, type_node, copied_dwarf_procs);
8546 FOR_EACH_CHILD (die, c, copy_dwarf_procs_ref_in_dies (c,
8547 type_node,
8548 copied_dwarf_procs));
8549 }
8550
8551 /* Traverse the DIE and set up additional .debug_types or .debug_info
8552 DW_UT_*type sections for each type worthy of being placed in a COMDAT
8553 section. */
8554
8555 static void
8556 break_out_comdat_types (dw_die_ref die)
8557 {
8558 dw_die_ref c;
8559 dw_die_ref first;
8560 dw_die_ref prev = NULL;
8561 dw_die_ref next = NULL;
8562 dw_die_ref unit = NULL;
8563
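/* NEXT is computed before C is processed because C may be removed from or
   replaced in the sibling list below; PREV tracks the predecessor so that
   removal and replacement can splice the circular list correctly.  */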
8564 first = c = die->die_child;
8565 if (c)
8566 next = c->die_sib;
8567 if (c) do {
8568 if (prev == NULL || prev->die_sib == c)
8569 prev = c;
8570 c = next;
8571 next = (c == first ? NULL : c->die_sib);
8572 if (should_move_die_to_comdat (c))
8573 {
8574 dw_die_ref replacement;
8575 comdat_type_node *type_node;
8576
8577 /* Break out nested types into their own type units. */
8578 break_out_comdat_types (c);
8579
8580 /* Create a new type unit DIE as the root for the new tree. */
8581 unit = new_die (DW_TAG_type_unit, NULL, NULL);
8582 add_AT_unsigned (unit, DW_AT_language,
8583 get_AT_unsigned (comp_unit_die (), DW_AT_language));
8584
8585 /* Add the new unit's type DIE into the comdat type list. */
8586 type_node = ggc_cleared_alloc<comdat_type_node> ();
8587 type_node->root_die = unit;
8588 type_node->next = comdat_type_list;
8589 comdat_type_list = type_node;
8590
8591 /* Generate the type signature. */
8592 generate_type_signature (c, type_node);
8593
8594 /* Copy the declaration context, attributes, and children of the
8595 declaration into the new type unit DIE, then remove this DIE
8596 from the main CU (or replace it with a skeleton if necessary). */
8597 replacement = remove_child_or_replace_with_skeleton (unit, c, prev);
8598 type_node->skeleton_die = replacement;
8599
8600 /* Add the DIE to the new compunit. */
8601 add_child_die (unit, c);
8602
8603 /* Types can reference DWARF procedures for type size or data location
8604 expressions. Calls in DWARF expressions cannot target procedures
8605 that are not in the same section. So we must copy DWARF procedures
8606 along with this type and then rewrite references to them. */
8607 hash_map<dw_die_ref, dw_die_ref> copied_dwarf_procs;
8608 copy_dwarf_procs_ref_in_dies (c, type_node, copied_dwarf_procs);
8609
8610 if (replacement != NULL)
8611 c = replacement;
8612 }
8613 else if (c->die_tag == DW_TAG_namespace
8614 || c->die_tag == DW_TAG_class_type
8615 || c->die_tag == DW_TAG_structure_type
8616 || c->die_tag == DW_TAG_union_type)
8617 {
8618 /* Look for nested types that can be broken out. */
8619 break_out_comdat_types (c);
8620 }
8621 } while (next != NULL);
8622 }
8623
8624 /* Like clone_tree, but copy DW_TAG_subprogram DIEs as declarations.
8625 Enter all the cloned children into the hash table decl_table. */
8626
8627 static dw_die_ref
8628 clone_tree_partial (dw_die_ref die, decl_hash_type *decl_table)
8629 {
8630 dw_die_ref c;
8631 dw_die_ref clone;
8632 struct decl_table_entry *entry;
8633 decl_table_entry **slot;
8634
8635 if (die->die_tag == DW_TAG_subprogram)
8636 clone = clone_as_declaration (die);
8637 else
8638 clone = clone_die (die);
8639
8640 slot = decl_table->find_slot_with_hash (die,
8641 htab_hash_pointer (die), INSERT);
8642
8643 /* Assert that DIE isn't in the hash table yet. If it were already there,
8644 its ancestors would necessarily be there as well, and clone_tree_partial
8645 wouldn't have been called. */
8646 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8647
8648 entry = XCNEW (struct decl_table_entry);
8649 entry->orig = die;
8650 entry->copy = clone;
8651 *slot = entry;
8652
8653 if (die->die_tag != DW_TAG_subprogram)
8654 FOR_EACH_CHILD (die, c,
8655 add_child_die (clone, clone_tree_partial (c, decl_table)));
8656
8657 return clone;
8658 }
8659
8660 /* Walk the DIE and its children, looking for references to incomplete
8661 or trivial types that are unmarked (i.e., that are not in the current
8662 type_unit). */
8663
8664 static void
8665 copy_decls_walk (dw_die_ref unit, dw_die_ref die, decl_hash_type *decl_table)
8666 {
8667 dw_die_ref c;
8668 dw_attr_node *a;
8669 unsigned ix;
8670
8671 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8672 {
8673 if (AT_class (a) == dw_val_class_die_ref)
8674 {
8675 dw_die_ref targ = AT_ref (a);
8676 decl_table_entry **slot;
8677 struct decl_table_entry *entry;
8678
8679 if (targ->die_mark != 0 || targ->comdat_type_p)
8680 continue;
8681
8682 slot = decl_table->find_slot_with_hash (targ,
8683 htab_hash_pointer (targ),
8684 INSERT);
8685
8686 if (*slot != HTAB_EMPTY_ENTRY)
8687 {
8688 /* TARG has already been copied, so we just need to
8689 modify the reference to point to the copy. */
8690 entry = *slot;
8691 a->dw_attr_val.v.val_die_ref.die = entry->copy;
8692 }
8693 else
8694 {
8695 dw_die_ref parent = unit;
8696 dw_die_ref copy = clone_die (targ);
8697
8698 /* Record in DECL_TABLE that TARG has been copied.
8699 Need to do this now, before the recursive call,
8700 because DECL_TABLE may be expanded and SLOT
8701 would no longer be a valid pointer. */
8702 entry = XCNEW (struct decl_table_entry);
8703 entry->orig = targ;
8704 entry->copy = copy;
8705 *slot = entry;
8706
8707 /* If TARG is not a declaration DIE, we need to copy its
8708 children. */
8709 if (!is_declaration_die (targ))
8710 {
8711 FOR_EACH_CHILD (
8712 targ, c,
8713 add_child_die (copy,
8714 clone_tree_partial (c, decl_table)));
8715 }
8716
8717 /* Make sure the cloned tree is marked as part of the
8718 type unit. */
8719 mark_dies (copy);
8720
8721 /* If TARG has surrounding context, copy its ancestor tree
8722 into the new type unit. */
8723 if (targ->die_parent != NULL
8724 && !is_unit_die (targ->die_parent))
8725 parent = copy_ancestor_tree (unit, targ->die_parent,
8726 decl_table);
8727
8728 add_child_die (parent, copy);
8729 a->dw_attr_val.v.val_die_ref.die = copy;
8730
8731 /* Make sure the newly-copied DIE is walked. If it was
8732 installed in a previously-added context, it won't
8733 get visited otherwise. */
8734 if (parent != unit)
8735 {
8736 /* Find the highest point of the newly-added tree,
8737 mark each node along the way, and walk from there. */
8738 parent->die_mark = 1;
8739 while (parent->die_parent
8740 && parent->die_parent->die_mark == 0)
8741 {
8742 parent = parent->die_parent;
8743 parent->die_mark = 1;
8744 }
8745 copy_decls_walk (unit, parent, decl_table);
8746 }
8747 }
8748 }
8749 }
8750
8751 FOR_EACH_CHILD (die, c, copy_decls_walk (unit, c, decl_table));
8752 }
8753
8754 /* Collect the skeleton DIEs that break_out_comdat_types has already
8755 created in DIE, and record them in DECL_TABLE. */
8756
8757 static void
8758 collect_skeleton_dies (dw_die_ref die, decl_hash_type *decl_table)
8759 {
8760 dw_die_ref c;
8761
8762 if (dw_attr_node *a = get_AT (die, DW_AT_signature))
8763 {
8764 dw_die_ref targ = AT_ref (a);
8765 gcc_assert (targ->die_mark == 0 && targ->comdat_type_p);
8766 decl_table_entry **slot
8767 = decl_table->find_slot_with_hash (targ,
8768 htab_hash_pointer (targ),
8769 INSERT);
8770 gcc_assert (*slot == HTAB_EMPTY_ENTRY);
8771 /* Record in DECL_TABLE that TARG has been already copied
8772 by remove_child_or_replace_with_skeleton. */
8773 decl_table_entry *entry = XCNEW (struct decl_table_entry);
8774 entry->orig = targ;
8775 entry->copy = die;
8776 *slot = entry;
8777 }
8778 FOR_EACH_CHILD (die, c, collect_skeleton_dies (c, decl_table));
8779 }
8780
8781 /* Copy declarations for "unworthy" types into the new comdat section.
8782 Incomplete types, modified types, and certain other types aren't broken
8783 out into comdat sections of their own, so they don't have a signature,
8784 and we need to copy the declaration into the same section so that we
8785 don't have an external reference. */
8786
8787 static void
8788 copy_decls_for_unworthy_types (dw_die_ref unit)
8789 {
8790 mark_dies (unit);
8791 decl_hash_type decl_table (10);
8792 collect_skeleton_dies (unit, &decl_table);
8793 copy_decls_walk (unit, unit, &decl_table);
8794 unmark_dies (unit);
8795 }
8796
8797 /* Traverse the DIE and add a sibling attribute if it may have the
8798 effect of speeding up access to siblings. To save some space,
8799 avoid generating sibling attributes for DIEs without children. */
8800
8801 static void
8802 add_sibling_attributes (dw_die_ref die)
8803 {
8804 dw_die_ref c;
8805
8806 if (! die->die_child)
8807 return;
8808
8809 if (die->die_parent && die != die->die_parent->die_child)
8810 add_AT_die_ref (die, DW_AT_sibling, die->die_sib);
8811
8812 FOR_EACH_CHILD (die, c, add_sibling_attributes (c));
8813 }
8814
8815 /* Output all location lists for the DIE and its children. */
8816
8817 static void
8818 output_location_lists (dw_die_ref die)
8819 {
8820 dw_die_ref c;
8821 dw_attr_node *a;
8822 unsigned ix;
8823
8824 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8825 if (AT_class (a) == dw_val_class_loc_list)
8826 output_loc_list (AT_loc_list (a));
8827
8828 FOR_EACH_CHILD (die, c, output_location_lists (c));
8829 }
8830
8831 /* During assign_location_list_indexes and output_loclists_offsets this is
8832 the current index; afterwards it is the number of assigned indexes (i.e. how
8833 large the .debug_loclists* offset table should be). */
8834 static unsigned int loc_list_idx;
8835
8836 /* Output all location list offsets for the DIE and its children. */
8837
8838 static void
8839 output_loclists_offsets (dw_die_ref die)
8840 {
8841 dw_die_ref c;
8842 dw_attr_node *a;
8843 unsigned ix;
8844
8845 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8846 if (AT_class (a) == dw_val_class_loc_list)
8847 {
8848 dw_loc_list_ref l = AT_loc_list (a);
8849 if (l->offset_emitted)
8850 continue;
8851 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l->ll_symbol,
8852 loc_section_label, NULL);
8853 gcc_assert (l->hash == loc_list_idx);
8854 loc_list_idx++;
8855 l->offset_emitted = true;
8856 }
8857
8858 FOR_EACH_CHILD (die, c, output_loclists_offsets (c));
8859 }
8860
8861 /* Recursively set indexes of location lists. */
8862
8863 static void
8864 assign_location_list_indexes (dw_die_ref die)
8865 {
8866 dw_die_ref c;
8867 dw_attr_node *a;
8868 unsigned ix;
8869
8870 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8871 if (AT_class (a) == dw_val_class_loc_list)
8872 {
8873 dw_loc_list_ref list = AT_loc_list (a);
8874 if (!list->num_assigned)
8875 {
8876 list->num_assigned = true;
8877 list->hash = loc_list_idx++;
8878 }
8879 }
8880
8881 FOR_EACH_CHILD (die, c, assign_location_list_indexes (c));
8882 }
8883
8884 /* We want to limit the number of external references, because they are
8885 larger than local references: a relocation takes multiple words, and
8886 even a sig8 reference is always eight bytes, whereas a local reference
8887 can be as small as one byte (though the form GCC usually emits, DW_FORM_ref4, is four bytes).
8888 So if we encounter multiple external references to the same type DIE, we
8889 make a local typedef stub for it and redirect all references there.
8890
8891 This is the element of the hash table for keeping track of these
8892 references. */
8893
8894 struct external_ref
8895 {
8896 dw_die_ref type;
8897 dw_die_ref stub;
8898 unsigned n_refs;
8899 };
8900
8901 /* Hashtable helpers. */
8902
8903 struct external_ref_hasher : free_ptr_hash <external_ref>
8904 {
8905 static inline hashval_t hash (const external_ref *);
8906 static inline bool equal (const external_ref *, const external_ref *);
8907 };
8908
8909 inline hashval_t
8910 external_ref_hasher::hash (const external_ref *r)
8911 {
8912 dw_die_ref die = r->type;
8913 hashval_t h = 0;
8914
8915 /* We can't use the address of the DIE for hashing, because
8916 that will make the order of the stub DIEs non-deterministic. */
8917 if (! die->comdat_type_p)
8918 /* We have a symbol; use it to compute a hash. */
8919 h = htab_hash_string (die->die_id.die_symbol);
8920 else
8921 {
8922 /* We have a type signature; use a subset of the bits as the hash.
8923 The 8-byte signature is at least as large as hashval_t. */
8924 comdat_type_node *type_node = die->die_id.die_type_node;
8925 memcpy (&h, type_node->signature, sizeof (h));
8926 }
8927 return h;
8928 }
8929
8930 inline bool
8931 external_ref_hasher::equal (const external_ref *r1, const external_ref *r2)
8932 {
8933 return r1->type == r2->type;
8934 }
8935
8936 typedef hash_table<external_ref_hasher> external_ref_hash_type;
8937
8938 /* Return a pointer to the external_ref for references to DIE. */
8939
8940 static struct external_ref *
8941 lookup_external_ref (external_ref_hash_type *map, dw_die_ref die)
8942 {
8943 struct external_ref ref, *ref_p;
8944 external_ref **slot;
8945
8946 ref.type = die;
8947 slot = map->find_slot (&ref, INSERT);
8948 if (*slot != HTAB_EMPTY_ENTRY)
8949 return *slot;
8950
8951 ref_p = XCNEW (struct external_ref);
8952 ref_p->type = die;
8953 *slot = ref_p;
8954 return ref_p;
8955 }
8956
8957 /* Subroutine of optimize_external_refs, below.
8958
8959 If we see a type skeleton, record it as our stub. If we see external
8960 references, remember how many we've seen. */
8961
8962 static void
8963 optimize_external_refs_1 (dw_die_ref die, external_ref_hash_type *map)
8964 {
8965 dw_die_ref c;
8966 dw_attr_node *a;
8967 unsigned ix;
8968 struct external_ref *ref_p;
8969
8970 if (is_type_die (die)
8971 && (c = get_AT_ref (die, DW_AT_signature)))
8972 {
8973 /* This is a local skeleton; use it for local references. */
8974 ref_p = lookup_external_ref (map, c);
8975 ref_p->stub = die;
8976 }
8977
8978 /* Scan the DIE references, and remember any that refer to DIEs from
8979 other CUs (i.e. those which are not marked). */
8980 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
8981 if (AT_class (a) == dw_val_class_die_ref
8982 && (c = AT_ref (a))->die_mark == 0
8983 && is_type_die (c))
8984 {
8985 ref_p = lookup_external_ref (map, c);
8986 ref_p->n_refs++;
8987 }
8988
8989 FOR_EACH_CHILD (die, c, optimize_external_refs_1 (c, map));
8990 }
8991
8992 /* htab_traverse callback function for optimize_external_refs, below. SLOT
8993 points to an external_ref, DATA is the CU we're processing. If we don't
8994 already have a local stub, and we have multiple refs, build a stub. */
8995
8996 int
8997 dwarf2_build_local_stub (external_ref **slot, dw_die_ref data)
8998 {
8999 struct external_ref *ref_p = *slot;
9000
9001 if (ref_p->stub == NULL && ref_p->n_refs > 1 && !dwarf_strict)
9002 {
9003 /* We have multiple references to this type, so build a small stub.
9004 Both of these forms are a bit dodgy from the perspective of the
9005 DWARF standard, since technically they should have names. */
9006 dw_die_ref cu = data;
9007 dw_die_ref type = ref_p->type;
9008 dw_die_ref stub = NULL;
9009
9010 if (type->comdat_type_p)
9011 {
9012 /* If we refer to this type via sig8, use AT_signature. */
9013 stub = new_die (type->die_tag, cu, NULL_TREE);
9014 add_AT_die_ref (stub, DW_AT_signature, type);
9015 }
9016 else
9017 {
9018 /* Otherwise, use a typedef with no name. */
9019 stub = new_die (DW_TAG_typedef, cu, NULL_TREE);
9020 add_AT_die_ref (stub, DW_AT_type, type);
9021 }
9022
9023 stub->die_mark++;
9024 ref_p->stub = stub;
9025 }
9026 return 1;
9027 }
9028
9029 /* DIE is a unit; look through all the DIE references to see if there are
9030 any external references to types, and if so, create local stubs for
9031 them which will be applied in build_abbrev_table. This is useful because
9032 references to local DIEs are smaller. */
9033
9034 static external_ref_hash_type *
9035 optimize_external_refs (dw_die_ref die)
9036 {
9037 external_ref_hash_type *map = new external_ref_hash_type (10);
9038 optimize_external_refs_1 (die, map);
9039 map->traverse <dw_die_ref, dwarf2_build_local_stub> (die);
9040 return map;
9041 }
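
/* Rough size example for the stub optimization above (assuming
   DWARF_OFFSET_SIZE == 4): three references to the same external comdat
   type DIE each need an 8-byte DW_FORM_ref_sig8, i.e. 24 bytes.  With a
   local stub they become three 4-byte local references plus one small
   stub DIE holding the 8-byte signature, roughly 21 bytes in total, and
   the saving grows with every additional reference to that type.  */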
9042
9043 /* The following 4 variables are temporaries that are computed only during the
9044 build_abbrev_table call and used and released during the following
9045 optimize_abbrev_table call. */
9046
9047 /* First abbrev_id that can be optimized based on usage. */
9048 static unsigned int abbrev_opt_start;
9049
9050 /* Maximum abbrev_id of a base type plus one (we can't optimize DIEs with
9051    abbrev_id smaller than this, because they must already be sized
9052 during build_abbrev_table). */
9053 static unsigned int abbrev_opt_base_type_end;
9054
9055 /* Vector of usage counts during build_abbrev_table. Indexed by
9056 abbrev_id - abbrev_opt_start. */
9057 static vec<unsigned int> abbrev_usage_count;
9058
9059 /* Vector of all DIEs added with die_abbrev >= abbrev_opt_start. */
9060 static vec<dw_die_ref> sorted_abbrev_dies;
9061
9062 /* The format of each DIE (and its attribute value pairs) is encoded in an
9063 abbreviation table. This routine builds the abbreviation table and assigns
9064 a unique abbreviation id for each abbreviation entry. The children of each
9065 die are visited recursively. */
9066
9067 static void
9068 build_abbrev_table (dw_die_ref die, external_ref_hash_type *extern_map)
9069 {
9070 unsigned int abbrev_id = 0;
9071 dw_die_ref c;
9072 dw_attr_node *a;
9073 unsigned ix;
9074 dw_die_ref abbrev;
9075
9076 /* Scan the DIE references, and replace any that refer to
9077 DIEs from other CUs (i.e. those which are not marked) with
9078 the local stubs we built in optimize_external_refs. */
9079 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9080 if (AT_class (a) == dw_val_class_die_ref
9081 && (c = AT_ref (a))->die_mark == 0)
9082 {
9083 struct external_ref *ref_p;
9084 gcc_assert (AT_ref (a)->comdat_type_p || AT_ref (a)->die_id.die_symbol);
9085
9086 if (is_type_die (c)
9087 && (ref_p = lookup_external_ref (extern_map, c))
9088 && ref_p->stub && ref_p->stub != die)
9089 {
9090 gcc_assert (a->dw_attr != DW_AT_signature);
9091 change_AT_die_ref (a, ref_p->stub);
9092 }
9093 else
9094 /* We aren't changing this reference, so mark it external. */
9095 set_AT_ref_external (a, 1);
9096 }
9097
9098 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
9099 {
9100 dw_attr_node *die_a, *abbrev_a;
9101 unsigned ix;
9102 bool ok = true;
9103
9104 if (abbrev_id == 0)
9105 continue;
9106 if (abbrev->die_tag != die->die_tag)
9107 continue;
9108 if ((abbrev->die_child != NULL) != (die->die_child != NULL))
9109 continue;
9110
9111 if (vec_safe_length (abbrev->die_attr) != vec_safe_length (die->die_attr))
9112 continue;
9113
9114 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, die_a)
9115 {
9116 abbrev_a = &(*abbrev->die_attr)[ix];
9117 if ((abbrev_a->dw_attr != die_a->dw_attr)
9118 || (value_format (abbrev_a) != value_format (die_a)))
9119 {
9120 ok = false;
9121 break;
9122 }
9123 }
9124 if (ok)
9125 break;
9126 }
9127
9128 if (abbrev_id >= vec_safe_length (abbrev_die_table))
9129 {
9130 vec_safe_push (abbrev_die_table, die);
9131 if (abbrev_opt_start)
9132 abbrev_usage_count.safe_push (0);
9133 }
9134 if (abbrev_opt_start && abbrev_id >= abbrev_opt_start)
9135 {
9136 abbrev_usage_count[abbrev_id - abbrev_opt_start]++;
9137 sorted_abbrev_dies.safe_push (die);
9138 }
9139
9140 die->die_abbrev = abbrev_id;
9141 FOR_EACH_CHILD (die, c, build_abbrev_table (c, extern_map));
9142 }
9143
9144 /* Callback function for sorted_abbrev_dies vector sorting. We sort
9145 by die_abbrev's usage count, from the most commonly used
9146 abbreviation to the least. */
9147
9148 static int
9149 die_abbrev_cmp (const void *p1, const void *p2)
9150 {
9151 dw_die_ref die1 = *(const dw_die_ref *) p1;
9152 dw_die_ref die2 = *(const dw_die_ref *) p2;
9153
9154 gcc_checking_assert (die1->die_abbrev >= abbrev_opt_start);
9155 gcc_checking_assert (die2->die_abbrev >= abbrev_opt_start);
9156
9157 if (die1->die_abbrev >= abbrev_opt_base_type_end
9158 && die2->die_abbrev >= abbrev_opt_base_type_end)
9159 {
9160 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9161 > abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9162 return -1;
9163 if (abbrev_usage_count[die1->die_abbrev - abbrev_opt_start]
9164 < abbrev_usage_count[die2->die_abbrev - abbrev_opt_start])
9165 return 1;
9166 }
9167
9168 /* Stabilize the sort. */
9169 if (die1->die_abbrev < die2->die_abbrev)
9170 return -1;
9171 if (die1->die_abbrev > die2->die_abbrev)
9172 return 1;
9173
9174 return 0;
9175 }
9176
9177 /* Convert dw_val_class_const and dw_val_class_unsigned_const class attributes
9178    of DIEs between sorted_abbrev_dies[first_id] and sorted_abbrev_dies[end - 1]
9179 into dw_val_class_const_implicit or
9180 dw_val_class_unsigned_const_implicit. */
9181
9182 static void
9183 optimize_implicit_const (unsigned int first_id, unsigned int end,
9184 vec<bool> &implicit_consts)
9185 {
9186 /* It never makes sense if there is just one DIE using the abbreviation. */
9187 if (end < first_id + 2)
9188 return;
9189
9190 dw_attr_node *a;
9191 unsigned ix, i;
9192 dw_die_ref die = sorted_abbrev_dies[first_id];
9193 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9194 if (implicit_consts[ix])
9195 {
9196 enum dw_val_class new_class = dw_val_class_none;
9197 switch (AT_class (a))
9198 {
9199 case dw_val_class_unsigned_const:
9200 if ((HOST_WIDE_INT) AT_unsigned (a) < 0)
9201 continue;
9202
9203 /* The .debug_abbrev section will grow by
9204 size_of_sleb128 (AT_unsigned (a)) and we avoid the constants
9205 in all the DIEs using that abbreviation. */
9206 if (constant_size (AT_unsigned (a)) * (end - first_id)
9207 <= (unsigned) size_of_sleb128 (AT_unsigned (a)))
9208 continue;
9209
9210 new_class = dw_val_class_unsigned_const_implicit;
9211 break;
9212
9213 case dw_val_class_const:
9214 new_class = dw_val_class_const_implicit;
9215 break;
9216
9217 case dw_val_class_file:
9218 new_class = dw_val_class_file_implicit;
9219 break;
9220
9221 default:
9222 continue;
9223 }
9224 for (i = first_id; i < end; i++)
9225 (*sorted_abbrev_dies[i]->die_attr)[ix].dw_attr_val.val_class
9226 = new_class;
9227 }
9228 }
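
/* Example of the space trade-off checked above (for a DWARF 5 producer):
   if 10 DIEs share an abbreviation whose DW_AT_byte_size is always 4,
   the value costs constant_size (4) == 1 byte in each DIE, i.e. 10 bytes
   total, while moving it into .debug_abbrev as DW_FORM_implicit_const
   costs a single 1-byte sleb128.  Since 10 * 1 > 1, the value is promoted.
   With a single DIE using the abbreviation nothing can be saved, hence
   the end < first_id + 2 early return.  */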
9229
9230 /* Attempt to optimize the abbreviation table for abbreviations with ids
9231    of abbrev_opt_start and above.  */
9232
9233 static void
9234 optimize_abbrev_table (void)
9235 {
9236 if (abbrev_opt_start
9237 && vec_safe_length (abbrev_die_table) > abbrev_opt_start
9238 && (dwarf_version >= 5 || vec_safe_length (abbrev_die_table) > 127))
9239 {
9240 auto_vec<bool, 32> implicit_consts;
9241 sorted_abbrev_dies.qsort (die_abbrev_cmp);
9242
9243 unsigned int abbrev_id = abbrev_opt_start - 1;
9244 unsigned int first_id = ~0U;
9245 unsigned int last_abbrev_id = 0;
9246 unsigned int i;
9247 dw_die_ref die;
9248 if (abbrev_opt_base_type_end > abbrev_opt_start)
9249 abbrev_id = abbrev_opt_base_type_end - 1;
9250 /* Reassign abbreviation ids from abbrev_opt_start above, so that
9251 most commonly used abbreviations come first. */
9252 FOR_EACH_VEC_ELT (sorted_abbrev_dies, i, die)
9253 {
9254 dw_attr_node *a;
9255 unsigned ix;
9256
9257 /* If calc_base_type_die_sizes has been called, the CU and
9258 base types after it can't be optimized, because we've already
9259 calculated their DIE offsets. We've sorted them first. */
9260 if (die->die_abbrev < abbrev_opt_base_type_end)
9261 continue;
9262 if (die->die_abbrev != last_abbrev_id)
9263 {
9264 last_abbrev_id = die->die_abbrev;
9265 if (dwarf_version >= 5 && first_id != ~0U)
9266 optimize_implicit_const (first_id, i, implicit_consts);
9267 abbrev_id++;
9268 (*abbrev_die_table)[abbrev_id] = die;
9269 if (dwarf_version >= 5)
9270 {
9271 first_id = i;
9272 implicit_consts.truncate (0);
9273
9274 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9275 switch (AT_class (a))
9276 {
9277 case dw_val_class_const:
9278 case dw_val_class_unsigned_const:
9279 case dw_val_class_file:
9280 implicit_consts.safe_push (true);
9281 break;
9282 default:
9283 implicit_consts.safe_push (false);
9284 break;
9285 }
9286 }
9287 }
9288 else if (dwarf_version >= 5)
9289 {
9290 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9291 if (!implicit_consts[ix])
9292 continue;
9293 else
9294 {
9295 dw_attr_node *other_a
9296 = &(*(*abbrev_die_table)[abbrev_id]->die_attr)[ix];
9297 if (!dw_val_equal_p (&a->dw_attr_val,
9298 &other_a->dw_attr_val))
9299 implicit_consts[ix] = false;
9300 }
9301 }
9302 die->die_abbrev = abbrev_id;
9303 }
9304 gcc_assert (abbrev_id == vec_safe_length (abbrev_die_table) - 1);
9305 if (dwarf_version >= 5 && first_id != ~0U)
9306 optimize_implicit_const (first_id, i, implicit_consts);
9307 }
9308
9309 abbrev_opt_start = 0;
9310 abbrev_opt_base_type_end = 0;
9311 abbrev_usage_count.release ();
9312 sorted_abbrev_dies.release ();
9313 }
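
/* The payoff of the reassignment above comes from the uleb128 encoding of
   abbreviation codes: codes 1..127 cost one byte per DIE, 128..16383 cost
   two, and so on.  If, say, an abbreviation used by thousands of
   DW_TAG_formal_parameter DIEs originally received code 200, renumbering
   it into the 1..127 range saves a byte in every one of those DIEs, while
   the .debug_abbrev entries themselves merely change order.  */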
9314 \f
9315 /* Return the power-of-two number of bytes necessary to represent VALUE. */
9316
9317 static int
9318 constant_size (unsigned HOST_WIDE_INT value)
9319 {
9320 int log;
9321
9322 if (value == 0)
9323 log = 0;
9324 else
9325 log = floor_log2 (value);
9326
9327 log = log / 8;
9328 log = 1 << (floor_log2 (log) + 1);
9329
9330 return log;
9331 }
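
/* For reference, this maps values onto the DW_FORM_data1/2/4/8 sizes:
   0 .. 0xff -> 1, 0x100 .. 0xffff -> 2, 0x10000 .. 0xffffffff -> 4,
   anything larger -> 8.  */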
9332
9333 /* Return the size of a DIE as it is represented in the
9334 .debug_info section. */
9335
9336 static unsigned long
9337 size_of_die (dw_die_ref die)
9338 {
9339 unsigned long size = 0;
9340 dw_attr_node *a;
9341 unsigned ix;
9342 enum dwarf_form form;
9343
9344 size += size_of_uleb128 (die->die_abbrev);
9345 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9346 {
9347 switch (AT_class (a))
9348 {
9349 case dw_val_class_addr:
9350 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9351 {
9352 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9353 size += size_of_uleb128 (AT_index (a));
9354 }
9355 else
9356 size += DWARF2_ADDR_SIZE;
9357 break;
9358 case dw_val_class_offset:
9359 size += DWARF_OFFSET_SIZE;
9360 break;
9361 case dw_val_class_loc:
9362 {
9363 unsigned long lsize = size_of_locs (AT_loc (a));
9364
9365 /* Block length. */
9366 if (dwarf_version >= 4)
9367 size += size_of_uleb128 (lsize);
9368 else
9369 size += constant_size (lsize);
9370 size += lsize;
9371 }
9372 break;
9373 case dw_val_class_loc_list:
9374 if (dwarf_split_debug_info && dwarf_version >= 5)
9375 {
9376 gcc_assert (AT_loc_list (a)->num_assigned);
9377 size += size_of_uleb128 (AT_loc_list (a)->hash);
9378 }
9379 else
9380 size += DWARF_OFFSET_SIZE;
9381 break;
9382 case dw_val_class_view_list:
9383 size += DWARF_OFFSET_SIZE;
9384 break;
9385 case dw_val_class_range_list:
9386 if (value_format (a) == DW_FORM_rnglistx)
9387 {
9388 gcc_assert (rnglist_idx);
9389 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
9390 size += size_of_uleb128 (r->idx);
9391 }
9392 else
9393 size += DWARF_OFFSET_SIZE;
9394 break;
9395 case dw_val_class_const:
9396 size += size_of_sleb128 (AT_int (a));
9397 break;
9398 case dw_val_class_unsigned_const:
9399 {
9400 int csize = constant_size (AT_unsigned (a));
9401 if (dwarf_version == 3
9402 && a->dw_attr == DW_AT_data_member_location
9403 && csize >= 4)
9404 size += size_of_uleb128 (AT_unsigned (a));
9405 else
9406 size += csize;
9407 }
9408 break;
9409 case dw_val_class_symview:
9410 if (symview_upper_bound <= 0xff)
9411 size += 1;
9412 else if (symview_upper_bound <= 0xffff)
9413 size += 2;
9414 else if (symview_upper_bound <= 0xffffffff)
9415 size += 4;
9416 else
9417 size += 8;
9418 break;
9419 case dw_val_class_const_implicit:
9420 case dw_val_class_unsigned_const_implicit:
9421 case dw_val_class_file_implicit:
9422 /* These occupy no size in the DIE, just an extra sleb128 in
9423 .debug_abbrev. */
9424 break;
9425 case dw_val_class_const_double:
9426 size += HOST_BITS_PER_DOUBLE_INT / HOST_BITS_PER_CHAR;
9427 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
9428 size++; /* block */
9429 break;
9430 case dw_val_class_wide_int:
9431 size += (get_full_len (*a->dw_attr_val.v.val_wide)
9432 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
9433 if (get_full_len (*a->dw_attr_val.v.val_wide)
9434 * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
9435 size++; /* block */
9436 break;
9437 case dw_val_class_vec:
9438 size += constant_size (a->dw_attr_val.v.val_vec.length
9439 * a->dw_attr_val.v.val_vec.elt_size)
9440 + a->dw_attr_val.v.val_vec.length
9441 * a->dw_attr_val.v.val_vec.elt_size; /* block */
9442 break;
9443 case dw_val_class_flag:
9444 if (dwarf_version >= 4)
9445 /* Currently all add_AT_flag calls pass in 1 as last argument,
9446 so DW_FORM_flag_present can be used. If that ever changes,
9447 we'll need to use DW_FORM_flag and have some optimization
9448 in build_abbrev_table that will change those to
9449 DW_FORM_flag_present if it is set to 1 in all DIEs using
9450 the same abbrev entry. */
9451 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9452 else
9453 size += 1;
9454 break;
9455 case dw_val_class_die_ref:
9456 if (AT_ref_external (a))
9457 {
9458 /* In DWARF4, we use DW_FORM_ref_sig8; for earlier versions
9459 we use DW_FORM_ref_addr. In DWARF2, DW_FORM_ref_addr
9460 is sized by target address length, whereas in DWARF3
9461 it's always sized as an offset. */
9462 if (AT_ref (a)->comdat_type_p)
9463 size += DWARF_TYPE_SIGNATURE_SIZE;
9464 else if (dwarf_version == 2)
9465 size += DWARF2_ADDR_SIZE;
9466 else
9467 size += DWARF_OFFSET_SIZE;
9468 }
9469 else
9470 size += DWARF_OFFSET_SIZE;
9471 break;
9472 case dw_val_class_fde_ref:
9473 size += DWARF_OFFSET_SIZE;
9474 break;
9475 case dw_val_class_lbl_id:
9476 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
9477 {
9478 gcc_assert (AT_index (a) != NO_INDEX_ASSIGNED);
9479 size += size_of_uleb128 (AT_index (a));
9480 }
9481 else
9482 size += DWARF2_ADDR_SIZE;
9483 break;
9484 case dw_val_class_lineptr:
9485 case dw_val_class_macptr:
9486 case dw_val_class_loclistsptr:
9487 size += DWARF_OFFSET_SIZE;
9488 break;
9489 case dw_val_class_str:
9490 form = AT_string_form (a);
9491 if (form == DW_FORM_strp || form == DW_FORM_line_strp)
9492 size += DWARF_OFFSET_SIZE;
9493 else if (form == dwarf_FORM (DW_FORM_strx))
9494 size += size_of_uleb128 (AT_index (a));
9495 else
9496 size += strlen (a->dw_attr_val.v.val_str->str) + 1;
9497 break;
9498 case dw_val_class_file:
9499 size += constant_size (maybe_emit_file (a->dw_attr_val.v.val_file));
9500 break;
9501 case dw_val_class_data8:
9502 size += 8;
9503 break;
9504 case dw_val_class_vms_delta:
9505 size += DWARF_OFFSET_SIZE;
9506 break;
9507 case dw_val_class_high_pc:
9508 size += DWARF2_ADDR_SIZE;
9509 break;
9510 case dw_val_class_discr_value:
9511 size += size_of_discr_value (&a->dw_attr_val.v.val_discr_value);
9512 break;
9513 case dw_val_class_discr_list:
9514 {
9515 unsigned block_size = size_of_discr_list (AT_discr_list (a));
9516
9517 /* This is a block, so we have the block length and then its
9518 data. */
9519 size += constant_size (block_size) + block_size;
9520 }
9521 break;
9522 default:
9523 gcc_unreachable ();
9524 }
9525 }
9526
9527 return size;
9528 }
9529
9530 /* Size the debugging information associated with a given DIE. Visits the
9531    DIE's children recursively.  Updates the global variable next_die_offset
9532    each time through.  Uses the current value of next_die_offset to update the
9533 die_offset field in each DIE. */
9534
9535 static void
9536 calc_die_sizes (dw_die_ref die)
9537 {
9538 dw_die_ref c;
9539
9540 gcc_assert (die->die_offset == 0
9541 || (unsigned long int) die->die_offset == next_die_offset);
9542 die->die_offset = next_die_offset;
9543 next_die_offset += size_of_die (die);
9544
9545 FOR_EACH_CHILD (die, c, calc_die_sizes (c));
9546
9547 if (die->die_child != NULL)
9548 /* Count the null byte used to terminate sibling lists. */
9549 next_die_offset += 1;
9550 }
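
/* Small example of the offset assignment above: a parent DIE whose own
   encoding takes 8 bytes and which has two 6-byte children receives the
   current next_die_offset, the children receive offsets +8 and +14, and
   next_die_offset advances by 8 + 6 + 6 + 1, the final 1 being the null
   byte that terminates the parent's list of children.  */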
9551
9552 /* Size just the base type children at the start of the CU.
9553    This is needed because build_abbrev_table needs to size locs,
9554    and sizing of type-based stack ops needs to know die_offset
9555 values for the base types. */
9556
9557 static void
9558 calc_base_type_die_sizes (void)
9559 {
9560 unsigned long die_offset = (dwarf_split_debug_info
9561 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
9562 : DWARF_COMPILE_UNIT_HEADER_SIZE);
9563 unsigned int i;
9564 dw_die_ref base_type;
9565 #if ENABLE_ASSERT_CHECKING
9566 dw_die_ref prev = comp_unit_die ()->die_child;
9567 #endif
9568
9569 die_offset += size_of_die (comp_unit_die ());
9570 for (i = 0; base_types.iterate (i, &base_type); i++)
9571 {
9572 #if ENABLE_ASSERT_CHECKING
9573 gcc_assert (base_type->die_offset == 0
9574 && prev->die_sib == base_type
9575 && base_type->die_child == NULL
9576 && base_type->die_abbrev);
9577 prev = base_type;
9578 #endif
9579 if (abbrev_opt_start
9580 && base_type->die_abbrev >= abbrev_opt_base_type_end)
9581 abbrev_opt_base_type_end = base_type->die_abbrev + 1;
9582 base_type->die_offset = die_offset;
9583 die_offset += size_of_die (base_type);
9584 }
9585 }
9586
9587 /* Set the marks for a die and its children. We do this so
9588    that we know whether or not a reference needs to use DW_FORM_ref_addr; only
9589 DIEs in the same CU will be marked. We used to clear out the offset
9590 and use that as the flag, but ran into ordering problems. */
9591
9592 static void
9593 mark_dies (dw_die_ref die)
9594 {
9595 dw_die_ref c;
9596
9597 gcc_assert (!die->die_mark);
9598
9599 die->die_mark = 1;
9600 FOR_EACH_CHILD (die, c, mark_dies (c));
9601 }
9602
9603 /* Clear the marks for a die and its children. */
9604
9605 static void
9606 unmark_dies (dw_die_ref die)
9607 {
9608 dw_die_ref c;
9609
9610 if (! use_debug_types)
9611 gcc_assert (die->die_mark);
9612
9613 die->die_mark = 0;
9614 FOR_EACH_CHILD (die, c, unmark_dies (c));
9615 }
9616
9617 /* Clear the marks for a die, its children and referred dies. */
9618
9619 static void
9620 unmark_all_dies (dw_die_ref die)
9621 {
9622 dw_die_ref c;
9623 dw_attr_node *a;
9624 unsigned ix;
9625
9626 if (!die->die_mark)
9627 return;
9628 die->die_mark = 0;
9629
9630 FOR_EACH_CHILD (die, c, unmark_all_dies (c));
9631
9632 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
9633 if (AT_class (a) == dw_val_class_die_ref)
9634 unmark_all_dies (AT_ref (a));
9635 }
9636
9637 /* Calculate whether the entry should appear in the final output file.  It may
9638    be from a pruned type.  */
9639
9640 static bool
9641 include_pubname_in_output (vec<pubname_entry, va_gc> *table, pubname_entry *p)
9642 {
9643 /* By limiting gnu pubnames to definitions only, gold can generate a
9644 gdb index without entries for declarations, which don't include
9645 enough information to be useful. */
9646 if (debug_generate_pub_sections == 2 && is_declaration_die (p->die))
9647 return false;
9648
9649 if (table == pubname_table)
9650 {
9651 /* Enumerator names are part of the pubname table, but the
9652 parent DW_TAG_enumeration_type die may have been pruned.
9653 Don't output them if that is the case. */
9654 if (p->die->die_tag == DW_TAG_enumerator &&
9655 (p->die->die_parent == NULL
9656 || !p->die->die_parent->die_perennial_p))
9657 return false;
9658
9659 /* Everything else in the pubname table is included. */
9660 return true;
9661 }
9662
9663 /* The pubtypes table shouldn't include types that have been
9664 pruned. */
9665 return (p->die->die_offset != 0
9666 || !flag_eliminate_unused_debug_types);
9667 }
9668
9669 /* Return the size of the .debug_pubnames or .debug_pubtypes table
9670 generated for the compilation unit. */
9671
9672 static unsigned long
9673 size_of_pubnames (vec<pubname_entry, va_gc> *names)
9674 {
9675 unsigned long size;
9676 unsigned i;
9677 pubname_entry *p;
9678 int space_for_flags = (debug_generate_pub_sections == 2) ? 1 : 0;
9679
9680 size = DWARF_PUBNAMES_HEADER_SIZE;
9681 FOR_EACH_VEC_ELT (*names, i, p)
9682 if (include_pubname_in_output (names, p))
9683 size += strlen (p->name) + DWARF_OFFSET_SIZE + 1 + space_for_flags;
9684
9685 size += DWARF_OFFSET_SIZE;
9686 return size;
9687 }
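
/* Example of the per-entry arithmetic above (assuming DWARF_OFFSET_SIZE
   == 4 and debug_generate_pub_sections == 2, i.e. GNU pubnames): the
   name "foo" contributes strlen ("foo") + 4 + 1 + 1 = 9 bytes -- DIE
   offset, NUL-terminated string and the one extra flags byte used by the
   GNU format -- on top of the fixed header and the terminating zero
   offset.  */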
9688
9689 /* Return the size of the information in the .debug_aranges section. */
9690
9691 static unsigned long
9692 size_of_aranges (void)
9693 {
9694 unsigned long size;
9695
9696 size = DWARF_ARANGES_HEADER_SIZE;
9697
9698 /* Count the address/length pair for this compilation unit. */
9699 if (text_section_used)
9700 size += 2 * DWARF2_ADDR_SIZE;
9701 if (cold_text_section_used)
9702 size += 2 * DWARF2_ADDR_SIZE;
9703 if (have_multiple_function_sections)
9704 {
9705 unsigned fde_idx;
9706 dw_fde_ref fde;
9707
9708 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
9709 {
9710 if (DECL_IGNORED_P (fde->decl))
9711 continue;
9712 if (!fde->in_std_section)
9713 size += 2 * DWARF2_ADDR_SIZE;
9714 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
9715 size += 2 * DWARF2_ADDR_SIZE;
9716 }
9717 }
9718
9719   /* Count the two zero words used to terminate the address range table.  */
9720 size += 2 * DWARF2_ADDR_SIZE;
9721 return size;
9722 }
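
/* Example (assuming DWARF2_ADDR_SIZE == 8 and all code in .text): the
   estimate is the aranges header, one 16-byte address/length pair for
   the text section and the 16-byte terminating pair of zero words,
   i.e. DWARF_ARANGES_HEADER_SIZE + 32.  */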
9723 \f
9724 /* Select the encoding of an attribute value. */
9725
9726 static enum dwarf_form
9727 value_format (dw_attr_node *a)
9728 {
9729 switch (AT_class (a))
9730 {
9731 case dw_val_class_addr:
9732 /* Only very few attributes allow DW_FORM_addr. */
9733 switch (a->dw_attr)
9734 {
9735 case DW_AT_low_pc:
9736 case DW_AT_high_pc:
9737 case DW_AT_entry_pc:
9738 case DW_AT_trampoline:
9739 return (AT_index (a) == NOT_INDEXED
9740 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9741 default:
9742 break;
9743 }
9744 switch (DWARF2_ADDR_SIZE)
9745 {
9746 case 1:
9747 return DW_FORM_data1;
9748 case 2:
9749 return DW_FORM_data2;
9750 case 4:
9751 return DW_FORM_data4;
9752 case 8:
9753 return DW_FORM_data8;
9754 default:
9755 gcc_unreachable ();
9756 }
9757 case dw_val_class_loc_list:
9758 if (dwarf_split_debug_info
9759 && dwarf_version >= 5
9760 && AT_loc_list (a)->num_assigned)
9761 return DW_FORM_loclistx;
9762 /* FALLTHRU */
9763 case dw_val_class_view_list:
9764 case dw_val_class_range_list:
9765 /* For range lists in DWARF 5, use DW_FORM_rnglistx from .debug_info.dwo
9766 but in .debug_info use DW_FORM_sec_offset, which is shorter if we
9767 care about sizes of .debug* sections in shared libraries and
9768 executables and don't take into account relocations that affect just
9769      relocatable objects - for DW_FORM_rnglistx we'd have to emit an offset
9770      table in the .debug_rnglists section.  */
9771 if (dwarf_split_debug_info
9772 && dwarf_version >= 5
9773 && AT_class (a) == dw_val_class_range_list
9774 && rnglist_idx
9775 && a->dw_attr_val.val_entry != RELOCATED_OFFSET)
9776 return DW_FORM_rnglistx;
9777 if (dwarf_version >= 4)
9778 return DW_FORM_sec_offset;
9779 /* FALLTHRU */
9780 case dw_val_class_vms_delta:
9781 case dw_val_class_offset:
9782 switch (DWARF_OFFSET_SIZE)
9783 {
9784 case 4:
9785 return DW_FORM_data4;
9786 case 8:
9787 return DW_FORM_data8;
9788 default:
9789 gcc_unreachable ();
9790 }
9791 case dw_val_class_loc:
9792 if (dwarf_version >= 4)
9793 return DW_FORM_exprloc;
9794 switch (constant_size (size_of_locs (AT_loc (a))))
9795 {
9796 case 1:
9797 return DW_FORM_block1;
9798 case 2:
9799 return DW_FORM_block2;
9800 case 4:
9801 return DW_FORM_block4;
9802 default:
9803 gcc_unreachable ();
9804 }
9805 case dw_val_class_const:
9806 return DW_FORM_sdata;
9807 case dw_val_class_unsigned_const:
9808 switch (constant_size (AT_unsigned (a)))
9809 {
9810 case 1:
9811 return DW_FORM_data1;
9812 case 2:
9813 return DW_FORM_data2;
9814 case 4:
9815 /* In DWARF3 DW_AT_data_member_location with
9816 DW_FORM_data4 or DW_FORM_data8 is a loclistptr, not
9817 constant, so we need to use DW_FORM_udata if we need
9818 a large constant. */
9819 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9820 return DW_FORM_udata;
9821 return DW_FORM_data4;
9822 case 8:
9823 if (dwarf_version == 3 && a->dw_attr == DW_AT_data_member_location)
9824 return DW_FORM_udata;
9825 return DW_FORM_data8;
9826 default:
9827 gcc_unreachable ();
9828 }
9829 case dw_val_class_const_implicit:
9830 case dw_val_class_unsigned_const_implicit:
9831 case dw_val_class_file_implicit:
9832 return DW_FORM_implicit_const;
9833 case dw_val_class_const_double:
9834 switch (HOST_BITS_PER_WIDE_INT)
9835 {
9836 case 8:
9837 return DW_FORM_data2;
9838 case 16:
9839 return DW_FORM_data4;
9840 case 32:
9841 return DW_FORM_data8;
9842 case 64:
9843 if (dwarf_version >= 5)
9844 return DW_FORM_data16;
9845 /* FALLTHRU */
9846 default:
9847 return DW_FORM_block1;
9848 }
9849 case dw_val_class_wide_int:
9850 switch (get_full_len (*a->dw_attr_val.v.val_wide) * HOST_BITS_PER_WIDE_INT)
9851 {
9852 case 8:
9853 return DW_FORM_data1;
9854 case 16:
9855 return DW_FORM_data2;
9856 case 32:
9857 return DW_FORM_data4;
9858 case 64:
9859 return DW_FORM_data8;
9860 case 128:
9861 if (dwarf_version >= 5)
9862 return DW_FORM_data16;
9863 /* FALLTHRU */
9864 default:
9865 return DW_FORM_block1;
9866 }
9867 case dw_val_class_symview:
9868 /* ??? We might use uleb128, but then we'd have to compute
9869 .debug_info offsets in the assembler. */
9870 if (symview_upper_bound <= 0xff)
9871 return DW_FORM_data1;
9872 else if (symview_upper_bound <= 0xffff)
9873 return DW_FORM_data2;
9874 else if (symview_upper_bound <= 0xffffffff)
9875 return DW_FORM_data4;
9876 else
9877 return DW_FORM_data8;
9878 case dw_val_class_vec:
9879 switch (constant_size (a->dw_attr_val.v.val_vec.length
9880 * a->dw_attr_val.v.val_vec.elt_size))
9881 {
9882 case 1:
9883 return DW_FORM_block1;
9884 case 2:
9885 return DW_FORM_block2;
9886 case 4:
9887 return DW_FORM_block4;
9888 default:
9889 gcc_unreachable ();
9890 }
9891 case dw_val_class_flag:
9892 if (dwarf_version >= 4)
9893 {
9894 /* Currently all add_AT_flag calls pass in 1 as last argument,
9895 so DW_FORM_flag_present can be used. If that ever changes,
9896 we'll need to use DW_FORM_flag and have some optimization
9897 in build_abbrev_table that will change those to
9898 DW_FORM_flag_present if it is set to 1 in all DIEs using
9899 the same abbrev entry. */
9900 gcc_assert (a->dw_attr_val.v.val_flag == 1);
9901 return DW_FORM_flag_present;
9902 }
9903 return DW_FORM_flag;
9904 case dw_val_class_die_ref:
9905 if (AT_ref_external (a))
9906 {
9907 if (AT_ref (a)->comdat_type_p)
9908 return DW_FORM_ref_sig8;
9909 else
9910 return DW_FORM_ref_addr;
9911 }
9912 else
9913 return DW_FORM_ref;
9914 case dw_val_class_fde_ref:
9915 return DW_FORM_data;
9916 case dw_val_class_lbl_id:
9917 return (AT_index (a) == NOT_INDEXED
9918 ? DW_FORM_addr : dwarf_FORM (DW_FORM_addrx));
9919 case dw_val_class_lineptr:
9920 case dw_val_class_macptr:
9921 case dw_val_class_loclistsptr:
9922 return dwarf_version >= 4 ? DW_FORM_sec_offset : DW_FORM_data;
9923 case dw_val_class_str:
9924 return AT_string_form (a);
9925 case dw_val_class_file:
9926 switch (constant_size (maybe_emit_file (a->dw_attr_val.v.val_file)))
9927 {
9928 case 1:
9929 return DW_FORM_data1;
9930 case 2:
9931 return DW_FORM_data2;
9932 case 4:
9933 return DW_FORM_data4;
9934 default:
9935 gcc_unreachable ();
9936 }
9937
9938 case dw_val_class_data8:
9939 return DW_FORM_data8;
9940
9941 case dw_val_class_high_pc:
9942 switch (DWARF2_ADDR_SIZE)
9943 {
9944 case 1:
9945 return DW_FORM_data1;
9946 case 2:
9947 return DW_FORM_data2;
9948 case 4:
9949 return DW_FORM_data4;
9950 case 8:
9951 return DW_FORM_data8;
9952 default:
9953 gcc_unreachable ();
9954 }
9955
9956 case dw_val_class_discr_value:
9957 return (a->dw_attr_val.v.val_discr_value.pos
9958 ? DW_FORM_udata
9959 : DW_FORM_sdata);
9960 case dw_val_class_discr_list:
9961 switch (constant_size (size_of_discr_list (AT_discr_list (a))))
9962 {
9963 case 1:
9964 return DW_FORM_block1;
9965 case 2:
9966 return DW_FORM_block2;
9967 case 4:
9968 return DW_FORM_block4;
9969 default:
9970 gcc_unreachable ();
9971 }
9972
9973 default:
9974 gcc_unreachable ();
9975 }
9976 }
9977
9978 /* Output the encoding of an attribute value. */
9979
9980 static void
9981 output_value_format (dw_attr_node *a)
9982 {
9983 enum dwarf_form form = value_format (a);
9984
9985 dw2_asm_output_data_uleb128 (form, "(%s)", dwarf_form_name (form));
9986 }
9987
9988 /* Given a die and id, produce the appropriate abbreviations. */
9989
9990 static void
9991 output_die_abbrevs (unsigned long abbrev_id, dw_die_ref abbrev)
9992 {
9993 unsigned ix;
9994 dw_attr_node *a_attr;
9995
9996 dw2_asm_output_data_uleb128 (abbrev_id, "(abbrev code)");
9997 dw2_asm_output_data_uleb128 (abbrev->die_tag, "(TAG: %s)",
9998 dwarf_tag_name (abbrev->die_tag));
9999
10000 if (abbrev->die_child != NULL)
10001 dw2_asm_output_data (1, DW_children_yes, "DW_children_yes");
10002 else
10003 dw2_asm_output_data (1, DW_children_no, "DW_children_no");
10004
10005 for (ix = 0; vec_safe_iterate (abbrev->die_attr, ix, &a_attr); ix++)
10006 {
10007 dw2_asm_output_data_uleb128 (a_attr->dw_attr, "(%s)",
10008 dwarf_attr_name (a_attr->dw_attr));
10009 output_value_format (a_attr);
10010 if (value_format (a_attr) == DW_FORM_implicit_const)
10011 {
10012 if (AT_class (a_attr) == dw_val_class_file_implicit)
10013 {
10014 int f = maybe_emit_file (a_attr->dw_attr_val.v.val_file);
10015 const char *filename = a_attr->dw_attr_val.v.val_file->filename;
10016 dw2_asm_output_data_sleb128 (f, "(%s)", filename);
10017 }
10018 else
10019 dw2_asm_output_data_sleb128 (a_attr->dw_attr_val.v.val_int, NULL);
10020 }
10021 }
10022
10023 dw2_asm_output_data (1, 0, NULL);
10024 dw2_asm_output_data (1, 0, NULL);
10025 }
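
/* For instance, a childless DW_TAG_variable abbreviation with
   DW_AT_name/DW_FORM_strp and DW_AT_type/DW_FORM_ref4 would be emitted
   roughly as: the uleb128 abbrev code, uleb128 0x34 (DW_TAG_variable),
   the byte DW_children_no, the pairs (0x03, 0x0e) and (0x49, 0x13), and
   the terminating (0, 0) pair output above.  */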
10026
10027
10028 /* Output the .debug_abbrev section which defines the DIE abbreviation
10029 table. */
10030
10031 static void
10032 output_abbrev_section (void)
10033 {
10034 unsigned int abbrev_id;
10035 dw_die_ref abbrev;
10036
10037 FOR_EACH_VEC_SAFE_ELT (abbrev_die_table, abbrev_id, abbrev)
10038 if (abbrev_id != 0)
10039 output_die_abbrevs (abbrev_id, abbrev);
10040
10041 /* Terminate the table. */
10042 dw2_asm_output_data (1, 0, NULL);
10043 }
10044
10045 /* Return a new location list, given the begin and end range, and the
10046 expression. */
10047
10048 static inline dw_loc_list_ref
10049 new_loc_list (dw_loc_descr_ref expr, const char *begin, var_loc_view vbegin,
10050 const char *end, var_loc_view vend,
10051 const char *section)
10052 {
10053 dw_loc_list_ref retlist = ggc_cleared_alloc<dw_loc_list_node> ();
10054
10055 retlist->begin = begin;
10056 retlist->begin_entry = NULL;
10057 retlist->end = end;
10058 retlist->expr = expr;
10059 retlist->section = section;
10060 retlist->vbegin = vbegin;
10061 retlist->vend = vend;
10062
10063 return retlist;
10064 }
10065
10066 /* Return true iff there's any nonzero view number in the loc list.
10067
10068 ??? When views are not enabled, we'll often extend a single range
10069 to the entire function, so that we emit a single location
10070 expression rather than a location list. With views, even with a
10071 single range, we'll output a list if start or end have a nonzero
10072 view. If we change this, we may want to stop splitting a single
10073 range in dw_loc_list just because of a nonzero view, even if it
10074 straddles across hot/cold partitions. */
10075
10076 static bool
10077 loc_list_has_views (dw_loc_list_ref list)
10078 {
10079 if (!debug_variable_location_views)
10080 return false;
10081
10082 for (dw_loc_list_ref loc = list;
10083 loc != NULL; loc = loc->dw_loc_next)
10084 if (!ZERO_VIEW_P (loc->vbegin) || !ZERO_VIEW_P (loc->vend))
10085 return true;
10086
10087 return false;
10088 }
10089
10090 /* Generate a new internal symbol for this location list node, if it
10091 hasn't got one yet. */
10092
10093 static inline void
10094 gen_llsym (dw_loc_list_ref list)
10095 {
10096 gcc_assert (!list->ll_symbol);
10097 list->ll_symbol = gen_internal_sym ("LLST");
10098
10099 if (!loc_list_has_views (list))
10100 return;
10101
10102 if (dwarf2out_locviews_in_attribute ())
10103 {
10104 /* Use the same label_num for the view list. */
10105 label_num--;
10106 list->vl_symbol = gen_internal_sym ("LVUS");
10107 }
10108 else
10109 list->vl_symbol = list->ll_symbol;
10110 }
10111
10112 /* Generate a symbol for the list, but only if we really want to emit
10113 it as a list. */
10114
10115 static inline void
10116 maybe_gen_llsym (dw_loc_list_ref list)
10117 {
10118 if (!list || (!list->dw_loc_next && !loc_list_has_views (list)))
10119 return;
10120
10121 gen_llsym (list);
10122 }
10123
10124 /* Determine whether or not to skip loc_list entry CURR. If SIZEP is
10125 NULL, don't consider size of the location expression. If we're not
10126 to skip it, and SIZEP is non-null, store the size of CURR->expr's
10127 representation in *SIZEP. */
10128
10129 static bool
10130 skip_loc_list_entry (dw_loc_list_ref curr, unsigned long *sizep = NULL)
10131 {
10132 /* Don't output an entry that starts and ends at the same address. */
10133 if (strcmp (curr->begin, curr->end) == 0
10134 && curr->vbegin == curr->vend && !curr->force)
10135 return true;
10136
10137 if (!sizep)
10138 return false;
10139
10140 unsigned long size = size_of_locs (curr->expr);
10141
10142 /* If the expression is too large, drop it on the floor. We could
10143 perhaps put it into DW_TAG_dwarf_procedure and refer to that
10144 in the expression, but >= 64KB expressions for a single value
10145      in a single range are unlikely to be very useful.  */
10146 if (dwarf_version < 5 && size > 0xffff)
10147 return true;
10148
10149 *sizep = size;
10150
10151 return false;
10152 }
10153
10154 /* Output a view pair loclist entry for CURR, if it requires one. */
10155
10156 static void
10157 dwarf2out_maybe_output_loclist_view_pair (dw_loc_list_ref curr)
10158 {
10159 if (!dwarf2out_locviews_in_loclist ())
10160 return;
10161
10162 if (ZERO_VIEW_P (curr->vbegin) && ZERO_VIEW_P (curr->vend))
10163 return;
10164
10165 #ifdef DW_LLE_view_pair
10166 dw2_asm_output_data (1, DW_LLE_view_pair, "DW_LLE_view_pair");
10167
10168 if (dwarf2out_as_locview_support)
10169 {
10170 if (ZERO_VIEW_P (curr->vbegin))
10171 dw2_asm_output_data_uleb128 (0, "Location view begin");
10172 else
10173 {
10174 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10175 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10176 dw2_asm_output_symname_uleb128 (label, "Location view begin");
10177 }
10178
10179 if (ZERO_VIEW_P (curr->vend))
10180 dw2_asm_output_data_uleb128 (0, "Location view end");
10181 else
10182 {
10183 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10184 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10185 dw2_asm_output_symname_uleb128 (label, "Location view end");
10186 }
10187 }
10188 else
10189 {
10190 dw2_asm_output_data_uleb128 (curr->vbegin, "Location view begin");
10191 dw2_asm_output_data_uleb128 (curr->vend, "Location view end");
10192 }
10193 #endif /* DW_LLE_view_pair */
10194
10195 return;
10196 }
10197
10198 /* Output the location list given to us. */
10199
10200 static void
10201 output_loc_list (dw_loc_list_ref list_head)
10202 {
10203 int vcount = 0, lcount = 0;
10204
10205 if (list_head->emitted)
10206 return;
10207 list_head->emitted = true;
10208
10209 if (list_head->vl_symbol && dwarf2out_locviews_in_attribute ())
10210 {
10211 ASM_OUTPUT_LABEL (asm_out_file, list_head->vl_symbol);
10212
10213 for (dw_loc_list_ref curr = list_head; curr != NULL;
10214 curr = curr->dw_loc_next)
10215 {
10216 unsigned long size;
10217
10218 if (skip_loc_list_entry (curr, &size))
10219 continue;
10220
10221 vcount++;
10222
10223 /* ?? dwarf_split_debug_info? */
10224 if (dwarf2out_as_locview_support)
10225 {
10226 char label[MAX_ARTIFICIAL_LABEL_BYTES];
10227
10228 if (!ZERO_VIEW_P (curr->vbegin))
10229 {
10230 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vbegin);
10231 dw2_asm_output_symname_uleb128 (label,
10232 "View list begin (%s)",
10233 list_head->vl_symbol);
10234 }
10235 else
10236 dw2_asm_output_data_uleb128 (0,
10237 "View list begin (%s)",
10238 list_head->vl_symbol);
10239
10240 if (!ZERO_VIEW_P (curr->vend))
10241 {
10242 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", curr->vend);
10243 dw2_asm_output_symname_uleb128 (label,
10244 "View list end (%s)",
10245 list_head->vl_symbol);
10246 }
10247 else
10248 dw2_asm_output_data_uleb128 (0,
10249 "View list end (%s)",
10250 list_head->vl_symbol);
10251 }
10252 else
10253 {
10254 dw2_asm_output_data_uleb128 (curr->vbegin,
10255 "View list begin (%s)",
10256 list_head->vl_symbol);
10257 dw2_asm_output_data_uleb128 (curr->vend,
10258 "View list end (%s)",
10259 list_head->vl_symbol);
10260 }
10261 }
10262 }
10263
10264 ASM_OUTPUT_LABEL (asm_out_file, list_head->ll_symbol);
10265
10266 const char *last_section = NULL;
10267 const char *base_label = NULL;
10268
10269 /* Walk the location list, and output each range + expression. */
10270 for (dw_loc_list_ref curr = list_head; curr != NULL;
10271 curr = curr->dw_loc_next)
10272 {
10273 unsigned long size;
10274
10275 /* Skip this entry? If we skip it here, we must skip it in the
10276 view list above as well. */
10277 if (skip_loc_list_entry (curr, &size))
10278 continue;
10279
10280 lcount++;
10281
10282 if (dwarf_version >= 5)
10283 {
10284 if (dwarf_split_debug_info)
10285 {
10286 dwarf2out_maybe_output_loclist_view_pair (curr);
10287 	      /* For -gsplit-dwarf, emit DW_LLE_startx_length, which has
10288 uleb128 index into .debug_addr and uleb128 length. */
10289 dw2_asm_output_data (1, DW_LLE_startx_length,
10290 "DW_LLE_startx_length (%s)",
10291 list_head->ll_symbol);
10292 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10293 "Location list range start index "
10294 "(%s)", curr->begin);
10295 /* FIXME: This will ICE ifndef HAVE_AS_LEB128.
10296 For that case we probably need to emit DW_LLE_startx_endx,
10297 but we'd need 2 .debug_addr entries rather than just one. */
10298 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10299 "Location list length (%s)",
10300 list_head->ll_symbol);
10301 }
10302 else if (!have_multiple_function_sections && HAVE_AS_LEB128)
10303 {
10304 dwarf2out_maybe_output_loclist_view_pair (curr);
10305 /* If all code is in .text section, the base address is
10306 already provided by the CU attributes. Use
10307 DW_LLE_offset_pair where both addresses are uleb128 encoded
10308 offsets against that base. */
10309 dw2_asm_output_data (1, DW_LLE_offset_pair,
10310 "DW_LLE_offset_pair (%s)",
10311 list_head->ll_symbol);
10312 dw2_asm_output_delta_uleb128 (curr->begin, curr->section,
10313 "Location list begin address (%s)",
10314 list_head->ll_symbol);
10315 dw2_asm_output_delta_uleb128 (curr->end, curr->section,
10316 "Location list end address (%s)",
10317 list_head->ll_symbol);
10318 }
10319 else if (HAVE_AS_LEB128)
10320 {
10321 /* Otherwise, find out how many consecutive entries could share
10322 the same base entry. If just one, emit DW_LLE_start_length,
10323 otherwise emit DW_LLE_base_address for the base address
10324 followed by a series of DW_LLE_offset_pair. */
10325 if (last_section == NULL || curr->section != last_section)
10326 {
10327 dw_loc_list_ref curr2;
10328 for (curr2 = curr->dw_loc_next; curr2 != NULL;
10329 curr2 = curr2->dw_loc_next)
10330 {
10331 if (strcmp (curr2->begin, curr2->end) == 0
10332 && !curr2->force)
10333 continue;
10334 break;
10335 }
10336 if (curr2 == NULL || curr->section != curr2->section)
10337 last_section = NULL;
10338 else
10339 {
10340 last_section = curr->section;
10341 base_label = curr->begin;
10342 dw2_asm_output_data (1, DW_LLE_base_address,
10343 "DW_LLE_base_address (%s)",
10344 list_head->ll_symbol);
10345 dw2_asm_output_addr (DWARF2_ADDR_SIZE, base_label,
10346 "Base address (%s)",
10347 list_head->ll_symbol);
10348 }
10349 }
10350 /* Only one entry with the same base address. Use
10351 DW_LLE_start_length with absolute address and uleb128
10352 length. */
10353 if (last_section == NULL)
10354 {
10355 dwarf2out_maybe_output_loclist_view_pair (curr);
10356 dw2_asm_output_data (1, DW_LLE_start_length,
10357 "DW_LLE_start_length (%s)",
10358 list_head->ll_symbol);
10359 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10360 "Location list begin address (%s)",
10361 list_head->ll_symbol);
10362 dw2_asm_output_delta_uleb128 (curr->end, curr->begin,
10363 "Location list length "
10364 "(%s)", list_head->ll_symbol);
10365 }
10366 /* Otherwise emit DW_LLE_offset_pair, relative to above emitted
10367 DW_LLE_base_address. */
10368 else
10369 {
10370 dwarf2out_maybe_output_loclist_view_pair (curr);
10371 dw2_asm_output_data (1, DW_LLE_offset_pair,
10372 "DW_LLE_offset_pair (%s)",
10373 list_head->ll_symbol);
10374 dw2_asm_output_delta_uleb128 (curr->begin, base_label,
10375 "Location list begin address "
10376 "(%s)", list_head->ll_symbol);
10377 dw2_asm_output_delta_uleb128 (curr->end, base_label,
10378 "Location list end address "
10379 "(%s)", list_head->ll_symbol);
10380 }
10381 }
10382 	  /* The assembler does not support the .uleb128 directive.  Emit
10383 DW_LLE_start_end with a pair of absolute addresses. */
10384 else
10385 {
10386 dwarf2out_maybe_output_loclist_view_pair (curr);
10387 dw2_asm_output_data (1, DW_LLE_start_end,
10388 "DW_LLE_start_end (%s)",
10389 list_head->ll_symbol);
10390 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10391 "Location list begin address (%s)",
10392 list_head->ll_symbol);
10393 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10394 "Location list end address (%s)",
10395 list_head->ll_symbol);
10396 }
10397 }
10398 else if (dwarf_split_debug_info)
10399 {
10400 	  /* For -gsplit-dwarf -gdwarf-{2,3,4}, emit an index into .debug_addr
10401 	     and a 4-byte length.  */
10402 dw2_asm_output_data (1, DW_LLE_GNU_start_length_entry,
10403 "Location list start/length entry (%s)",
10404 list_head->ll_symbol);
10405 dw2_asm_output_data_uleb128 (curr->begin_entry->index,
10406 "Location list range start index (%s)",
10407 curr->begin);
10408 /* The length field is 4 bytes. If we ever need to support
10409 an 8-byte length, we can add a new DW_LLE code or fall back
10410 to DW_LLE_GNU_start_end_entry. */
10411 dw2_asm_output_delta (4, curr->end, curr->begin,
10412 "Location list range length (%s)",
10413 list_head->ll_symbol);
10414 }
10415 else if (!have_multiple_function_sections)
10416 {
10417 /* Pair of relative addresses against start of text section. */
10418 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->begin, curr->section,
10419 "Location list begin address (%s)",
10420 list_head->ll_symbol);
10421 dw2_asm_output_delta (DWARF2_ADDR_SIZE, curr->end, curr->section,
10422 "Location list end address (%s)",
10423 list_head->ll_symbol);
10424 }
10425 else
10426 {
10427 /* Pair of absolute addresses. */
10428 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->begin,
10429 "Location list begin address (%s)",
10430 list_head->ll_symbol);
10431 dw2_asm_output_addr (DWARF2_ADDR_SIZE, curr->end,
10432 "Location list end address (%s)",
10433 list_head->ll_symbol);
10434 }
10435
10436 /* Output the block length for this list of location operations. */
10437 if (dwarf_version >= 5)
10438 dw2_asm_output_data_uleb128 (size, "Location expression size");
10439 else
10440 {
10441 gcc_assert (size <= 0xffff);
10442 dw2_asm_output_data (2, size, "Location expression size");
10443 }
10444
10445 output_loc_sequence (curr->expr, -1);
10446 }
10447
10448 /* And finally list termination. */
10449 if (dwarf_version >= 5)
10450 dw2_asm_output_data (1, DW_LLE_end_of_list,
10451 "DW_LLE_end_of_list (%s)", list_head->ll_symbol);
10452 else if (dwarf_split_debug_info)
10453 dw2_asm_output_data (1, DW_LLE_GNU_end_of_list_entry,
10454 "Location list terminator (%s)",
10455 list_head->ll_symbol);
10456 else
10457 {
10458 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10459 "Location list terminator begin (%s)",
10460 list_head->ll_symbol);
10461 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0,
10462 "Location list terminator end (%s)",
10463 list_head->ll_symbol);
10464 }
10465
10466 gcc_assert (!list_head->vl_symbol
10467 || vcount == lcount * (dwarf2out_locviews_in_attribute () ? 1 : 0));
10468 }
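
/* As a concrete illustration of the DWARF 5 branches above, a variable
   held in a register over one sub-range of a single-section function is
   typically emitted (when the assembler supports .uleb128 and
   -gsplit-dwarf is not used) as: DW_LLE_offset_pair, two uleb128 offsets
   from the base address already supplied by the CU attributes, a uleb128
   expression size, the DW_OP_* bytes, and finally DW_LLE_end_of_list.  */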
10469
10470 /* Output a range_list offset into the .debug_ranges or .debug_rnglists
10471 section. Emit a relocated reference if val_entry is NULL, otherwise,
10472 emit an indirect reference. */
10473
10474 static void
10475 output_range_list_offset (dw_attr_node *a)
10476 {
10477 const char *name = dwarf_attr_name (a->dw_attr);
10478
10479 if (a->dw_attr_val.val_entry == RELOCATED_OFFSET)
10480 {
10481 if (dwarf_version >= 5)
10482 {
10483 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10484 dw2_asm_output_offset (DWARF_OFFSET_SIZE, r->label,
10485 debug_ranges_section, "%s", name);
10486 }
10487 else
10488 {
10489 char *p = strchr (ranges_section_label, '\0');
10490 sprintf (p, "+" HOST_WIDE_INT_PRINT_HEX,
10491 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE);
10492 dw2_asm_output_offset (DWARF_OFFSET_SIZE, ranges_section_label,
10493 debug_ranges_section, "%s", name);
10494 *p = '\0';
10495 }
10496 }
10497 else if (dwarf_version >= 5)
10498 {
10499 dw_ranges *r = &(*ranges_table)[a->dw_attr_val.v.val_offset];
10500 gcc_assert (rnglist_idx);
10501 dw2_asm_output_data_uleb128 (r->idx, "%s", name);
10502 }
10503 else
10504 dw2_asm_output_data (DWARF_OFFSET_SIZE,
10505 a->dw_attr_val.v.val_offset * 2 * DWARF2_ADDR_SIZE,
10506 "%s (offset from %s)", name, ranges_section_label);
10507 }
10508
10509 /* Output the offset into the debug_loc section. */
10510
10511 static void
10512 output_loc_list_offset (dw_attr_node *a)
10513 {
10514 char *sym = AT_loc_list (a)->ll_symbol;
10515
10516 gcc_assert (sym);
10517 if (!dwarf_split_debug_info)
10518 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10519 "%s", dwarf_attr_name (a->dw_attr));
10520 else if (dwarf_version >= 5)
10521 {
10522 gcc_assert (AT_loc_list (a)->num_assigned);
10523 dw2_asm_output_data_uleb128 (AT_loc_list (a)->hash, "%s (%s)",
10524 dwarf_attr_name (a->dw_attr),
10525 sym);
10526 }
10527 else
10528 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10529 "%s", dwarf_attr_name (a->dw_attr));
10530 }
10531
10532 /* Output the offset of the view list into the debug_loc section.  */
10533
10534 static void
10535 output_view_list_offset (dw_attr_node *a)
10536 {
10537 char *sym = (*AT_loc_list_ptr (a))->vl_symbol;
10538
10539 gcc_assert (sym);
10540 if (dwarf_split_debug_info)
10541 dw2_asm_output_delta (DWARF_OFFSET_SIZE, sym, loc_section_label,
10542 "%s", dwarf_attr_name (a->dw_attr));
10543 else
10544 dw2_asm_output_offset (DWARF_OFFSET_SIZE, sym, debug_loc_section,
10545 "%s", dwarf_attr_name (a->dw_attr));
10546 }
10547
10548 /* Output an attribute's index or value appropriately. */
10549
10550 static void
10551 output_attr_index_or_value (dw_attr_node *a)
10552 {
10553 const char *name = dwarf_attr_name (a->dw_attr);
10554
10555 if (dwarf_split_debug_info && AT_index (a) != NOT_INDEXED)
10556 {
10557 dw2_asm_output_data_uleb128 (AT_index (a), "%s", name);
10558 return;
10559 }
10560 switch (AT_class (a))
10561 {
10562 case dw_val_class_addr:
10563 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, AT_addr (a), "%s", name);
10564 break;
10565 case dw_val_class_high_pc:
10566 case dw_val_class_lbl_id:
10567 dw2_asm_output_addr (DWARF2_ADDR_SIZE, AT_lbl (a), "%s", name);
10568 break;
10569 default:
10570 gcc_unreachable ();
10571 }
10572 }
10573
10574 /* Output a type signature. */
10575
10576 static inline void
10577 output_signature (const char *sig, const char *name)
10578 {
10579 int i;
10580
10581 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
10582 dw2_asm_output_data (1, sig[i], i == 0 ? "%s" : NULL, name);
10583 }
10584
10585 /* Output a discriminant value. */
10586
10587 static inline void
10588 output_discr_value (dw_discr_value *discr_value, const char *name)
10589 {
10590 if (discr_value->pos)
10591 dw2_asm_output_data_uleb128 (discr_value->v.uval, "%s", name);
10592 else
10593 dw2_asm_output_data_sleb128 (discr_value->v.sval, "%s", name);
10594 }
10595
10596 /* Output the DIE and its attributes. Called recursively to generate
10597 the definitions of each child DIE. */
10598
10599 static void
10600 output_die (dw_die_ref die)
10601 {
10602 dw_attr_node *a;
10603 dw_die_ref c;
10604 unsigned long size;
10605 unsigned ix;
10606
10607 dw2_asm_output_data_uleb128 (die->die_abbrev, "(DIE (%#lx) %s)",
10608 (unsigned long)die->die_offset,
10609 dwarf_tag_name (die->die_tag));
10610
10611 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
10612 {
10613 const char *name = dwarf_attr_name (a->dw_attr);
10614
10615 switch (AT_class (a))
10616 {
10617 case dw_val_class_addr:
10618 output_attr_index_or_value (a);
10619 break;
10620
10621 case dw_val_class_offset:
10622 dw2_asm_output_data (DWARF_OFFSET_SIZE, a->dw_attr_val.v.val_offset,
10623 "%s", name);
10624 break;
10625
10626 case dw_val_class_range_list:
10627 output_range_list_offset (a);
10628 break;
10629
10630 case dw_val_class_loc:
10631 size = size_of_locs (AT_loc (a));
10632
10633 /* Output the block length for this list of location operations. */
10634 if (dwarf_version >= 4)
10635 dw2_asm_output_data_uleb128 (size, "%s", name);
10636 else
10637 dw2_asm_output_data (constant_size (size), size, "%s", name);
10638
10639 output_loc_sequence (AT_loc (a), -1);
10640 break;
10641
10642 case dw_val_class_const:
10643 	  /* ??? It would be slightly more efficient to use a scheme like the
10644 	     one used for unsigned constants below, but gdb 4.x does not sign
10645 extend. Gdb 5.x does sign extend. */
10646 dw2_asm_output_data_sleb128 (AT_int (a), "%s", name);
10647 break;
10648
10649 case dw_val_class_unsigned_const:
10650 {
10651 int csize = constant_size (AT_unsigned (a));
10652 if (dwarf_version == 3
10653 && a->dw_attr == DW_AT_data_member_location
10654 && csize >= 4)
10655 dw2_asm_output_data_uleb128 (AT_unsigned (a), "%s", name);
10656 else
10657 dw2_asm_output_data (csize, AT_unsigned (a), "%s", name);
10658 }
10659 break;
10660
10661 case dw_val_class_symview:
10662 {
10663 int vsize;
10664 if (symview_upper_bound <= 0xff)
10665 vsize = 1;
10666 else if (symview_upper_bound <= 0xffff)
10667 vsize = 2;
10668 else if (symview_upper_bound <= 0xffffffff)
10669 vsize = 4;
10670 else
10671 vsize = 8;
10672 dw2_asm_output_addr (vsize, a->dw_attr_val.v.val_symbolic_view,
10673 "%s", name);
10674 }
10675 break;
10676
10677 case dw_val_class_const_implicit:
10678 if (flag_debug_asm)
10679 fprintf (asm_out_file, "\t\t\t%s %s ("
10680 HOST_WIDE_INT_PRINT_DEC ")\n",
10681 ASM_COMMENT_START, name, AT_int (a));
10682 break;
10683
10684 case dw_val_class_unsigned_const_implicit:
10685 if (flag_debug_asm)
10686 fprintf (asm_out_file, "\t\t\t%s %s ("
10687 HOST_WIDE_INT_PRINT_HEX ")\n",
10688 ASM_COMMENT_START, name, AT_unsigned (a));
10689 break;
10690
10691 case dw_val_class_const_double:
10692 {
10693 unsigned HOST_WIDE_INT first, second;
10694
10695 if (HOST_BITS_PER_WIDE_INT >= DWARF_LARGEST_DATA_FORM_BITS)
10696 dw2_asm_output_data (1,
10697 HOST_BITS_PER_DOUBLE_INT
10698 / HOST_BITS_PER_CHAR,
10699 NULL);
10700
10701 if (WORDS_BIG_ENDIAN)
10702 {
10703 first = a->dw_attr_val.v.val_double.high;
10704 second = a->dw_attr_val.v.val_double.low;
10705 }
10706 else
10707 {
10708 first = a->dw_attr_val.v.val_double.low;
10709 second = a->dw_attr_val.v.val_double.high;
10710 }
10711
10712 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10713 first, "%s", name);
10714 dw2_asm_output_data (HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR,
10715 second, NULL);
10716 }
10717 break;
10718
10719 case dw_val_class_wide_int:
10720 {
10721 int i;
10722 int len = get_full_len (*a->dw_attr_val.v.val_wide);
10723 int l = HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR;
10724 if (len * HOST_BITS_PER_WIDE_INT > DWARF_LARGEST_DATA_FORM_BITS)
10725 dw2_asm_output_data (1, get_full_len (*a->dw_attr_val.v.val_wide)
10726 * l, NULL);
10727
10728 if (WORDS_BIG_ENDIAN)
10729 for (i = len - 1; i >= 0; --i)
10730 {
10731 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10732 "%s", name);
10733 name = "";
10734 }
10735 else
10736 for (i = 0; i < len; ++i)
10737 {
10738 dw2_asm_output_data (l, a->dw_attr_val.v.val_wide->elt (i),
10739 "%s", name);
10740 name = "";
10741 }
10742 }
10743 break;
10744
10745 case dw_val_class_vec:
10746 {
10747 unsigned int elt_size = a->dw_attr_val.v.val_vec.elt_size;
10748 unsigned int len = a->dw_attr_val.v.val_vec.length;
10749 unsigned int i;
10750 unsigned char *p;
10751
10752 dw2_asm_output_data (constant_size (len * elt_size),
10753 len * elt_size, "%s", name);
10754 if (elt_size > sizeof (HOST_WIDE_INT))
10755 {
10756 elt_size /= 2;
10757 len *= 2;
10758 }
10759 for (i = 0, p = (unsigned char *) a->dw_attr_val.v.val_vec.array;
10760 i < len;
10761 i++, p += elt_size)
10762 dw2_asm_output_data (elt_size, extract_int (p, elt_size),
10763 "fp or vector constant word %u", i);
10764 break;
10765 }
10766
10767 case dw_val_class_flag:
10768 if (dwarf_version >= 4)
10769 {
10770 /* Currently all add_AT_flag calls pass in 1 as last argument,
10771 so DW_FORM_flag_present can be used. If that ever changes,
10772 we'll need to use DW_FORM_flag and have some optimization
10773 in build_abbrev_table that will change those to
10774 DW_FORM_flag_present if it is set to 1 in all DIEs using
10775 the same abbrev entry. */
10776 gcc_assert (AT_flag (a) == 1);
10777 if (flag_debug_asm)
10778 fprintf (asm_out_file, "\t\t\t%s %s\n",
10779 ASM_COMMENT_START, name);
10780 break;
10781 }
10782 dw2_asm_output_data (1, AT_flag (a), "%s", name);
10783 break;
10784
10785 case dw_val_class_loc_list:
10786 output_loc_list_offset (a);
10787 break;
10788
10789 case dw_val_class_view_list:
10790 output_view_list_offset (a);
10791 break;
10792
10793 case dw_val_class_die_ref:
10794 if (AT_ref_external (a))
10795 {
10796 if (AT_ref (a)->comdat_type_p)
10797 {
10798 comdat_type_node *type_node
10799 = AT_ref (a)->die_id.die_type_node;
10800
10801 gcc_assert (type_node);
10802 output_signature (type_node->signature, name);
10803 }
10804 else
10805 {
10806 const char *sym = AT_ref (a)->die_id.die_symbol;
10807 int size;
10808
10809 gcc_assert (sym);
10810 /* In DWARF2, DW_FORM_ref_addr is sized by target address
10811 length, whereas in DWARF3 it's always sized as an
10812 offset. */
10813 if (dwarf_version == 2)
10814 size = DWARF2_ADDR_SIZE;
10815 else
10816 size = DWARF_OFFSET_SIZE;
10817 /* ??? We cannot unconditionally output die_offset if
10818 non-zero - others might create references to those
10819 DIEs via symbols.  And we do not clear its DIE offset
10820 after outputting it (also, the label refers to the
10821 actual DIE, not to the DWARF CU header, which is what
10822 a label + offset reference would have to be relative
10823 to).
10824 ??? This is the reason for the with_offset flag. */
10825 if (AT_ref (a)->with_offset)
10826 dw2_asm_output_offset (size, sym, AT_ref (a)->die_offset,
10827 debug_info_section, "%s", name);
10828 else
10829 dw2_asm_output_offset (size, sym, debug_info_section, "%s",
10830 name);
10831 }
10832 }
10833 else
10834 {
10835 gcc_assert (AT_ref (a)->die_offset);
10836 dw2_asm_output_data (DWARF_OFFSET_SIZE, AT_ref (a)->die_offset,
10837 "%s", name);
10838 }
10839 break;
10840
10841 case dw_val_class_fde_ref:
10842 {
10843 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
10844
10845 ASM_GENERATE_INTERNAL_LABEL (l1, FDE_LABEL,
10846 a->dw_attr_val.v.val_fde_index * 2);
10847 dw2_asm_output_offset (DWARF_OFFSET_SIZE, l1, debug_frame_section,
10848 "%s", name);
10849 }
10850 break;
10851
10852 case dw_val_class_vms_delta:
10853 #ifdef ASM_OUTPUT_DWARF_VMS_DELTA
10854 dw2_asm_output_vms_delta (DWARF_OFFSET_SIZE,
10855 AT_vms_delta2 (a), AT_vms_delta1 (a),
10856 "%s", name);
10857 #else
10858 dw2_asm_output_delta (DWARF_OFFSET_SIZE,
10859 AT_vms_delta2 (a), AT_vms_delta1 (a),
10860 "%s", name);
10861 #endif
10862 break;
10863
10864 case dw_val_class_lbl_id:
10865 output_attr_index_or_value (a);
10866 break;
10867
10868 case dw_val_class_lineptr:
10869 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10870 debug_line_section, "%s", name);
10871 break;
10872
10873 case dw_val_class_macptr:
10874 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10875 debug_macinfo_section, "%s", name);
10876 break;
10877
10878 case dw_val_class_loclistsptr:
10879 dw2_asm_output_offset (DWARF_OFFSET_SIZE, AT_lbl (a),
10880 debug_loc_section, "%s", name);
10881 break;
10882
10883 case dw_val_class_str:
10884 if (a->dw_attr_val.v.val_str->form == DW_FORM_strp)
10885 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10886 a->dw_attr_val.v.val_str->label,
10887 debug_str_section,
10888 "%s: \"%s\"", name, AT_string (a));
10889 else if (a->dw_attr_val.v.val_str->form == DW_FORM_line_strp)
10890 dw2_asm_output_offset (DWARF_OFFSET_SIZE,
10891 a->dw_attr_val.v.val_str->label,
10892 debug_line_str_section,
10893 "%s: \"%s\"", name, AT_string (a));
10894 else if (a->dw_attr_val.v.val_str->form == dwarf_FORM (DW_FORM_strx))
10895 dw2_asm_output_data_uleb128 (AT_index (a),
10896 "%s: \"%s\"", name, AT_string (a));
10897 else
10898 dw2_asm_output_nstring (AT_string (a), -1, "%s", name);
10899 break;
10900
10901 case dw_val_class_file:
10902 {
10903 int f = maybe_emit_file (a->dw_attr_val.v.val_file);
10904
10905 dw2_asm_output_data (constant_size (f), f, "%s (%s)", name,
10906 a->dw_attr_val.v.val_file->filename);
10907 break;
10908 }
10909
10910 case dw_val_class_file_implicit:
10911 if (flag_debug_asm)
10912 fprintf (asm_out_file, "\t\t\t%s %s (%d, %s)\n",
10913 ASM_COMMENT_START, name,
10914 maybe_emit_file (a->dw_attr_val.v.val_file),
10915 a->dw_attr_val.v.val_file->filename);
10916 break;
10917
10918 case dw_val_class_data8:
10919 {
10920 int i;
10921
10922 for (i = 0; i < 8; i++)
10923 dw2_asm_output_data (1, a->dw_attr_val.v.val_data8[i],
10924 i == 0 ? "%s" : NULL, name);
10925 break;
10926 }
10927
10928 case dw_val_class_high_pc:
10929 dw2_asm_output_delta (DWARF2_ADDR_SIZE, AT_lbl (a),
10930 get_AT_low_pc (die), "DW_AT_high_pc");
10931 break;
10932
10933 case dw_val_class_discr_value:
10934 output_discr_value (&a->dw_attr_val.v.val_discr_value, name);
10935 break;
10936
10937 case dw_val_class_discr_list:
10938 {
10939 dw_discr_list_ref list = AT_discr_list (a);
10940 const int size = size_of_discr_list (list);
10941
10942 /* This is a block, so output its length first. */
10943 dw2_asm_output_data (constant_size (size), size,
10944 "%s: block size", name);
10945
10946 for (; list != NULL; list = list->dw_discr_next)
10947 {
10948 /* One byte for the discriminant value descriptor, and then as
10949 many LEB128 numbers as required. */
10950 if (list->dw_discr_range)
10951 dw2_asm_output_data (1, DW_DSC_range,
10952 "%s: DW_DSC_range", name);
10953 else
10954 dw2_asm_output_data (1, DW_DSC_label,
10955 "%s: DW_DSC_label", name);
10956
10957 output_discr_value (&list->dw_discr_lower_bound, name);
10958 if (list->dw_discr_range)
10959 output_discr_value (&list->dw_discr_upper_bound, name);
10960 }
10961 break;
10962 }
10963
10964 default:
10965 gcc_unreachable ();
10966 }
10967 }
10968
10969 FOR_EACH_CHILD (die, c, output_die (c));
10970
10971 /* Add null byte to terminate sibling list. */
10972 if (die->die_child != NULL)
10973 dw2_asm_output_data (1, 0, "end of children of DIE %#lx",
10974 (unsigned long) die->die_offset);
10975 }
10976
10977 /* Output the dwarf version number. */
10978
10979 static void
10980 output_dwarf_version ()
10981 {
10982 /* ??? For now, if -gdwarf-6 is specified, we output version 5 with
10983 views in loclist. That will change eventually. */
10984 if (dwarf_version == 6)
10985 {
10986 static bool once;
10987 if (!once)
10988 {
10989 warning (0, "%<-gdwarf-6%> is output as version 5 with "
10990 "incompatibilities");
10991 once = true;
10992 }
10993 dw2_asm_output_data (2, 5, "DWARF version number");
10994 }
10995 else
10996 dw2_asm_output_data (2, dwarf_version, "DWARF version number");
10997 }
10998
10999 /* Output the compilation unit that appears at the beginning of the
11000 .debug_info section, and precedes the DIE descriptions. */
11001
11002 static void
11003 output_compilation_unit_header (enum dwarf_unit_type ut)
11004 {
11005 if (!XCOFF_DEBUGGING_INFO)
11006 {
11007 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11008 dw2_asm_output_data (4, 0xffffffff,
11009 "Initial length escape value indicating 64-bit DWARF extension");
11010 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11011 next_die_offset - DWARF_INITIAL_LENGTH_SIZE,
11012 "Length of Compilation Unit Info");
11013 }
11014
11015 output_dwarf_version ();
11016 if (dwarf_version >= 5)
11017 {
11018 const char *name;
11019 switch (ut)
11020 {
11021 case DW_UT_compile: name = "DW_UT_compile"; break;
11022 case DW_UT_type: name = "DW_UT_type"; break;
11023 case DW_UT_split_compile: name = "DW_UT_split_compile"; break;
11024 case DW_UT_split_type: name = "DW_UT_split_type"; break;
11025 default: gcc_unreachable ();
11026 }
11027 dw2_asm_output_data (1, ut, "%s", name);
11028 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11029 }
11030 dw2_asm_output_offset (DWARF_OFFSET_SIZE, abbrev_section_label,
11031 debug_abbrev_section,
11032 "Offset Into Abbrev. Section");
11033 if (dwarf_version < 5)
11034 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11035 }
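/* For reference, and readable from the code above: with 32-bit DWARF
   this emits a 4-byte unit length (omitted on XCOFF), a 2-byte version,
   and then for DWARF 5 a 1-byte unit type, a 1-byte address size and a
   4-byte offset into .debug_abbrev; for DWARF 2-4 the unit type byte is
   absent and the address size byte follows the abbrev offset instead.  */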
11036
11037 /* Output the compilation unit DIE and its children. */
11038
11039 static void
11040 output_comp_unit (dw_die_ref die, int output_if_empty,
11041 const unsigned char *dwo_id)
11042 {
11043 const char *secname, *oldsym;
11044 char *tmp;
11045
11046 /* Unless we are outputting the main CU, we may throw away empty ones. */
11047 if (!output_if_empty && die->die_child == NULL)
11048 return;
11049
11050 /* Even if there are no children of this DIE, we must output the information
11051 about the compilation unit. Otherwise, on an empty translation unit, we
11052 will generate a present, but empty, .debug_info section. IRIX 6.5 `nm'
11053 will then complain when examining the file. First mark all the DIEs in
11054 this CU so we know which get local refs. */
11055 mark_dies (die);
11056
11057 external_ref_hash_type *extern_map = optimize_external_refs (die);
11058
11059 /* For now, optimize only the main CU; in order to optimize the rest
11060 we'd need to see all of them earlier.  Leave the rest for post-linking
11061 tools like DWZ. */
11062 if (die == comp_unit_die ())
11063 abbrev_opt_start = vec_safe_length (abbrev_die_table);
11064
11065 build_abbrev_table (die, extern_map);
11066
11067 optimize_abbrev_table ();
11068
11069 delete extern_map;
11070
11071 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11072 next_die_offset = (dwo_id
11073 ? DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11074 : DWARF_COMPILE_UNIT_HEADER_SIZE);
11075 calc_die_sizes (die);
11076
11077 oldsym = die->die_id.die_symbol;
11078 if (oldsym && die->comdat_type_p)
11079 {
11080 tmp = XALLOCAVEC (char, strlen (oldsym) + 24);
11081
11082 sprintf (tmp, ".gnu.linkonce.wi.%s", oldsym);
11083 secname = tmp;
11084 die->die_id.die_symbol = NULL;
11085 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11086 }
11087 else
11088 {
11089 switch_to_section (debug_info_section);
11090 ASM_OUTPUT_LABEL (asm_out_file, debug_info_section_label);
11091 info_section_emitted = true;
11092 }
11093
11094 /* For LTO cross-unit DIE refs we want a symbol at the start of the
11095 debug info section, not on the CU DIE. */
11096 if ((flag_generate_lto || flag_generate_offload) && oldsym)
11097 {
11098 /* ??? No way to get visibility assembled without a decl. */
11099 tree decl = build_decl (UNKNOWN_LOCATION, VAR_DECL,
11100 get_identifier (oldsym), char_type_node);
11101 TREE_PUBLIC (decl) = true;
11102 TREE_STATIC (decl) = true;
11103 DECL_ARTIFICIAL (decl) = true;
11104 DECL_VISIBILITY (decl) = VISIBILITY_HIDDEN;
11105 DECL_VISIBILITY_SPECIFIED (decl) = true;
11106 targetm.asm_out.assemble_visibility (decl, VISIBILITY_HIDDEN);
11107 #ifdef ASM_WEAKEN_LABEL
11108 /* We prefer a .weak because that handles duplicates from duplicate
11109 archive members in a graceful way. */
11110 ASM_WEAKEN_LABEL (asm_out_file, oldsym);
11111 #else
11112 targetm.asm_out.globalize_label (asm_out_file, oldsym);
11113 #endif
11114 ASM_OUTPUT_LABEL (asm_out_file, oldsym);
11115 }
11116
11117 /* Output debugging information. */
11118 output_compilation_unit_header (dwo_id
11119 ? DW_UT_split_compile : DW_UT_compile);
11120 if (dwarf_version >= 5)
11121 {
11122 if (dwo_id != NULL)
11123 for (int i = 0; i < 8; i++)
11124 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11125 }
11126 output_die (die);
11127
11128 /* Leave the marks on the main CU, so we can check them in
11129 output_pubnames. */
11130 if (oldsym)
11131 {
11132 unmark_dies (die);
11133 die->die_id.die_symbol = oldsym;
11134 }
11135 }
11136
11137 /* Whether to generate the DWARF accelerator tables in .debug_pubnames
11138 and .debug_pubtypes. This is configured per-target, but can be
11139 overridden by the -gpubnames or -gno-pubnames options. */
11140
11141 static inline bool
11142 want_pubnames (void)
11143 {
11144 if (debug_info_level <= DINFO_LEVEL_TERSE
11145 /* Names and types go to the early debug part only. */
11146 || in_lto_p)
11147 return false;
11148 if (debug_generate_pub_sections != -1)
11149 return debug_generate_pub_sections;
11150 return targetm.want_debug_pub_sections;
11151 }
11152
11153 /* Add the DW_AT_GNU_pubnames and DW_AT_GNU_pubtypes attributes. */
11154
11155 static void
11156 add_AT_pubnames (dw_die_ref die)
11157 {
11158 if (want_pubnames ())
11159 add_AT_flag (die, DW_AT_GNU_pubnames, 1);
11160 }
11161
11162 /* Add a string attribute value to a skeleton DIE. */
11163
11164 static inline void
11165 add_skeleton_AT_string (dw_die_ref die, enum dwarf_attribute attr_kind,
11166 const char *str)
11167 {
11168 dw_attr_node attr;
11169 struct indirect_string_node *node;
11170
11171 if (! skeleton_debug_str_hash)
11172 skeleton_debug_str_hash
11173 = hash_table<indirect_string_hasher>::create_ggc (10);
11174
11175 node = find_AT_string_in_table (str, skeleton_debug_str_hash);
11176 find_string_form (node);
11177 if (node->form == dwarf_FORM (DW_FORM_strx))
11178 node->form = DW_FORM_strp;
11179
11180 attr.dw_attr = attr_kind;
11181 attr.dw_attr_val.val_class = dw_val_class_str;
11182 attr.dw_attr_val.val_entry = NULL;
11183 attr.dw_attr_val.v.val_str = node;
11184 add_dwarf_attr (die, &attr);
11185 }
11186
11187 /* Helper function to generate top-level dies for skeleton debug_info and
11188 debug_types. */
11189
11190 static void
11191 add_top_level_skeleton_die_attrs (dw_die_ref die)
11192 {
11193 const char *dwo_file_name = concat (aux_base_name, ".dwo", NULL);
11194 const char *comp_dir = comp_dir_string ();
11195
11196 add_skeleton_AT_string (die, dwarf_AT (DW_AT_dwo_name), dwo_file_name);
11197 if (comp_dir != NULL)
11198 add_skeleton_AT_string (die, DW_AT_comp_dir, comp_dir);
11199 add_AT_pubnames (die);
11200 if (addr_index_table != NULL && addr_index_table->size () > 0)
11201 add_AT_lineptr (die, dwarf_AT (DW_AT_addr_base), debug_addr_section_label);
11202 }
11203
11204 /* Output skeleton debug sections that point to the dwo file. */
11205
11206 static void
11207 output_skeleton_debug_sections (dw_die_ref comp_unit,
11208 const unsigned char *dwo_id)
11209 {
11210 /* These attributes will be found in the full debug_info section. */
11211 remove_AT (comp_unit, DW_AT_producer);
11212 remove_AT (comp_unit, DW_AT_language);
11213
11214 switch_to_section (debug_skeleton_info_section);
11215 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_info_section_label);
11216
11217 /* Produce the skeleton compilation-unit header.  This one differs enough
11218 from a normal CU header that it's better not to call
11219 output_compilation_unit_header. */
11220 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11221 dw2_asm_output_data (4, 0xffffffff,
11222 "Initial length escape value indicating 64-bit "
11223 "DWARF extension");
11224
11225 dw2_asm_output_data (DWARF_OFFSET_SIZE,
11226 DWARF_COMPILE_UNIT_SKELETON_HEADER_SIZE
11227 - DWARF_INITIAL_LENGTH_SIZE
11228 + size_of_die (comp_unit),
11229 "Length of Compilation Unit Info");
11230 output_dwarf_version ();
11231 if (dwarf_version >= 5)
11232 {
11233 dw2_asm_output_data (1, DW_UT_skeleton, "DW_UT_skeleton");
11234 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11235 }
11236 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_abbrev_section_label,
11237 debug_skeleton_abbrev_section,
11238 "Offset Into Abbrev. Section");
11239 if (dwarf_version < 5)
11240 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Pointer Size (in bytes)");
11241 else
11242 for (int i = 0; i < 8; i++)
11243 dw2_asm_output_data (1, dwo_id[i], i == 0 ? "DWO id" : NULL);
11244
11245 comp_unit->die_abbrev = SKELETON_COMP_DIE_ABBREV;
11246 output_die (comp_unit);
11247
11248 /* Build the skeleton debug_abbrev section. */
11249 switch_to_section (debug_skeleton_abbrev_section);
11250 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_abbrev_section_label);
11251
11252 output_die_abbrevs (SKELETON_COMP_DIE_ABBREV, comp_unit);
11253
11254 dw2_asm_output_data (1, 0, "end of skeleton .debug_abbrev");
11255 }
11256
11257 /* Output a comdat type unit DIE and its children. */
11258
11259 static void
11260 output_comdat_type_unit (comdat_type_node *node,
11261 bool early_lto_debug ATTRIBUTE_UNUSED)
11262 {
11263 const char *secname;
11264 char *tmp;
11265 int i;
11266 #if defined (OBJECT_FORMAT_ELF)
11267 tree comdat_key;
11268 #endif
11269
11270 /* First mark all the DIEs in this CU so we know which get local refs. */
11271 mark_dies (node->root_die);
11272
11273 external_ref_hash_type *extern_map = optimize_external_refs (node->root_die);
11274
11275 build_abbrev_table (node->root_die, extern_map);
11276
11277 delete extern_map;
11278 extern_map = NULL;
11279
11280 /* Initialize the beginning DIE offset - and calculate sizes/offsets. */
11281 next_die_offset = DWARF_COMDAT_TYPE_UNIT_HEADER_SIZE;
11282 calc_die_sizes (node->root_die);
11283
11284 #if defined (OBJECT_FORMAT_ELF)
11285 if (dwarf_version >= 5)
11286 {
11287 if (!dwarf_split_debug_info)
11288 secname = early_lto_debug ? DEBUG_LTO_INFO_SECTION : DEBUG_INFO_SECTION;
11289 else
11290 secname = (early_lto_debug
11291 ? DEBUG_LTO_DWO_INFO_SECTION : DEBUG_DWO_INFO_SECTION);
11292 }
11293 else if (!dwarf_split_debug_info)
11294 secname = early_lto_debug ? ".gnu.debuglto_.debug_types" : ".debug_types";
11295 else
11296 secname = (early_lto_debug
11297 ? ".gnu.debuglto_.debug_types.dwo" : ".debug_types.dwo");
11298
11299 tmp = XALLOCAVEC (char, 4 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11300 sprintf (tmp, dwarf_version >= 5 ? "wi." : "wt.");
11301 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11302 sprintf (tmp + 3 + i * 2, "%02x", node->signature[i] & 0xff);
11303 comdat_key = get_identifier (tmp);
11304 targetm.asm_out.named_section (secname,
11305 SECTION_DEBUG | SECTION_LINKONCE,
11306 comdat_key);
11307 #else
11308 tmp = XALLOCAVEC (char, 18 + DWARF_TYPE_SIGNATURE_SIZE * 2);
11309 sprintf (tmp, (dwarf_version >= 5
11310 ? ".gnu.linkonce.wi." : ".gnu.linkonce.wt."));
11311 for (i = 0; i < DWARF_TYPE_SIGNATURE_SIZE; i++)
11312 sprintf (tmp + 17 + i * 2, "%02x", node->signature[i] & 0xff);
11313 secname = tmp;
11314 switch_to_section (get_section (secname, SECTION_DEBUG, NULL));
11315 #endif
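/* As an example of the naming above (made-up signature): with DWARF 5 on
   ELF and a type signature of 0x0123456789abcdef, the type unit goes into
   the chosen .debug_info/.dwo/LTO section with comdat key
   "wi.0123456789abcdef"; the non-ELF path instead switches to a section
   named ".gnu.linkonce.wi." or ".gnu.linkonce.wt." followed by the same
   hex signature.  */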
11316
11317 /* Output debugging information. */
11318 output_compilation_unit_header (dwarf_split_debug_info
11319 ? DW_UT_split_type : DW_UT_type);
11320 output_signature (node->signature, "Type Signature");
11321 dw2_asm_output_data (DWARF_OFFSET_SIZE, node->type_die->die_offset,
11322 "Offset to Type DIE");
11323 output_die (node->root_die);
11324
11325 unmark_dies (node->root_die);
11326 }
11327
11328 /* Return the DWARF2/3 pubname associated with a decl. */
11329
11330 static const char *
11331 dwarf2_name (tree decl, int scope)
11332 {
11333 if (DECL_NAMELESS (decl))
11334 return NULL;
11335 return lang_hooks.dwarf_name (decl, scope ? 1 : 0);
11336 }
11337
11338 /* Add a new entry to .debug_pubnames if appropriate. */
11339
11340 static void
11341 add_pubname_string (const char *str, dw_die_ref die)
11342 {
11343 pubname_entry e;
11344
11345 e.die = die;
11346 e.name = xstrdup (str);
11347 vec_safe_push (pubname_table, e);
11348 }
11349
11350 static void
11351 add_pubname (tree decl, dw_die_ref die)
11352 {
11353 if (!want_pubnames ())
11354 return;
11355
11356 /* Don't add items to the table when we expect that the consumer will have
11357 just read the enclosing die. For example, if the consumer is looking at a
11358 class_member, it will either be inside the class already, or will have just
11359 looked up the class to find the member. Either way, searching the class is
11360 faster than searching the index. */
11361 if ((TREE_PUBLIC (decl) && !class_scope_p (die->die_parent))
11362 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11363 {
11364 const char *name = dwarf2_name (decl, 1);
11365
11366 if (name)
11367 add_pubname_string (name, die);
11368 }
11369 }
11370
11371 /* Add an enumerator to the pubnames section. */
11372
11373 static void
11374 add_enumerator_pubname (const char *scope_name, dw_die_ref die)
11375 {
11376 pubname_entry e;
11377
11378 gcc_assert (scope_name);
11379 e.name = concat (scope_name, get_AT_string (die, DW_AT_name), NULL);
11380 e.die = die;
11381 vec_safe_push (pubname_table, e);
11382 }
11383
11384 /* Add a new entry to .debug_pubtypes if appropriate. */
11385
11386 static void
11387 add_pubtype (tree decl, dw_die_ref die)
11388 {
11389 pubname_entry e;
11390
11391 if (!want_pubnames ())
11392 return;
11393
11394 if ((TREE_PUBLIC (decl)
11395 || is_cu_die (die->die_parent) || is_namespace_die (die->die_parent))
11396 && (die->die_tag == DW_TAG_typedef || COMPLETE_TYPE_P (decl)))
11397 {
11398 tree scope = NULL;
11399 const char *scope_name = "";
11400 const char *sep = is_cxx () ? "::" : ".";
11401 const char *name;
11402
11403 scope = TYPE_P (decl) ? TYPE_CONTEXT (decl) : NULL;
11404 if (scope && TREE_CODE (scope) == NAMESPACE_DECL)
11405 {
11406 scope_name = lang_hooks.dwarf_name (scope, 1);
11407 if (scope_name != NULL && scope_name[0] != '\0')
11408 scope_name = concat (scope_name, sep, NULL);
11409 else
11410 scope_name = "";
11411 }
11412
11413 if (TYPE_P (decl))
11414 name = type_tag (decl);
11415 else
11416 name = lang_hooks.dwarf_name (decl, 1);
11417
11418 /* If we don't have a name for the type, there's no point in adding
11419 it to the table. */
11420 if (name != NULL && name[0] != '\0')
11421 {
11422 e.die = die;
11423 e.name = concat (scope_name, name, NULL);
11424 vec_safe_push (pubtype_table, e);
11425 }
11426
11427 /* Although it might be more consistent to add the pubinfo for the
11428 enumerators as their dies are created, they should only be added if the
11429 enum type meets the criteria above. So rather than re-check the parent
11430 enum type whenever an enumerator die is created, just output them all
11431 here. This isn't protected by the name conditional because anonymous
11432 enums don't have names. */
11433 if (die->die_tag == DW_TAG_enumeration_type)
11434 {
11435 dw_die_ref c;
11436
11437 FOR_EACH_CHILD (die, c, add_enumerator_pubname (scope_name, c));
11438 }
11439 }
11440 }
11441
11442 /* Output a single entry in the pubnames table. */
11443
11444 static void
11445 output_pubname (dw_offset die_offset, pubname_entry *entry)
11446 {
11447 dw_die_ref die = entry->die;
11448 int is_static = get_AT_flag (die, DW_AT_external) ? 0 : 1;
11449
11450 dw2_asm_output_data (DWARF_OFFSET_SIZE, die_offset, "DIE offset");
11451
11452 if (debug_generate_pub_sections == 2)
11453 {
11454 /* This logic follows gdb's method for determining the value of the flag
11455 byte. */
11456 uint32_t flags = GDB_INDEX_SYMBOL_KIND_NONE;
11457 switch (die->die_tag)
11458 {
11459 case DW_TAG_typedef:
11460 case DW_TAG_base_type:
11461 case DW_TAG_subrange_type:
11462 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11463 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11464 break;
11465 case DW_TAG_enumerator:
11466 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11467 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11468 if (!is_cxx ())
11469 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11470 break;
11471 case DW_TAG_subprogram:
11472 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11473 GDB_INDEX_SYMBOL_KIND_FUNCTION);
11474 if (!is_ada ())
11475 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11476 break;
11477 case DW_TAG_constant:
11478 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11479 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11480 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11481 break;
11482 case DW_TAG_variable:
11483 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags,
11484 GDB_INDEX_SYMBOL_KIND_VARIABLE);
11485 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, is_static);
11486 break;
11487 case DW_TAG_namespace:
11488 case DW_TAG_imported_declaration:
11489 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11490 break;
11491 case DW_TAG_class_type:
11492 case DW_TAG_interface_type:
11493 case DW_TAG_structure_type:
11494 case DW_TAG_union_type:
11495 case DW_TAG_enumeration_type:
11496 GDB_INDEX_SYMBOL_KIND_SET_VALUE(flags, GDB_INDEX_SYMBOL_KIND_TYPE);
11497 if (!is_cxx ())
11498 GDB_INDEX_SYMBOL_STATIC_SET_VALUE(flags, 1);
11499 break;
11500 default:
11501 /* An unusual tag. Leave the flag-byte empty. */
11502 break;
11503 }
11504 dw2_asm_output_data (1, flags >> GDB_INDEX_CU_BITSIZE,
11505 "GDB-index flags");
11506 }
11507
11508 dw2_asm_output_nstring (entry->name, -1, "external name");
11509 }
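/* When debug_generate_pub_sections == 2 (GNU-style pubnames), the extra
   byte emitted above is the high part of the 32-bit gdb_index symbol
   word: shifting right by GDB_INDEX_CU_BITSIZE drops the low CU-index
   bits and leaves just the symbol kind and static flag for the index
   writer to combine with its own CU index.  */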
11510
11511
11512 /* Output the public names table used to speed up access to externally
11513 visible names; or the public types table used to find type definitions. */
11514
11515 static void
11516 output_pubnames (vec<pubname_entry, va_gc> *names)
11517 {
11518 unsigned i;
11519 unsigned long pubnames_length = size_of_pubnames (names);
11520 pubname_entry *pub;
11521
11522 if (!XCOFF_DEBUGGING_INFO)
11523 {
11524 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11525 dw2_asm_output_data (4, 0xffffffff,
11526 "Initial length escape value indicating 64-bit DWARF extension");
11527 dw2_asm_output_data (DWARF_OFFSET_SIZE, pubnames_length,
11528 "Pub Info Length");
11529 }
11530
11531 /* Version number for pubnames/pubtypes is independent of dwarf version. */
11532 dw2_asm_output_data (2, 2, "DWARF pubnames/pubtypes version");
11533
11534 if (dwarf_split_debug_info)
11535 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11536 debug_skeleton_info_section,
11537 "Offset of Compilation Unit Info");
11538 else
11539 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11540 debug_info_section,
11541 "Offset of Compilation Unit Info");
11542 dw2_asm_output_data (DWARF_OFFSET_SIZE, next_die_offset,
11543 "Compilation Unit Length");
11544
11545 FOR_EACH_VEC_ELT (*names, i, pub)
11546 {
11547 if (include_pubname_in_output (names, pub))
11548 {
11549 dw_offset die_offset = pub->die->die_offset;
11550
11551 /* We shouldn't see pubnames for DIEs outside of the main CU. */
11552 if (names == pubname_table && pub->die->die_tag != DW_TAG_enumerator)
11553 gcc_assert (pub->die->die_mark);
11554
11555 /* If we're putting types in their own .debug_types sections,
11556 the .debug_pubtypes table will still point to the compile
11557 unit (not the type unit), so we want to use the offset of
11558 the skeleton DIE (if there is one). */
11559 if (pub->die->comdat_type_p && names == pubtype_table)
11560 {
11561 comdat_type_node *type_node = pub->die->die_id.die_type_node;
11562
11563 if (type_node != NULL)
11564 die_offset = (type_node->skeleton_die != NULL
11565 ? type_node->skeleton_die->die_offset
11566 : comp_unit_die ()->die_offset);
11567 }
11568
11569 output_pubname (die_offset, pub);
11570 }
11571 }
11572
11573 dw2_asm_output_data (DWARF_OFFSET_SIZE, 0, NULL);
11574 }
11575
11576 /* Output public names and types tables if necessary. */
11577
11578 static void
11579 output_pubtables (void)
11580 {
11581 if (!want_pubnames () || !info_section_emitted)
11582 return;
11583
11584 switch_to_section (debug_pubnames_section);
11585 output_pubnames (pubname_table);
11586 /* ??? Only defined by DWARF3, but emitted by Darwin for DWARF2.
11587 It shouldn't hurt to emit it always, since pure DWARF2 consumers
11588 simply won't look for the section. */
11589 switch_to_section (debug_pubtypes_section);
11590 output_pubnames (pubtype_table);
11591 }
11592
11593
11594 /* Output the information that goes into the .debug_aranges table.
11595 Namely, define the beginning and ending address range of the
11596 text section generated for this compilation unit. */
11597
11598 static void
11599 output_aranges (void)
11600 {
11601 unsigned i;
11602 unsigned long aranges_length = size_of_aranges ();
11603
11604 if (!XCOFF_DEBUGGING_INFO)
11605 {
11606 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11607 dw2_asm_output_data (4, 0xffffffff,
11608 "Initial length escape value indicating 64-bit DWARF extension");
11609 dw2_asm_output_data (DWARF_OFFSET_SIZE, aranges_length,
11610 "Length of Address Ranges Info");
11611 }
11612
11613 /* The version number for aranges is still 2, even in DWARF 5. */
11614 dw2_asm_output_data (2, 2, "DWARF aranges version");
11615 if (dwarf_split_debug_info)
11616 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_skeleton_info_section_label,
11617 debug_skeleton_info_section,
11618 "Offset of Compilation Unit Info");
11619 else
11620 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_info_section_label,
11621 debug_info_section,
11622 "Offset of Compilation Unit Info");
11623 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
11624 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
11625
11626 /* We need to align to twice the pointer size here. */
11627 if (DWARF_ARANGES_PAD_SIZE)
11628 {
11629 /* Pad using 2-byte words so that the padding is correct for any
11630 pointer size. */
11631 dw2_asm_output_data (2, 0, "Pad to %d byte boundary",
11632 2 * DWARF2_ADDR_SIZE);
11633 for (i = 2; i < (unsigned) DWARF_ARANGES_PAD_SIZE; i += 2)
11634 dw2_asm_output_data (2, 0, NULL);
11635 }
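/* As a concrete example: with 32-bit DWARF the header so far is
   4 (length) + 2 (version) + 4 (CU offset) + 1 + 1 = 12 bytes, so both
   4- and 8-byte address targets emit 4 bytes of padding here to reach
   the next multiple of 2 * DWARF2_ADDR_SIZE (16 bytes in both cases)
   before the first address/length tuple.  */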
11636
11637 /* Do not output these entries if the sections were not used;
11638 otherwise the length will be 0 and the address may end up
11639 as 0 if the section is discarded by ld --gc-sections,
11640 leaving an invalid (0, 0) entry that can be confused with
11641 the terminator. */
11642 if (text_section_used)
11643 {
11644 dw2_asm_output_addr (DWARF2_ADDR_SIZE, text_section_label, "Address");
11645 dw2_asm_output_delta (DWARF2_ADDR_SIZE, text_end_label,
11646 text_section_label, "Length");
11647 }
11648 if (cold_text_section_used)
11649 {
11650 dw2_asm_output_addr (DWARF2_ADDR_SIZE, cold_text_section_label,
11651 "Address");
11652 dw2_asm_output_delta (DWARF2_ADDR_SIZE, cold_end_label,
11653 cold_text_section_label, "Length");
11654 }
11655
11656 if (have_multiple_function_sections)
11657 {
11658 unsigned fde_idx;
11659 dw_fde_ref fde;
11660
11661 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
11662 {
11663 if (DECL_IGNORED_P (fde->decl))
11664 continue;
11665 if (!fde->in_std_section)
11666 {
11667 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_begin,
11668 "Address");
11669 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_end,
11670 fde->dw_fde_begin, "Length");
11671 }
11672 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
11673 {
11674 dw2_asm_output_addr (DWARF2_ADDR_SIZE, fde->dw_fde_second_begin,
11675 "Address");
11676 dw2_asm_output_delta (DWARF2_ADDR_SIZE, fde->dw_fde_second_end,
11677 fde->dw_fde_second_begin, "Length");
11678 }
11679 }
11680 }
11681
11682 /* Output the terminator words. */
11683 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11684 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11685 }
11686
11687 /* Add a new entry to .debug_ranges. Return its index into
11688 ranges_table vector. */
11689
11690 static unsigned int
11691 add_ranges_num (int num, bool maybe_new_sec)
11692 {
11693 dw_ranges r = { NULL, num, 0, maybe_new_sec };
11694 vec_safe_push (ranges_table, r);
11695 return vec_safe_length (ranges_table) - 1;
11696 }
11697
11698 /* Add a new entry to .debug_ranges corresponding to a block, or a
11699 range terminator if BLOCK is NULL. MAYBE_NEW_SEC is true if
11700 this entry might be in a different section from previous range. */
11701
11702 static unsigned int
11703 add_ranges (const_tree block, bool maybe_new_sec)
11704 {
11705 return add_ranges_num (block ? BLOCK_NUMBER (block) : 0, maybe_new_sec);
11706 }
11707
11708 /* Note that (*ranges_table)[offset] is either the head of a rnglist
11709 chain, or a middle entry of a chain that will be directly referred to. */
11710
11711 static void
11712 note_rnglist_head (unsigned int offset)
11713 {
11714 if (dwarf_version < 5 || (*ranges_table)[offset].label)
11715 return;
11716 (*ranges_table)[offset].label = gen_internal_sym ("LLRL");
11717 }
11718
11719 /* Add a new entry to .debug_ranges corresponding to a pair of labels.
11720 When using dwarf_split_debug_info, address attributes in dies destined
11721 for the final executable should be direct references--setting the
11722 parameter force_direct ensures this behavior. */
11723
11724 static void
11725 add_ranges_by_labels (dw_die_ref die, const char *begin, const char *end,
11726 bool *added, bool force_direct)
11727 {
11728 unsigned int in_use = vec_safe_length (ranges_by_label);
11729 unsigned int offset;
11730 dw_ranges_by_label rbl = { begin, end };
11731 vec_safe_push (ranges_by_label, rbl);
11732 offset = add_ranges_num (-(int)in_use - 1, true);
11733 if (!*added)
11734 {
11735 add_AT_range_list (die, DW_AT_ranges, offset, force_direct);
11736 *added = true;
11737 note_rnglist_head (offset);
11738 }
11739 }
11740
11741 /* Emit .debug_ranges section. */
11742
11743 static void
11744 output_ranges (void)
11745 {
11746 unsigned i;
11747 static const char *const start_fmt = "Offset %#x";
11748 const char *fmt = start_fmt;
11749 dw_ranges *r;
11750
11751 switch_to_section (debug_ranges_section);
11752 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11753 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11754 {
11755 int block_num = r->num;
11756
11757 if (block_num > 0)
11758 {
11759 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11760 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11761
11762 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11763 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11764
11765 /* If all code is in the text section, then the compilation
11766 unit base address defaults to DW_AT_low_pc, which is the
11767 base of the text section. */
11768 if (!have_multiple_function_sections)
11769 {
11770 dw2_asm_output_delta (DWARF2_ADDR_SIZE, blabel,
11771 text_section_label,
11772 fmt, i * 2 * DWARF2_ADDR_SIZE);
11773 dw2_asm_output_delta (DWARF2_ADDR_SIZE, elabel,
11774 text_section_label, NULL);
11775 }
11776
11777 /* Otherwise, the compilation unit base address is zero,
11778 which allows us to use absolute addresses, and not worry
11779 about whether the target supports cross-section
11780 arithmetic. */
11781 else
11782 {
11783 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11784 fmt, i * 2 * DWARF2_ADDR_SIZE);
11785 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel, NULL);
11786 }
11787
11788 fmt = NULL;
11789 }
11790
11791 /* Negative block_num stands for an index into ranges_by_label. */
11792 else if (block_num < 0)
11793 {
11794 int lab_idx = - block_num - 1;
11795
11796 if (!have_multiple_function_sections)
11797 {
11798 gcc_unreachable ();
11799 #if 0
11800 /* If we ever use add_ranges_by_labels () for a single
11801 function section, all we have to do is to take out
11802 the #if 0 above. */
11803 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11804 (*ranges_by_label)[lab_idx].begin,
11805 text_section_label,
11806 fmt, i * 2 * DWARF2_ADDR_SIZE);
11807 dw2_asm_output_delta (DWARF2_ADDR_SIZE,
11808 (*ranges_by_label)[lab_idx].end,
11809 text_section_label, NULL);
11810 #endif
11811 }
11812 else
11813 {
11814 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11815 (*ranges_by_label)[lab_idx].begin,
11816 fmt, i * 2 * DWARF2_ADDR_SIZE);
11817 dw2_asm_output_addr (DWARF2_ADDR_SIZE,
11818 (*ranges_by_label)[lab_idx].end,
11819 NULL);
11820 }
11821 }
11822 else
11823 {
11824 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11825 dw2_asm_output_data (DWARF2_ADDR_SIZE, 0, NULL);
11826 fmt = start_fmt;
11827 }
11828 }
11829 }
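/* For instance, when all code is in one text section a block's range is
   emitted as two address-size deltas, block-begin label minus the text
   section label and block-end label minus the text section label, and
   each list ends with the (0, 0) terminator pair emitted above.  */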
11830
11831 /* Non-zero if .debug_line_str should be used for .debug_line section
11832 strings or strings that are likely shareable with those. */
11833 #define DWARF5_USE_DEBUG_LINE_STR \
11834 (!DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET \
11835 && (DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) != 0 \
11836 /* FIXME: there is no .debug_line_str.dwo section, \
11837 for -gsplit-dwarf we should use DW_FORM_strx instead. */ \
11838 && !dwarf_split_debug_info)
11839
11840 /* Assign .debug_rnglists indexes. */
11841
11842 static void
11843 index_rnglists (void)
11844 {
11845 unsigned i;
11846 dw_ranges *r;
11847
11848 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11849 if (r->label)
11850 r->idx = rnglist_idx++;
11851 }
11852
11853 /* Emit .debug_rnglists section. */
11854
11855 static void
11856 output_rnglists (unsigned generation)
11857 {
11858 unsigned i;
11859 dw_ranges *r;
11860 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
11861 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
11862 char basebuf[MAX_ARTIFICIAL_LABEL_BYTES];
11863
11864 switch_to_section (debug_ranges_section);
11865 ASM_OUTPUT_LABEL (asm_out_file, ranges_section_label);
11866 /* There are up to 4 unique ranges labels per generation.
11867 See also init_sections_and_labels. */
11868 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_RANGES_SECTION_LABEL,
11869 2 + generation * 4);
11870 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_RANGES_SECTION_LABEL,
11871 3 + generation * 4);
11872 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
11873 dw2_asm_output_data (4, 0xffffffff,
11874 "Initial length escape value indicating "
11875 "64-bit DWARF extension");
11876 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
11877 "Length of Range Lists");
11878 ASM_OUTPUT_LABEL (asm_out_file, l1);
11879 output_dwarf_version ();
11880 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
11881 dw2_asm_output_data (1, 0, "Segment Size");
11882 /* Emit the offset table only for -gsplit-dwarf. If we don't care
11883 about relocation sizes and primarily care about the size of .debug*
11884 sections in linked shared libraries and executables, then
11885 the offset table plus corresponding DW_FORM_rnglistx uleb128 indexes
11886 into it are usually larger than just DW_FORM_sec_offset offsets
11887 into the .debug_rnglists section. */
11888 dw2_asm_output_data (4, dwarf_split_debug_info ? rnglist_idx : 0,
11889 "Offset Entry Count");
11890 if (dwarf_split_debug_info)
11891 {
11892 ASM_OUTPUT_LABEL (asm_out_file, ranges_base_label);
11893 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11894 if (r->label)
11895 dw2_asm_output_delta (DWARF_OFFSET_SIZE, r->label,
11896 ranges_base_label, NULL);
11897 }
11898
11899 const char *lab = "";
11900 unsigned int len = vec_safe_length (ranges_table);
11901 const char *base = NULL;
11902 FOR_EACH_VEC_SAFE_ELT (ranges_table, i, r)
11903 {
11904 int block_num = r->num;
11905
11906 if (r->label)
11907 {
11908 ASM_OUTPUT_LABEL (asm_out_file, r->label);
11909 lab = r->label;
11910 }
11911 if (HAVE_AS_LEB128 && (r->label || r->maybe_new_sec))
11912 base = NULL;
11913 if (block_num > 0)
11914 {
11915 char blabel[MAX_ARTIFICIAL_LABEL_BYTES];
11916 char elabel[MAX_ARTIFICIAL_LABEL_BYTES];
11917
11918 ASM_GENERATE_INTERNAL_LABEL (blabel, BLOCK_BEGIN_LABEL, block_num);
11919 ASM_GENERATE_INTERNAL_LABEL (elabel, BLOCK_END_LABEL, block_num);
11920
11921 if (HAVE_AS_LEB128)
11922 {
11923 /* If all code is in the text section, then the compilation
11924 unit base address defaults to DW_AT_low_pc, which is the
11925 base of the text section. */
11926 if (!have_multiple_function_sections)
11927 {
11928 dw2_asm_output_data (1, DW_RLE_offset_pair,
11929 "DW_RLE_offset_pair (%s)", lab);
11930 dw2_asm_output_delta_uleb128 (blabel, text_section_label,
11931 "Range begin address (%s)", lab);
11932 dw2_asm_output_delta_uleb128 (elabel, text_section_label,
11933 "Range end address (%s)", lab);
11934 continue;
11935 }
11936 if (base == NULL)
11937 {
11938 dw_ranges *r2 = NULL;
11939 if (i < len - 1)
11940 r2 = &(*ranges_table)[i + 1];
11941 if (r2
11942 && r2->num != 0
11943 && r2->label == NULL
11944 && !r2->maybe_new_sec)
11945 {
11946 dw2_asm_output_data (1, DW_RLE_base_address,
11947 "DW_RLE_base_address (%s)", lab);
11948 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11949 "Base address (%s)", lab);
11950 strcpy (basebuf, blabel);
11951 base = basebuf;
11952 }
11953 }
11954 if (base)
11955 {
11956 dw2_asm_output_data (1, DW_RLE_offset_pair,
11957 "DW_RLE_offset_pair (%s)", lab);
11958 dw2_asm_output_delta_uleb128 (blabel, base,
11959 "Range begin address (%s)", lab);
11960 dw2_asm_output_delta_uleb128 (elabel, base,
11961 "Range end address (%s)", lab);
11962 continue;
11963 }
11964 dw2_asm_output_data (1, DW_RLE_start_length,
11965 "DW_RLE_start_length (%s)", lab);
11966 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11967 "Range begin address (%s)", lab);
11968 dw2_asm_output_delta_uleb128 (elabel, blabel,
11969 "Range length (%s)", lab);
11970 }
11971 else
11972 {
11973 dw2_asm_output_data (1, DW_RLE_start_end,
11974 "DW_RLE_start_end (%s)", lab);
11975 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11976 "Range begin address (%s)", lab);
11977 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
11978 "Range end address (%s)", lab);
11979 }
11980 }
11981
11982 /* Negative block_num stands for an index into ranges_by_label. */
11983 else if (block_num < 0)
11984 {
11985 int lab_idx = - block_num - 1;
11986 const char *blabel = (*ranges_by_label)[lab_idx].begin;
11987 const char *elabel = (*ranges_by_label)[lab_idx].end;
11988
11989 if (!have_multiple_function_sections)
11990 gcc_unreachable ();
11991 if (HAVE_AS_LEB128)
11992 {
11993 dw2_asm_output_data (1, DW_RLE_start_length,
11994 "DW_RLE_start_length (%s)", lab);
11995 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
11996 "Range begin address (%s)", lab);
11997 dw2_asm_output_delta_uleb128 (elabel, blabel,
11998 "Range length (%s)", lab);
11999 }
12000 else
12001 {
12002 dw2_asm_output_data (1, DW_RLE_start_end,
12003 "DW_RLE_start_end (%s)", lab);
12004 dw2_asm_output_addr (DWARF2_ADDR_SIZE, blabel,
12005 "Range begin address (%s)", lab);
12006 dw2_asm_output_addr (DWARF2_ADDR_SIZE, elabel,
12007 "Range end address (%s)", lab);
12008 }
12009 }
12010 else
12011 dw2_asm_output_data (1, DW_RLE_end_of_list,
12012 "DW_RLE_end_of_list (%s)", lab);
12013 }
12014 ASM_OUTPUT_LABEL (asm_out_file, l2);
12015 }
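/* As an illustration of the encodings above: with LEB128 support and a
   single text section each block becomes a DW_RLE_offset_pair whose two
   uleb128 operands are offsets from the text section label; without
   LEB128 support the fallback is DW_RLE_start_end with two absolute
   addresses; and every list is closed by DW_RLE_end_of_list.  */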
12016
12017 /* Data structure containing information about input files. */
12018 struct file_info
12019 {
12020 const char *path; /* Complete file name. */
12021 const char *fname; /* File name part. */
12022 int length; /* Length of entire string. */
12023 struct dwarf_file_data * file_idx; /* Index in input file table. */
12024 int dir_idx; /* Index in directory table. */
12025 };
12026
12027 /* Data structure containing information about directories with source
12028 files. */
12029 struct dir_info
12030 {
12031 const char *path; /* Path including directory name. */
12032 int length; /* Path length. */
12033 int prefix; /* Index of directory entry which is a prefix. */
12034 int count; /* Number of files in this directory. */
12035 int dir_idx; /* Index of directory used as base. */
12036 };
12037
12038 /* Callback function for file_info comparison. We sort by looking at
12039 the directories in the path. */
12040
12041 static int
12042 file_info_cmp (const void *p1, const void *p2)
12043 {
12044 const struct file_info *const s1 = (const struct file_info *) p1;
12045 const struct file_info *const s2 = (const struct file_info *) p2;
12046 const unsigned char *cp1;
12047 const unsigned char *cp2;
12048
12049 /* Take care of file names without directories.  We need to return
12050 consistent values to qsort, since some implementations get confused
12051 if we return the same value when identical operands are passed in
12052 opposite orders.  So if neither has a directory, return 0; otherwise
12053 return 1 or -1 depending on which one has the directory.  We want
12054 the one with the directory to sort after the one without, so all
12055 files without a directory are at the start (normally only the compilation unit file). */
12056 if ((s1->path == s1->fname || s2->path == s2->fname))
12057 return (s2->path == s2->fname) - (s1->path == s1->fname);
12058
12059 cp1 = (const unsigned char *) s1->path;
12060 cp2 = (const unsigned char *) s2->path;
12061
12062 while (1)
12063 {
12064 ++cp1;
12065 ++cp2;
12066 /* Reached the end of the first path? If so, handle like above,
12067 but now we want longer directory prefixes before shorter ones. */
12068 if ((cp1 == (const unsigned char *) s1->fname)
12069 || (cp2 == (const unsigned char *) s2->fname))
12070 return ((cp1 == (const unsigned char *) s1->fname)
12071 - (cp2 == (const unsigned char *) s2->fname));
12072
12073 /* Character of current path component the same? */
12074 else if (*cp1 != *cp2)
12075 return *cp1 - *cp2;
12076 }
12077 }
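/* For example (made-up names), "main.c" (no directory) sorts before
   "src/util/a.c", which in turn sorts before "src/b.c": files without a
   directory come first, and longer directory prefixes come before
   shorter ones when one of the two paths runs out first.  */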
12078
12079 struct file_name_acquire_data
12080 {
12081 struct file_info *files;
12082 int used_files;
12083 int max_files;
12084 };
12085
12086 /* Traversal function for the hash table. */
12087
12088 int
12089 file_name_acquire (dwarf_file_data **slot, file_name_acquire_data *fnad)
12090 {
12091 struct dwarf_file_data *d = *slot;
12092 struct file_info *fi;
12093 const char *f;
12094
12095 gcc_assert (fnad->max_files >= d->emitted_number);
12096
12097 if (! d->emitted_number)
12098 return 1;
12099
12100 gcc_assert (fnad->max_files != fnad->used_files);
12101
12102 fi = fnad->files + fnad->used_files++;
12103
12104 /* Skip all leading "./". */
12105 f = d->filename;
12106 while (f[0] == '.' && IS_DIR_SEPARATOR (f[1]))
12107 f += 2;
12108
12109 /* Create a new array entry. */
12110 fi->path = f;
12111 fi->length = strlen (f);
12112 fi->file_idx = d;
12113
12114 /* Search for the file name part. */
12115 f = strrchr (f, DIR_SEPARATOR);
12116 #if defined (DIR_SEPARATOR_2)
12117 {
12118 char *g = strrchr (fi->path, DIR_SEPARATOR_2);
12119
12120 if (g != NULL)
12121 {
12122 if (f == NULL || f < g)
12123 f = g;
12124 }
12125 }
12126 #endif
12127
12128 fi->fname = f == NULL ? fi->path : f + 1;
12129 return 1;
12130 }
12131
12132 /* Helper function for output_file_names.  Emit a FORM-encoded
12133 string STR, with assembly comment start ENTRY_KIND and
12134 index IDX. */
12135
12136 static void
12137 output_line_string (enum dwarf_form form, const char *str,
12138 const char *entry_kind, unsigned int idx)
12139 {
12140 switch (form)
12141 {
12142 case DW_FORM_string:
12143 dw2_asm_output_nstring (str, -1, "%s: %#x", entry_kind, idx);
12144 break;
12145 case DW_FORM_line_strp:
12146 if (!debug_line_str_hash)
12147 debug_line_str_hash
12148 = hash_table<indirect_string_hasher>::create_ggc (10);
12149
12150 struct indirect_string_node *node;
12151 node = find_AT_string_in_table (str, debug_line_str_hash);
12152 set_indirect_string (node);
12153 node->form = form;
12154 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
12155 debug_line_str_section, "%s: %#x: \"%s\"",
12156 entry_kind, 0, node->str);
12157 break;
12158 default:
12159 gcc_unreachable ();
12160 }
12161 }
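/* For reference when reading output_file_names below: the choice of
   directory-index form depends on how many bytes a uleb128 value takes,
   which is what size_of_uleb128 computes.  A minimal sketch of that
   computation (7 value bits per byte, so 0-127 needs one byte, 128-16383
   two, and so on), kept under #if 0 since the real helper is the one the
   code actually calls:  */
#if 0
static unsigned int
uleb128_size_sketch (unsigned HOST_WIDE_INT value)
{
  unsigned int size = 0;
  do
    {
      /* Consume seven value bits per output byte.  */
      value >>= 7;
      size++;
    }
  while (value != 0);
  return size;
}
#endif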
12162
12163 /* Output the directory table and the file name table. We try to minimize
12164 the total amount of memory needed. A heuristic is used to avoid large
12165 slowdowns with many input files. */
12166
12167 static void
12168 output_file_names (void)
12169 {
12170 struct file_name_acquire_data fnad;
12171 int numfiles;
12172 struct file_info *files;
12173 struct dir_info *dirs;
12174 int *saved;
12175 int *savehere;
12176 int *backmap;
12177 int ndirs;
12178 int idx_offset;
12179 int i;
12180
12181 if (!last_emitted_file)
12182 {
12183 if (dwarf_version >= 5)
12184 {
12185 dw2_asm_output_data (1, 0, "Directory entry format count");
12186 dw2_asm_output_data_uleb128 (0, "Directories count");
12187 dw2_asm_output_data (1, 0, "File name entry format count");
12188 dw2_asm_output_data_uleb128 (0, "File names count");
12189 }
12190 else
12191 {
12192 dw2_asm_output_data (1, 0, "End directory table");
12193 dw2_asm_output_data (1, 0, "End file name table");
12194 }
12195 return;
12196 }
12197
12198 numfiles = last_emitted_file->emitted_number;
12199
12200 /* Allocate the various arrays we need. */
12201 files = XALLOCAVEC (struct file_info, numfiles);
12202 dirs = XALLOCAVEC (struct dir_info, numfiles);
12203
12204 fnad.files = files;
12205 fnad.used_files = 0;
12206 fnad.max_files = numfiles;
12207 file_table->traverse<file_name_acquire_data *, file_name_acquire> (&fnad);
12208 gcc_assert (fnad.used_files == fnad.max_files);
12209
12210 qsort (files, numfiles, sizeof (files[0]), file_info_cmp);
12211
12212 /* Find all the different directories used. */
12213 dirs[0].path = files[0].path;
12214 dirs[0].length = files[0].fname - files[0].path;
12215 dirs[0].prefix = -1;
12216 dirs[0].count = 1;
12217 dirs[0].dir_idx = 0;
12218 files[0].dir_idx = 0;
12219 ndirs = 1;
12220
12221 for (i = 1; i < numfiles; i++)
12222 if (files[i].fname - files[i].path == dirs[ndirs - 1].length
12223 && memcmp (dirs[ndirs - 1].path, files[i].path,
12224 dirs[ndirs - 1].length) == 0)
12225 {
12226 /* Same directory as last entry. */
12227 files[i].dir_idx = ndirs - 1;
12228 ++dirs[ndirs - 1].count;
12229 }
12230 else
12231 {
12232 int j;
12233
12234 /* This is a new directory. */
12235 dirs[ndirs].path = files[i].path;
12236 dirs[ndirs].length = files[i].fname - files[i].path;
12237 dirs[ndirs].count = 1;
12238 dirs[ndirs].dir_idx = ndirs;
12239 files[i].dir_idx = ndirs;
12240
12241 /* Search for a prefix. */
12242 dirs[ndirs].prefix = -1;
12243 for (j = 0; j < ndirs; j++)
12244 if (dirs[j].length < dirs[ndirs].length
12245 && dirs[j].length > 1
12246 && (dirs[ndirs].prefix == -1
12247 || dirs[j].length > dirs[dirs[ndirs].prefix].length)
12248 && memcmp (dirs[j].path, dirs[ndirs].path, dirs[j].length) == 0)
12249 dirs[ndirs].prefix = j;
12250
12251 ++ndirs;
12252 }
12253
12254 /* Now to the actual work.  We have to find a subset of the directories
12255 which allows expressing each file name using references to the
12256 directory table with the fewest characters.  We do not do an exhaustive
12257 search, which would require checking every combination of every
12258 possible prefix.  Instead we use a heuristic which gives nearly optimal
12259 results in most cases and is never far off. */
12260 saved = XALLOCAVEC (int, ndirs);
12261 savehere = XALLOCAVEC (int, ndirs);
12262
12263 memset (saved, '\0', ndirs * sizeof (saved[0]));
12264 for (i = 0; i < ndirs; i++)
12265 {
12266 int j;
12267 int total;
12268
12269 /* We can always save some space for the current directory. But this
12270 does not mean it will be enough to justify adding the directory. */
12271 savehere[i] = dirs[i].length;
12272 total = (savehere[i] - saved[i]) * dirs[i].count;
12273
12274 for (j = i + 1; j < ndirs; j++)
12275 {
12276 savehere[j] = 0;
12277 if (saved[j] < dirs[i].length)
12278 {
12279 /* Determine whether the dirs[i] path is a prefix of the
12280 dirs[j] path. */
12281 int k;
12282
12283 k = dirs[j].prefix;
12284 while (k != -1 && k != (int) i)
12285 k = dirs[k].prefix;
12286
12287 if (k == (int) i)
12288 {
12289 /* Yes it is. We can possibly save some memory by
12290 writing the filenames in dirs[j] relative to
12291 dirs[i]. */
12292 savehere[j] = dirs[i].length;
12293 total += (savehere[j] - saved[j]) * dirs[j].count;
12294 }
12295 }
12296 }
12297
12298 /* Check whether we can save enough to justify adding the dirs[i]
12299 directory. */
12300 if (total > dirs[i].length + 1)
12301 {
12302 /* It's worthwhile adding. */
12303 for (j = i; j < ndirs; j++)
12304 if (savehere[j] > 0)
12305 {
12306 /* Remember how much we saved for this directory so far. */
12307 saved[j] = savehere[j];
12308
12309 /* Remember the prefix directory. */
12310 dirs[j].dir_idx = i;
12311 }
12312 }
12313 }
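/* A small worked example of the heuristic above (made-up paths): with
   dirs[i] = "/usr/include/" (13 chars, 3 files) and dirs[j] =
   "/usr/include/sys/" (2 files) having i as a prefix, total becomes
   13 * 3 + 13 * 2 = 65 > 13 + 1, so "/usr/include/" is worth emitting
   and both directories record it via saved[] and dir_idx.  */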
12314
12315 /* Emit the directory name table. */
12316 idx_offset = dirs[0].length > 0 ? 1 : 0;
12317 enum dwarf_form str_form = DW_FORM_string;
12318 enum dwarf_form idx_form = DW_FORM_udata;
12319 if (dwarf_version >= 5)
12320 {
12321 const char *comp_dir = comp_dir_string ();
12322 if (comp_dir == NULL)
12323 comp_dir = "";
12324 dw2_asm_output_data (1, 1, "Directory entry format count");
12325 if (DWARF5_USE_DEBUG_LINE_STR)
12326 str_form = DW_FORM_line_strp;
12327 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12328 dw2_asm_output_data_uleb128 (str_form, "%s",
12329 get_DW_FORM_name (str_form));
12330 dw2_asm_output_data_uleb128 (ndirs + idx_offset, "Directories count");
12331 if (str_form == DW_FORM_string)
12332 {
12333 dw2_asm_output_nstring (comp_dir, -1, "Directory Entry: %#x", 0);
12334 for (i = 1 - idx_offset; i < ndirs; i++)
12335 dw2_asm_output_nstring (dirs[i].path,
12336 dirs[i].length
12337 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12338 "Directory Entry: %#x", i + idx_offset);
12339 }
12340 else
12341 {
12342 output_line_string (str_form, comp_dir, "Directory Entry", 0);
12343 for (i = 1 - idx_offset; i < ndirs; i++)
12344 {
12345 const char *str
12346 = ggc_alloc_string (dirs[i].path,
12347 dirs[i].length
12348 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR);
12349 output_line_string (str_form, str, "Directory Entry",
12350 (unsigned) i + idx_offset);
12351 }
12352 }
12353 }
12354 else
12355 {
12356 for (i = 1 - idx_offset; i < ndirs; i++)
12357 dw2_asm_output_nstring (dirs[i].path,
12358 dirs[i].length
12359 - !DWARF2_DIR_SHOULD_END_WITH_SEPARATOR,
12360 "Directory Entry: %#x", i + idx_offset);
12361
12362 dw2_asm_output_data (1, 0, "End directory table");
12363 }
12364
12365 /* We have to emit them in the order of emitted_number since that's
12366 used in the debug info generation. To do this efficiently we
12367 generate a back-mapping of the indices first. */
12368 backmap = XALLOCAVEC (int, numfiles);
12369 for (i = 0; i < numfiles; i++)
12370 backmap[files[i].file_idx->emitted_number - 1] = i;
12371
12372 if (dwarf_version >= 5)
12373 {
12374 const char *filename0 = get_AT_string (comp_unit_die (), DW_AT_name);
12375 if (filename0 == NULL)
12376 filename0 = "";
12377 /* DW_LNCT_directory_index can use DW_FORM_udata, DW_FORM_data1 and
12378 DW_FORM_data2.  Choose one based on the number of directories
12379 and how much space the indexes would occupy in each encoding.
12380 If we have at most 256 directories, all indexes fit into a
12381 single byte, so DW_FORM_data1 is most compact (with at most
12382 128 directories DW_FORM_udata would be just as compact, but
12383 no shorter and slower to decode). */
12384 if (ndirs + idx_offset <= 256)
12385 idx_form = DW_FORM_data1;
12386 /* If there are more than 65536 directories, we have to use
12387 DW_FORM_udata, because DW_FORM_data2 can't refer to them.
12388 Otherwise, compute how much space the indexes would occupy if
12389 they all used DW_FORM_udata (sum), compare that to the size of
12390 the DW_FORM_data2 encoding, and pick the more efficient one. */
12391 else if (ndirs + idx_offset <= 65536)
12392 {
12393 unsigned HOST_WIDE_INT sum = 1;
12394 for (i = 0; i < numfiles; i++)
12395 {
12396 int file_idx = backmap[i];
12397 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12398 sum += size_of_uleb128 (dir_idx);
12399 }
12400 if (sum >= HOST_WIDE_INT_UC (2) * (numfiles + 1))
12401 idx_form = DW_FORM_data2;
12402 }
12403 #ifdef VMS_DEBUGGING_INFO
12404 dw2_asm_output_data (1, 4, "File name entry format count");
12405 #else
12406 dw2_asm_output_data (1, 2, "File name entry format count");
12407 #endif
12408 dw2_asm_output_data_uleb128 (DW_LNCT_path, "DW_LNCT_path");
12409 dw2_asm_output_data_uleb128 (str_form, "%s",
12410 get_DW_FORM_name (str_form));
12411 dw2_asm_output_data_uleb128 (DW_LNCT_directory_index,
12412 "DW_LNCT_directory_index");
12413 dw2_asm_output_data_uleb128 (idx_form, "%s",
12414 get_DW_FORM_name (idx_form));
12415 #ifdef VMS_DEBUGGING_INFO
12416 dw2_asm_output_data_uleb128 (DW_LNCT_timestamp, "DW_LNCT_timestamp");
12417 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12418 dw2_asm_output_data_uleb128 (DW_LNCT_size, "DW_LNCT_size");
12419 dw2_asm_output_data_uleb128 (DW_FORM_udata, "DW_FORM_udata");
12420 #endif
12421 dw2_asm_output_data_uleb128 (numfiles + 1, "File names count");
12422
12423 output_line_string (str_form, filename0, "File Entry", 0);
12424
12425 /* Include directory index. */
12426 if (idx_form != DW_FORM_udata)
12427 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12428 0, NULL);
12429 else
12430 dw2_asm_output_data_uleb128 (0, NULL);
12431
12432 #ifdef VMS_DEBUGGING_INFO
12433 dw2_asm_output_data_uleb128 (0, NULL);
12434 dw2_asm_output_data_uleb128 (0, NULL);
12435 #endif
12436 }
12437
12438 /* Now write all the file names. */
12439 for (i = 0; i < numfiles; i++)
12440 {
12441 int file_idx = backmap[i];
12442 int dir_idx = dirs[files[file_idx].dir_idx].dir_idx;
12443
12444 #ifdef VMS_DEBUGGING_INFO
12445 #define MAX_VMS_VERSION_LEN 6 /* ";32768" */
12446
12447 /* Setting these fields can lead to debugger miscomparisons,
12448 but VMS Debug requires them to be set correctly. */
12449
12450 int ver;
12451 long long cdt;
12452 long siz;
12453 int maxfilelen = (strlen (files[file_idx].path)
12454 + dirs[dir_idx].length
12455 + MAX_VMS_VERSION_LEN + 1);
12456 char *filebuf = XALLOCAVEC (char, maxfilelen);
12457
12458 vms_file_stats_name (files[file_idx].path, 0, 0, 0, &ver);
12459 snprintf (filebuf, maxfilelen, "%s;%d",
12460 files[file_idx].path + dirs[dir_idx].length, ver);
12461
12462 output_line_string (str_form, filebuf, "File Entry", (unsigned) i + 1);
12463
12464 /* Include directory index. */
12465 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12466 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12467 dir_idx + idx_offset, NULL);
12468 else
12469 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12470
12471 /* Modification time. */
12472 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12473 &cdt, 0, 0, 0) == 0)
12474 ? cdt : 0, NULL);
12475
12476 /* File length in bytes. */
12477 dw2_asm_output_data_uleb128 ((vms_file_stats_name (files[file_idx].path,
12478 0, &siz, 0, 0) == 0)
12479 ? siz : 0, NULL);
12480 #else
12481 output_line_string (str_form,
12482 files[file_idx].path + dirs[dir_idx].length,
12483 "File Entry", (unsigned) i + 1);
12484
12485 /* Include directory index. */
12486 if (dwarf_version >= 5 && idx_form != DW_FORM_udata)
12487 dw2_asm_output_data (idx_form == DW_FORM_data1 ? 1 : 2,
12488 dir_idx + idx_offset, NULL);
12489 else
12490 dw2_asm_output_data_uleb128 (dir_idx + idx_offset, NULL);
12491
12492 if (dwarf_version >= 5)
12493 continue;
12494
12495 /* Modification time. */
12496 dw2_asm_output_data_uleb128 (0, NULL);
12497
12498 /* File length in bytes. */
12499 dw2_asm_output_data_uleb128 (0, NULL);
12500 #endif /* VMS_DEBUGGING_INFO */
12501 }
12502
12503 if (dwarf_version < 5)
12504 dw2_asm_output_data (1, 0, "End file name table");
12505 }
12506
12507
12508 /* Output one line number table into the .debug_line section. */
12509
12510 static void
12511 output_one_line_info_table (dw_line_info_table *table)
12512 {
12513 char line_label[MAX_ARTIFICIAL_LABEL_BYTES];
12514 unsigned int current_line = 1;
12515 bool current_is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
12516 dw_line_info_entry *ent, *prev_addr;
12517 size_t i;
12518 unsigned int view;
12519
12520 view = 0;
12521
12522 FOR_EACH_VEC_SAFE_ELT (table->entries, i, ent)
12523 {
12524 switch (ent->opcode)
12525 {
12526 case LI_set_address:
12527 /* ??? Unfortunately, we have little choice here currently, and
12528 must always use the most general form. GCC does not know the
12529 address delta itself, so we can't use DW_LNS_advance_pc. Many
12530 ports do have length attributes which will give an upper bound
12531 on the address range. We could perhaps use length attributes
12532 to determine when it is safe to use DW_LNS_fixed_advance_pc. */
12533 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12534
12535 view = 0;
12536
12537 /* This can handle any delta. This takes
12538 4+DWARF2_ADDR_SIZE bytes. */
12539 dw2_asm_output_data (1, 0, "set address %s%s", line_label,
12540 debug_variable_location_views
12541 ? ", reset view to 0" : "");
12542 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12543 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12544 dw2_asm_output_addr (DWARF2_ADDR_SIZE, line_label, NULL);
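/* The four directives above together form one DWARF extended opcode:
   a zero marker byte, a ULEB128 length covering the sub-opcode and its
   operand, the DW_LNE_set_address sub-opcode itself, and finally the
   address operand, emitted as a reference to LINE_LABEL so that the
   assembler/linker fills in the actual value.  */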
12545
12546 prev_addr = ent;
12547 break;
12548
12549 case LI_adv_address:
12550 {
12551 ASM_GENERATE_INTERNAL_LABEL (line_label, LINE_CODE_LABEL, ent->val);
12552 char prev_label[MAX_ARTIFICIAL_LABEL_BYTES];
12553 ASM_GENERATE_INTERNAL_LABEL (prev_label, LINE_CODE_LABEL, prev_addr->val);
12554
12555 view++;
12556
12557 dw2_asm_output_data (1, DW_LNS_fixed_advance_pc, "fixed advance PC, increment view to %i", view);
12558 dw2_asm_output_delta (2, line_label, prev_label,
12559 "from %s to %s", prev_label, line_label);
12560
12561 prev_addr = ent;
12562 break;
12563 }
12564
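/* A note on the special-opcode arithmetic used below: per the DWARF
   line number program encoding, a special opcode OPC advances the line
   register by DWARF_LINE_BASE + ((OPC - DWARF_LINE_OPCODE_BASE)
   % DWARF_LINE_RANGE) and the address by (OPC - DWARF_LINE_OPCODE_BASE)
   / DWARF_LINE_RANGE (times the minimum instruction length).  Because
   we only use DWARF_LINE_OPCODE_BASE + line_delta when line_delta is
   below DWARF_LINE_RANGE - 1, the implied address advance is always
   zero; address changes are handled separately by the LI_set_address
   and LI_adv_address cases above.  */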
12565 case LI_set_line:
12566 if (ent->val == current_line)
12567 {
12568 /* We still need to start a new row, so output a copy insn. */
12569 dw2_asm_output_data (1, DW_LNS_copy,
12570 "copy line %u", current_line);
12571 }
12572 else
12573 {
12574 int line_offset = ent->val - current_line;
12575 int line_delta = line_offset - DWARF_LINE_BASE;
12576
12577 current_line = ent->val;
12578 if (line_delta >= 0 && line_delta < (DWARF_LINE_RANGE - 1))
12579 {
12580 /* This can handle deltas from -10 to 234, using the current
12581 definitions of DWARF_LINE_BASE and DWARF_LINE_RANGE.
12582 This takes 1 byte. */
12583 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE + line_delta,
12584 "line %u", current_line);
12585 }
12586 else
12587 {
12588 /* This can handle any delta. This takes at least 4 bytes,
12589 depending on the value being encoded. */
12590 dw2_asm_output_data (1, DW_LNS_advance_line,
12591 "advance to line %u", current_line);
12592 dw2_asm_output_data_sleb128 (line_offset, NULL);
12593 dw2_asm_output_data (1, DW_LNS_copy, NULL);
12594 }
12595 }
12596 break;
12597
12598 case LI_set_file:
12599 dw2_asm_output_data (1, DW_LNS_set_file, "set file %u", ent->val);
12600 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12601 break;
12602
12603 case LI_set_column:
12604 dw2_asm_output_data (1, DW_LNS_set_column, "column %u", ent->val);
12605 dw2_asm_output_data_uleb128 (ent->val, "%u", ent->val);
12606 break;
12607
12608 case LI_negate_stmt:
12609 current_is_stmt = !current_is_stmt;
12610 dw2_asm_output_data (1, DW_LNS_negate_stmt,
12611 "is_stmt %d", current_is_stmt);
12612 break;
12613
12614 case LI_set_prologue_end:
12615 dw2_asm_output_data (1, DW_LNS_set_prologue_end,
12616 "set prologue end");
12617 break;
12618
12619 case LI_set_epilogue_begin:
12620 dw2_asm_output_data (1, DW_LNS_set_epilogue_begin,
12621 "set epilogue begin");
12622 break;
12623
12624 case LI_set_discriminator:
12625 dw2_asm_output_data (1, 0, "discriminator %u", ent->val);
12626 dw2_asm_output_data_uleb128 (1 + size_of_uleb128 (ent->val), NULL);
12627 dw2_asm_output_data (1, DW_LNE_set_discriminator, NULL);
12628 dw2_asm_output_data_uleb128 (ent->val, NULL);
12629 break;
12630 }
12631 }
12632
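/* Every sequence is terminated below by one more DW_LNE_set_address
   (pointing at the table's end label) followed by DW_LNE_end_sequence.
   The end_sequence opcode appends a final row whose address marks the
   first byte past the sequence and resets the line number state
   machine, so the next table starts from the default state again.  */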
12633 /* Emit debug info for the address of the end of the table. */
12634 dw2_asm_output_data (1, 0, "set address %s", table->end_label);
12635 dw2_asm_output_data_uleb128 (1 + DWARF2_ADDR_SIZE, NULL);
12636 dw2_asm_output_data (1, DW_LNE_set_address, NULL);
12637 dw2_asm_output_addr (DWARF2_ADDR_SIZE, table->end_label, NULL);
12638
12639 dw2_asm_output_data (1, 0, "end sequence");
12640 dw2_asm_output_data_uleb128 (1, NULL);
12641 dw2_asm_output_data (1, DW_LNE_end_sequence, NULL);
12642 }
12643
12644 /* Output the source line number correspondence information. This
12645 information goes into the .debug_line section. */
12646
12647 static void
12648 output_line_info (bool prologue_only)
12649 {
12650 static unsigned int generation;
12651 char l1[MAX_ARTIFICIAL_LABEL_BYTES], l2[MAX_ARTIFICIAL_LABEL_BYTES];
12652 char p1[MAX_ARTIFICIAL_LABEL_BYTES], p2[MAX_ARTIFICIAL_LABEL_BYTES];
12653 bool saw_one = false;
12654 int opc;
12655
12656 ASM_GENERATE_INTERNAL_LABEL (l1, LINE_NUMBER_BEGIN_LABEL, generation);
12657 ASM_GENERATE_INTERNAL_LABEL (l2, LINE_NUMBER_END_LABEL, generation);
12658 ASM_GENERATE_INTERNAL_LABEL (p1, LN_PROLOG_AS_LABEL, generation);
12659 ASM_GENERATE_INTERNAL_LABEL (p2, LN_PROLOG_END_LABEL, generation++);
12660
12661 if (!XCOFF_DEBUGGING_INFO)
12662 {
12663 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
12664 dw2_asm_output_data (4, 0xffffffff,
12665 "Initial length escape value indicating 64-bit DWARF extension");
12666 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
12667 "Length of Source Line Info");
12668 }
12669
12670 ASM_OUTPUT_LABEL (asm_out_file, l1);
12671
12672 output_dwarf_version ();
12673 if (dwarf_version >= 5)
12674 {
12675 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
12676 dw2_asm_output_data (1, 0, "Segment Size");
12677 }
12678 dw2_asm_output_delta (DWARF_OFFSET_SIZE, p2, p1, "Prolog Length");
12679 ASM_OUTPUT_LABEL (asm_out_file, p1);
12680
12681 /* Define the architecture-dependent minimum instruction length (in bytes).
12682 In this implementation of DWARF, this field is used for information
12683 purposes only. Since GCC generates assembly language, we have no
12684 a priori knowledge of how many instruction bytes are generated for each
12685 source line, and therefore can use only the DW_LNE_set_address and
12686 DW_LNS_fixed_advance_pc line information commands. Accordingly, we fix
12687 this as '1', which is "correct enough" for all architectures,
12688 and don't let the target override. */
12689 dw2_asm_output_data (1, 1, "Minimum Instruction Length");
12690
12691 if (dwarf_version >= 4)
12692 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_MAX_OPS_PER_INSN,
12693 "Maximum Operations Per Instruction");
12694 dw2_asm_output_data (1, DWARF_LINE_DEFAULT_IS_STMT_START,
12695 "Default is_stmt_start flag");
12696 dw2_asm_output_data (1, DWARF_LINE_BASE,
12697 "Line Base Value (Special Opcodes)");
12698 dw2_asm_output_data (1, DWARF_LINE_RANGE,
12699 "Line Range Value (Special Opcodes)");
12700 dw2_asm_output_data (1, DWARF_LINE_OPCODE_BASE,
12701 "Special Opcode Base");
12702
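/* The loop below emits the standard_opcode_lengths array of the line
   table header: for every standard opcode below DWARF_LINE_OPCODE_BASE
   it records how many ULEB128 operands that opcode takes, which lets
   consumers skip standard opcodes they do not otherwise recognize.  */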
12703 for (opc = 1; opc < DWARF_LINE_OPCODE_BASE; opc++)
12704 {
12705 int n_op_args;
12706 switch (opc)
12707 {
12708 case DW_LNS_advance_pc:
12709 case DW_LNS_advance_line:
12710 case DW_LNS_set_file:
12711 case DW_LNS_set_column:
12712 case DW_LNS_fixed_advance_pc:
12713 case DW_LNS_set_isa:
12714 n_op_args = 1;
12715 break;
12716 default:
12717 n_op_args = 0;
12718 break;
12719 }
12720
12721 dw2_asm_output_data (1, n_op_args, "opcode: %#x has %d args",
12722 opc, n_op_args);
12723 }
12724
12725 /* Write out the information about the files we use. */
12726 output_file_names ();
12727 ASM_OUTPUT_LABEL (asm_out_file, p2);
12728 if (prologue_only)
12729 {
12730 /* Output the marker for the end of the line number info. */
12731 ASM_OUTPUT_LABEL (asm_out_file, l2);
12732 return;
12733 }
12734
12735 if (separate_line_info)
12736 {
12737 dw_line_info_table *table;
12738 size_t i;
12739
12740 FOR_EACH_VEC_ELT (*separate_line_info, i, table)
12741 if (table->in_use)
12742 {
12743 output_one_line_info_table (table);
12744 saw_one = true;
12745 }
12746 }
12747 if (cold_text_section_line_info && cold_text_section_line_info->in_use)
12748 {
12749 output_one_line_info_table (cold_text_section_line_info);
12750 saw_one = true;
12751 }
12752
12753 /* ??? Some Darwin linkers crash on a .debug_line section with no
12754 sequences. Further, merely a DW_LNE_end_sequence entry is not
12755 sufficient -- the address column must also be initialized.
12756 Make sure to output at least one set_address/end_sequence pair,
12757 choosing .text since that section is always present. */
12758 if (text_section_line_info->in_use || !saw_one)
12759 output_one_line_info_table (text_section_line_info);
12760
12761 /* Output the marker for the end of the line number info. */
12762 ASM_OUTPUT_LABEL (asm_out_file, l2);
12763 }
12764 \f
12765 /* Return true if DW_AT_endianity should be emitted according to REVERSE. */
12766
12767 static inline bool
12768 need_endianity_attribute_p (bool reverse)
12769 {
12770 return reverse && (dwarf_version >= 3 || !dwarf_strict);
12771 }
12772
12773 /* Given a pointer to a tree node for some base type, return a pointer to
12774 a DIE that describes the given type. REVERSE is true if the type is
12775 to be interpreted in the reverse storage order wrt the target order.
12776
12777 This routine must only be called for GCC type nodes that correspond to
12778 Dwarf base (fundamental) types. */
12779
12780 static dw_die_ref
12781 base_type_die (tree type, bool reverse)
12782 {
12783 dw_die_ref base_type_result;
12784 enum dwarf_type encoding;
12785 bool fpt_used = false;
12786 struct fixed_point_type_info fpt_info;
12787 tree type_bias = NULL_TREE;
12788
12789 /* If this is a subtype that should not be emitted as a subrange type,
12790 use the base type. See subrange_type_for_debug_p. */
12791 if (TREE_CODE (type) == INTEGER_TYPE && TREE_TYPE (type) != NULL_TREE)
12792 type = TREE_TYPE (type);
12793
12794 switch (TREE_CODE (type))
12795 {
12796 case INTEGER_TYPE:
12797 if ((dwarf_version >= 4 || !dwarf_strict)
12798 && TYPE_NAME (type)
12799 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12800 && DECL_IS_BUILTIN (TYPE_NAME (type))
12801 && DECL_NAME (TYPE_NAME (type)))
12802 {
12803 const char *name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (type)));
12804 if (strcmp (name, "char16_t") == 0
12805 || strcmp (name, "char32_t") == 0)
12806 {
12807 encoding = DW_ATE_UTF;
12808 break;
12809 }
12810 }
12811 if ((dwarf_version >= 3 || !dwarf_strict)
12812 && lang_hooks.types.get_fixed_point_type_info)
12813 {
12814 memset (&fpt_info, 0, sizeof (fpt_info));
12815 if (lang_hooks.types.get_fixed_point_type_info (type, &fpt_info))
12816 {
12817 fpt_used = true;
12818 encoding = ((TYPE_UNSIGNED (type))
12819 ? DW_ATE_unsigned_fixed
12820 : DW_ATE_signed_fixed);
12821 break;
12822 }
12823 }
12824 if (TYPE_STRING_FLAG (type))
12825 {
12826 if (TYPE_UNSIGNED (type))
12827 encoding = DW_ATE_unsigned_char;
12828 else
12829 encoding = DW_ATE_signed_char;
12830 }
12831 else if (TYPE_UNSIGNED (type))
12832 encoding = DW_ATE_unsigned;
12833 else
12834 encoding = DW_ATE_signed;
12835
12836 if (!dwarf_strict
12837 && lang_hooks.types.get_type_bias)
12838 type_bias = lang_hooks.types.get_type_bias (type);
12839 break;
12840
12841 case REAL_TYPE:
12842 if (DECIMAL_FLOAT_MODE_P (TYPE_MODE (type)))
12843 {
12844 if (dwarf_version >= 3 || !dwarf_strict)
12845 encoding = DW_ATE_decimal_float;
12846 else
12847 encoding = DW_ATE_lo_user;
12848 }
12849 else
12850 encoding = DW_ATE_float;
12851 break;
12852
12853 case FIXED_POINT_TYPE:
12854 if (!(dwarf_version >= 3 || !dwarf_strict))
12855 encoding = DW_ATE_lo_user;
12856 else if (TYPE_UNSIGNED (type))
12857 encoding = DW_ATE_unsigned_fixed;
12858 else
12859 encoding = DW_ATE_signed_fixed;
12860 break;
12861
12862 /* Dwarf2 doesn't know anything about complex ints, so use
12863 a user defined type for them. */
12864 case COMPLEX_TYPE:
12865 if (TREE_CODE (TREE_TYPE (type)) == REAL_TYPE)
12866 encoding = DW_ATE_complex_float;
12867 else
12868 encoding = DW_ATE_lo_user;
12869 break;
12870
12871 case BOOLEAN_TYPE:
12872 /* GNU FORTRAN/Ada/C++ BOOLEAN type. */
12873 encoding = DW_ATE_boolean;
12874 break;
12875
12876 default:
12877 /* No other TREE_CODEs are Dwarf fundamental types. */
12878 gcc_unreachable ();
12879 }
12880
12881 base_type_result = new_die_raw (DW_TAG_base_type);
12882
12883 add_AT_unsigned (base_type_result, DW_AT_byte_size,
12884 int_size_in_bytes (type));
12885 add_AT_unsigned (base_type_result, DW_AT_encoding, encoding);
12886
12887 if (need_endianity_attribute_p (reverse))
12888 add_AT_unsigned (base_type_result, DW_AT_endianity,
12889 BYTES_BIG_ENDIAN ? DW_END_little : DW_END_big);
12890
12891 add_alignment_attribute (base_type_result, type);
12892
12893 if (fpt_used)
12894 {
12895 switch (fpt_info.scale_factor_kind)
12896 {
12897 case fixed_point_scale_factor_binary:
12898 add_AT_int (base_type_result, DW_AT_binary_scale,
12899 fpt_info.scale_factor.binary);
12900 break;
12901
12902 case fixed_point_scale_factor_decimal:
12903 add_AT_int (base_type_result, DW_AT_decimal_scale,
12904 fpt_info.scale_factor.decimal);
12905 break;
12906
12907 case fixed_point_scale_factor_arbitrary:
12908 /* Arbitrary scale factors cannot be described in standard DWARF,
12909 yet. */
12910 if (!dwarf_strict)
12911 {
12912 /* Describe the scale factor as a rational constant. */
12913 const dw_die_ref scale_factor
12914 = new_die (DW_TAG_constant, comp_unit_die (), type);
12915
12916 add_AT_unsigned (scale_factor, DW_AT_GNU_numerator,
12917 fpt_info.scale_factor.arbitrary.numerator);
12918 add_AT_int (scale_factor, DW_AT_GNU_denominator,
12919 fpt_info.scale_factor.arbitrary.denominator);
12920
12921 add_AT_die_ref (base_type_result, DW_AT_small, scale_factor);
12922 }
12923 break;
12924
12925 default:
12926 gcc_unreachable ();
12927 }
12928 }
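/* As an illustration (not specific to any front end): a fixed-point
   type whose values are the stored integer times 2**-16 would get
   DW_AT_binary_scale -16, one scaled by 10**-2 would get
   DW_AT_decimal_scale -2, and any other scale factor falls back to the
   GNU numerator/denominator extension handled above.  */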
12929
12930 if (type_bias)
12931 add_scalar_info (base_type_result, DW_AT_GNU_bias, type_bias,
12932 dw_scalar_form_constant
12933 | dw_scalar_form_exprloc
12934 | dw_scalar_form_reference,
12935 NULL);
12936
12937 return base_type_result;
12938 }
12939
12940 /* A C++ function with deduced return type can have a TEMPLATE_TYPE_PARM
12941 named 'auto' in its type: return true for it, false otherwise. */
12942
12943 static inline bool
12944 is_cxx_auto (tree type)
12945 {
12946 if (is_cxx ())
12947 {
12948 tree name = TYPE_IDENTIFIER (type);
12949 if (name == get_identifier ("auto")
12950 || name == get_identifier ("decltype(auto)"))
12951 return true;
12952 }
12953 return false;
12954 }
12955
12956 /* Given a pointer to an arbitrary ..._TYPE tree node, return nonzero if the
12957 given input type is a Dwarf "fundamental" type. Otherwise return zero. */
12958
12959 static inline int
12960 is_base_type (tree type)
12961 {
12962 switch (TREE_CODE (type))
12963 {
12964 case INTEGER_TYPE:
12965 case REAL_TYPE:
12966 case FIXED_POINT_TYPE:
12967 case COMPLEX_TYPE:
12968 case BOOLEAN_TYPE:
12969 return 1;
12970
12971 case VOID_TYPE:
12972 case ARRAY_TYPE:
12973 case RECORD_TYPE:
12974 case UNION_TYPE:
12975 case QUAL_UNION_TYPE:
12976 case ENUMERAL_TYPE:
12977 case FUNCTION_TYPE:
12978 case METHOD_TYPE:
12979 case POINTER_TYPE:
12980 case REFERENCE_TYPE:
12981 case NULLPTR_TYPE:
12982 case OFFSET_TYPE:
12983 case LANG_TYPE:
12984 case VECTOR_TYPE:
12985 return 0;
12986
12987 default:
12988 if (is_cxx_auto (type))
12989 return 0;
12990 gcc_unreachable ();
12991 }
12992
12993 return 0;
12994 }
12995
12996 /* Given a pointer to a tree node, assumed to be some kind of a ..._TYPE
12997 node, return the size in bits for the type if it is a constant, or else
12998 return the alignment for the type if the type's size is not constant, or
12999 else return BITS_PER_WORD if the type actually turns out to be an
13000 ERROR_MARK node. Return 0 if the type has no TYPE_SIZE at all. */
13001
13002 static inline unsigned HOST_WIDE_INT
13003 simple_type_size_in_bits (const_tree type)
13004 {
13005 if (TREE_CODE (type) == ERROR_MARK)
13006 return BITS_PER_WORD;
13007 else if (TYPE_SIZE (type) == NULL_TREE)
13008 return 0;
13009 else if (tree_fits_uhwi_p (TYPE_SIZE (type)))
13010 return tree_to_uhwi (TYPE_SIZE (type));
13011 else
13012 return TYPE_ALIGN (type);
13013 }
13014
13015 /* Similarly, but return an offset_int instead of UHWI. */
13016
13017 static inline offset_int
13018 offset_int_type_size_in_bits (const_tree type)
13019 {
13020 if (TREE_CODE (type) == ERROR_MARK)
13021 return BITS_PER_WORD;
13022 else if (TYPE_SIZE (type) == NULL_TREE)
13023 return 0;
13024 else if (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST)
13025 return wi::to_offset (TYPE_SIZE (type));
13026 else
13027 return TYPE_ALIGN (type);
13028 }
13029
13030 /* Given a pointer to a tree node for a subrange type, return a pointer
13031 to a DIE that describes the given type. */
13032
13033 static dw_die_ref
13034 subrange_type_die (tree type, tree low, tree high, tree bias,
13035 dw_die_ref context_die)
13036 {
13037 dw_die_ref subrange_die;
13038 const HOST_WIDE_INT size_in_bytes = int_size_in_bytes (type);
13039
13040 if (context_die == NULL)
13041 context_die = comp_unit_die ();
13042
13043 subrange_die = new_die (DW_TAG_subrange_type, context_die, type);
13044
13045 if (int_size_in_bytes (TREE_TYPE (type)) != size_in_bytes)
13046 {
13047 /* The size of the subrange type and its base type do not match,
13048 so we need to generate a size attribute for the subrange type. */
13049 add_AT_unsigned (subrange_die, DW_AT_byte_size, size_in_bytes);
13050 }
13051
13052 add_alignment_attribute (subrange_die, type);
13053
13054 if (low)
13055 add_bound_info (subrange_die, DW_AT_lower_bound, low, NULL);
13056 if (high)
13057 add_bound_info (subrange_die, DW_AT_upper_bound, high, NULL);
13058 if (bias && !dwarf_strict)
13059 add_scalar_info (subrange_die, DW_AT_GNU_bias, bias,
13060 dw_scalar_form_constant
13061 | dw_scalar_form_exprloc
13062 | dw_scalar_form_reference,
13063 NULL);
13064
13065 return subrange_die;
13066 }
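/* A minimal illustration, assuming an Ada-like subtype constrained to
   the range 1 .. 10: the DIE built above would carry DW_AT_lower_bound 1
   and DW_AT_upper_bound 10, and a DW_AT_byte_size only when the subtype
   occupies a different number of bytes than its base type.  */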
13067
13068 /* Returns the (const and/or volatile) cv_qualifiers associated with
13069 the decl node. This will normally be augmented with the
13070 cv_qualifiers of the underlying type in add_type_attribute. */
13071
13072 static int
13073 decl_quals (const_tree decl)
13074 {
13075 return ((TREE_READONLY (decl)
13076 /* The C++ front-end correctly marks reference-typed
13077 variables as readonly, but from a language (and debug
13078 info) standpoint they are not const-qualified. */
13079 && TREE_CODE (TREE_TYPE (decl)) != REFERENCE_TYPE
13080 ? TYPE_QUAL_CONST : TYPE_UNQUALIFIED)
13081 | (TREE_THIS_VOLATILE (decl)
13082 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED));
13083 }
13084
13085 /* Determine the TYPE whose qualifiers match the largest strict subset
13086 of the given TYPE_QUALS, and return its qualifiers. Ignore all
13087 qualifiers outside QUAL_MASK. */
13088
13089 static int
13090 get_nearest_type_subqualifiers (tree type, int type_quals, int qual_mask)
13091 {
13092 tree t;
13093 int best_rank = 0, best_qual = 0, max_rank;
13094
13095 type_quals &= qual_mask;
13096 max_rank = popcount_hwi (type_quals) - 1;
13097
13098 for (t = TYPE_MAIN_VARIANT (type); t && best_rank < max_rank;
13099 t = TYPE_NEXT_VARIANT (t))
13100 {
13101 int q = TYPE_QUALS (t) & qual_mask;
13102
13103 if ((q & type_quals) == q && q != type_quals
13104 && check_base_type (t, type))
13105 {
13106 int rank = popcount_hwi (q);
13107
13108 if (rank > best_rank)
13109 {
13110 best_rank = rank;
13111 best_qual = q;
13112 }
13113 }
13114 }
13115
13116 return best_qual;
13117 }
13118
13119 struct dwarf_qual_info_t { int q; enum dwarf_tag t; };
13120 static const dwarf_qual_info_t dwarf_qual_info[] =
13121 {
13122 { TYPE_QUAL_CONST, DW_TAG_const_type },
13123 { TYPE_QUAL_VOLATILE, DW_TAG_volatile_type },
13124 { TYPE_QUAL_RESTRICT, DW_TAG_restrict_type },
13125 { TYPE_QUAL_ATOMIC, DW_TAG_atomic_type }
13126 };
13127 static const unsigned int dwarf_qual_info_size
13128 = sizeof (dwarf_qual_info) / sizeof (dwarf_qual_info[0]);
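/* Illustrative example: when modified_type_die below is asked for a
   const volatile variant of some type, it walks this table in order,
   so the DW_TAG_const_type DIE is created first (closest to the base
   type) and the DW_TAG_volatile_type DIE is created last, wrapping it.  */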
13129
13130 /* If DIE is a qualified DIE of some base DIE with the same parent,
13131 return the base DIE, otherwise return NULL. Set MASK to the
13132 qualifiers added compared to the returned DIE. */
13133
13134 static dw_die_ref
13135 qualified_die_p (dw_die_ref die, int *mask, unsigned int depth)
13136 {
13137 unsigned int i;
13138 for (i = 0; i < dwarf_qual_info_size; i++)
13139 if (die->die_tag == dwarf_qual_info[i].t)
13140 break;
13141 if (i == dwarf_qual_info_size)
13142 return NULL;
13143 if (vec_safe_length (die->die_attr) != 1)
13144 return NULL;
13145 dw_die_ref type = get_AT_ref (die, DW_AT_type);
13146 if (type == NULL || type->die_parent != die->die_parent)
13147 return NULL;
13148 *mask |= dwarf_qual_info[i].q;
13149 if (depth)
13150 {
13151 dw_die_ref ret = qualified_die_p (type, mask, depth - 1);
13152 if (ret)
13153 return ret;
13154 }
13155 return type;
13156 }
13157
13158 /* Given a pointer to an arbitrary ..._TYPE tree node, return a debugging
13159 entry that chains the modifiers specified by CV_QUALS in front of the
13160 given type. REVERSE is true if the type is to be interpreted in the
13161 reverse storage order wrt the target order. */
13162
13163 static dw_die_ref
13164 modified_type_die (tree type, int cv_quals, bool reverse,
13165 dw_die_ref context_die)
13166 {
13167 enum tree_code code = TREE_CODE (type);
13168 dw_die_ref mod_type_die;
13169 dw_die_ref sub_die = NULL;
13170 tree item_type = NULL;
13171 tree qualified_type;
13172 tree name, low, high;
13173 dw_die_ref mod_scope;
13174 /* Only these cv-qualifiers are currently handled. */
13175 const int cv_qual_mask = (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE
13176 | TYPE_QUAL_RESTRICT | TYPE_QUAL_ATOMIC |
13177 ENCODE_QUAL_ADDR_SPACE(~0U));
13178 const bool reverse_base_type
13179 = need_endianity_attribute_p (reverse) && is_base_type (type);
13180
13181 if (code == ERROR_MARK)
13182 return NULL;
13183
13184 if (lang_hooks.types.get_debug_type)
13185 {
13186 tree debug_type = lang_hooks.types.get_debug_type (type);
13187
13188 if (debug_type != NULL_TREE && debug_type != type)
13189 return modified_type_die (debug_type, cv_quals, reverse, context_die);
13190 }
13191
13192 cv_quals &= cv_qual_mask;
13193
13194 /* Don't emit DW_TAG_restrict_type for DWARFv2, since it is a type
13195 tag modifier (and not an attribute) that old consumers won't be
13196 able to handle. */
13197 if (dwarf_version < 3)
13198 cv_quals &= ~TYPE_QUAL_RESTRICT;
13199
13200 /* Likewise for DW_TAG_atomic_type for DWARFv5. */
13201 if (dwarf_version < 5)
13202 cv_quals &= ~TYPE_QUAL_ATOMIC;
13203
13204 /* See if we already have the appropriately qualified variant of
13205 this type. */
13206 qualified_type = get_qualified_type (type, cv_quals);
13207
13208 if (qualified_type == sizetype)
13209 {
13210 /* Try not to expose the internal sizetype type's name. */
13211 if (TYPE_NAME (qualified_type)
13212 && TREE_CODE (TYPE_NAME (qualified_type)) == TYPE_DECL)
13213 {
13214 tree t = TREE_TYPE (TYPE_NAME (qualified_type));
13215
13216 gcc_checking_assert (TREE_CODE (t) == INTEGER_TYPE
13217 && (TYPE_PRECISION (t)
13218 == TYPE_PRECISION (qualified_type))
13219 && (TYPE_UNSIGNED (t)
13220 == TYPE_UNSIGNED (qualified_type)));
13221 qualified_type = t;
13222 }
13223 else if (qualified_type == sizetype
13224 && TREE_CODE (sizetype) == TREE_CODE (size_type_node)
13225 && TYPE_PRECISION (sizetype) == TYPE_PRECISION (size_type_node)
13226 && TYPE_UNSIGNED (sizetype) == TYPE_UNSIGNED (size_type_node))
13227 qualified_type = size_type_node;
13228 if (type == sizetype)
13229 type = qualified_type;
13230 }
13231
13232 /* If we do, then we can just use its DIE, if it exists. */
13233 if (qualified_type)
13234 {
13235 mod_type_die = lookup_type_die (qualified_type);
13236
13237 /* DW_AT_endianity doesn't come from a qualifier on the type, so it is
13238 dealt with specially: the DIE with the attribute, if it exists, is
13239 placed immediately after the regular DIE for the same base type. */
13240 if (mod_type_die
13241 && (!reverse_base_type
13242 || ((mod_type_die = mod_type_die->die_sib) != NULL
13243 && get_AT_unsigned (mod_type_die, DW_AT_endianity))))
13244 return mod_type_die;
13245 }
13246
13247 name = qualified_type ? TYPE_NAME (qualified_type) : NULL;
13248
13249 /* Handle C typedef types. */
13250 if (name
13251 && TREE_CODE (name) == TYPE_DECL
13252 && DECL_ORIGINAL_TYPE (name)
13253 && !DECL_ARTIFICIAL (name))
13254 {
13255 tree dtype = TREE_TYPE (name);
13256
13257 /* Skip the typedef for base types with DW_AT_endianity, no big deal. */
13258 if (qualified_type == dtype && !reverse_base_type)
13259 {
13260 tree origin = decl_ultimate_origin (name);
13261
13262 /* Typedef variants that have an abstract origin don't get their own
13263 type DIE (see gen_typedef_die), so fall back on the ultimate
13264 abstract origin instead. */
13265 if (origin != NULL && origin != name)
13266 return modified_type_die (TREE_TYPE (origin), cv_quals, reverse,
13267 context_die);
13268
13269 /* For a named type, use the typedef. */
13270 gen_type_die (qualified_type, context_die);
13271 return lookup_type_die (qualified_type);
13272 }
13273 else
13274 {
13275 int dquals = TYPE_QUALS_NO_ADDR_SPACE (dtype);
13276 dquals &= cv_qual_mask;
13277 if ((dquals & ~cv_quals) != TYPE_UNQUALIFIED
13278 || (cv_quals == dquals && DECL_ORIGINAL_TYPE (name) != type))
13279 /* cv-unqualified version of named type. Just use
13280 the unnamed type to which it refers. */
13281 return modified_type_die (DECL_ORIGINAL_TYPE (name), cv_quals,
13282 reverse, context_die);
13283 /* Else cv-qualified version of named type; fall through. */
13284 }
13285 }
13286
13287 mod_scope = scope_die_for (type, context_die);
13288
13289 if (cv_quals)
13290 {
13291 int sub_quals = 0, first_quals = 0;
13292 unsigned i;
13293 dw_die_ref first = NULL, last = NULL;
13294
13295 /* Determine a lesser qualified type that most closely matches
13296 this one. Then generate DW_TAG_* entries for the remaining
13297 qualifiers. */
13298 sub_quals = get_nearest_type_subqualifiers (type, cv_quals,
13299 cv_qual_mask);
13300 if (sub_quals && use_debug_types)
13301 {
13302 bool needed = false;
13303 /* If emitting type units, make sure the order of qualifiers
13304 is canonical. Thus, start from unqualified type if
13305 an earlier qualifier is missing in sub_quals, but some later
13306 one is present there. */
13307 for (i = 0; i < dwarf_qual_info_size; i++)
13308 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13309 needed = true;
13310 else if (needed && (dwarf_qual_info[i].q & cv_quals))
13311 {
13312 sub_quals = 0;
13313 break;
13314 }
13315 }
13316 mod_type_die = modified_type_die (type, sub_quals, reverse, context_die);
13317 if (mod_scope && mod_type_die && mod_type_die->die_parent == mod_scope)
13318 {
13319 /* As not all intermediate qualified DIEs have corresponding
13320 tree types, ensure that qualified DIEs in the same scope
13321 as their DW_AT_type are emitted after their DW_AT_type,
13322 only with other qualified DIEs for the same type possibly
13323 in between them. Determine the range of such qualified
13324 DIEs now (first being the base type, last being corresponding
13325 last qualified DIE for it). */
13326 unsigned int count = 0;
13327 first = qualified_die_p (mod_type_die, &first_quals,
13328 dwarf_qual_info_size);
13329 if (first == NULL)
13330 first = mod_type_die;
13331 gcc_assert ((first_quals & ~sub_quals) == 0);
13332 for (count = 0, last = first;
13333 count < (1U << dwarf_qual_info_size);
13334 count++, last = last->die_sib)
13335 {
13336 int quals = 0;
13337 if (last == mod_scope->die_child)
13338 break;
13339 if (qualified_die_p (last->die_sib, &quals, dwarf_qual_info_size)
13340 != first)
13341 break;
13342 }
13343 }
13344
13345 for (i = 0; i < dwarf_qual_info_size; i++)
13346 if (dwarf_qual_info[i].q & cv_quals & ~sub_quals)
13347 {
13348 dw_die_ref d;
13349 if (first && first != last)
13350 {
13351 for (d = first->die_sib; ; d = d->die_sib)
13352 {
13353 int quals = 0;
13354 qualified_die_p (d, &quals, dwarf_qual_info_size);
13355 if (quals == (first_quals | dwarf_qual_info[i].q))
13356 break;
13357 if (d == last)
13358 {
13359 d = NULL;
13360 break;
13361 }
13362 }
13363 if (d)
13364 {
13365 mod_type_die = d;
13366 continue;
13367 }
13368 }
13369 if (first)
13370 {
13371 d = new_die_raw (dwarf_qual_info[i].t);
13372 add_child_die_after (mod_scope, d, last);
13373 last = d;
13374 }
13375 else
13376 d = new_die (dwarf_qual_info[i].t, mod_scope, type);
13377 if (mod_type_die)
13378 add_AT_die_ref (d, DW_AT_type, mod_type_die);
13379 mod_type_die = d;
13380 first_quals |= dwarf_qual_info[i].q;
13381 }
13382 }
13383 else if (code == POINTER_TYPE || code == REFERENCE_TYPE)
13384 {
13385 dwarf_tag tag = DW_TAG_pointer_type;
13386 if (code == REFERENCE_TYPE)
13387 {
13388 if (TYPE_REF_IS_RVALUE (type) && dwarf_version >= 4)
13389 tag = DW_TAG_rvalue_reference_type;
13390 else
13391 tag = DW_TAG_reference_type;
13392 }
13393 mod_type_die = new_die (tag, mod_scope, type);
13394
13395 add_AT_unsigned (mod_type_die, DW_AT_byte_size,
13396 simple_type_size_in_bits (type) / BITS_PER_UNIT);
13397 add_alignment_attribute (mod_type_die, type);
13398 item_type = TREE_TYPE (type);
13399
13400 addr_space_t as = TYPE_ADDR_SPACE (item_type);
13401 if (!ADDR_SPACE_GENERIC_P (as))
13402 {
13403 int action = targetm.addr_space.debug (as);
13404 if (action >= 0)
13405 {
13406 /* Positive values indicate an address_class. */
13407 add_AT_unsigned (mod_type_die, DW_AT_address_class, action);
13408 }
13409 else
13410 {
13411 /* Negative values indicate an (inverted) segment base reg. */
13412 dw_loc_descr_ref d
13413 = one_reg_loc_descriptor (~action, VAR_INIT_STATUS_INITIALIZED);
13414 add_AT_loc (mod_type_die, DW_AT_segment, d);
13415 }
13416 }
13417 }
13418 else if (code == INTEGER_TYPE
13419 && TREE_TYPE (type) != NULL_TREE
13420 && subrange_type_for_debug_p (type, &low, &high))
13421 {
13422 tree bias = NULL_TREE;
13423 if (lang_hooks.types.get_type_bias)
13424 bias = lang_hooks.types.get_type_bias (type);
13425 mod_type_die = subrange_type_die (type, low, high, bias, context_die);
13426 item_type = TREE_TYPE (type);
13427 }
13428 else if (is_base_type (type))
13429 {
13430 mod_type_die = base_type_die (type, reverse);
13431
13432 /* The DIE with DW_AT_endianity is placed right after the naked DIE. */
13433 if (reverse_base_type)
13434 {
13435 dw_die_ref after_die
13436 = modified_type_die (type, cv_quals, false, context_die);
13437 add_child_die_after (comp_unit_die (), mod_type_die, after_die);
13438 }
13439 else
13440 add_child_die (comp_unit_die (), mod_type_die);
13441
13442 add_pubtype (type, mod_type_die);
13443 }
13444 else
13445 {
13446 gen_type_die (type, context_die);
13447
13448 /* We have to get the type_main_variant here (and pass that to the
13449 `lookup_type_die' routine) because the ..._TYPE node we have
13450 might simply be a *copy* of some original type node (where the
13451 copy was created to help us keep track of typedef names) and
13452 that copy might have a different TYPE_UID from the original
13453 ..._TYPE node. */
13454 if (TREE_CODE (type) == FUNCTION_TYPE
13455 || TREE_CODE (type) == METHOD_TYPE)
13456 {
13457 /* For function/method types, can't just use type_main_variant here,
13458 because that can have different ref-qualifiers for C++,
13459 but try to canonicalize. */
13460 tree main = TYPE_MAIN_VARIANT (type);
13461 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
13462 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
13463 && check_base_type (t, main)
13464 && check_lang_type (t, type))
13465 return lookup_type_die (t);
13466 return lookup_type_die (type);
13467 }
13468 else if (TREE_CODE (type) != VECTOR_TYPE
13469 && TREE_CODE (type) != ARRAY_TYPE)
13470 return lookup_type_die (type_main_variant (type));
13471 else
13472 /* Vectors have the debugging information in the type,
13473 not the main variant. */
13474 return lookup_type_die (type);
13475 }
13476
13477 /* Builtin types don't have a DECL_ORIGINAL_TYPE. For those,
13478 don't output a DW_TAG_typedef, since there isn't one in the
13479 user's program; just attach a DW_AT_name to the type.
13480 Don't attach a DW_AT_name to DW_TAG_const_type or DW_TAG_volatile_type
13481 if the base type already has the same name. */
13482 if (name
13483 && ((TREE_CODE (name) != TYPE_DECL
13484 && (qualified_type == TYPE_MAIN_VARIANT (type)
13485 || (cv_quals == TYPE_UNQUALIFIED)))
13486 || (TREE_CODE (name) == TYPE_DECL
13487 && TREE_TYPE (name) == qualified_type
13488 && DECL_NAME (name))))
13489 {
13490 if (TREE_CODE (name) == TYPE_DECL)
13491 /* Could just call add_name_and_src_coords_attributes here,
13492 but since this is a builtin type it doesn't have any
13493 useful source coordinates anyway. */
13494 name = DECL_NAME (name);
13495 add_name_attribute (mod_type_die, IDENTIFIER_POINTER (name));
13496 }
13497 /* This probably indicates a bug. */
13498 else if (mod_type_die && mod_type_die->die_tag == DW_TAG_base_type)
13499 {
13500 name = TYPE_IDENTIFIER (type);
13501 add_name_attribute (mod_type_die,
13502 name ? IDENTIFIER_POINTER (name) : "__unknown__");
13503 }
13504
13505 if (qualified_type && !reverse_base_type)
13506 equate_type_number_to_die (qualified_type, mod_type_die);
13507
13508 if (item_type)
13509 /* We must do this after the equate_type_number_to_die call, in case
13510 this is a recursive type. This ensures that the modified_type_die
13511 recursion will terminate even if the type is recursive. Recursive
13512 types are possible in Ada. */
13513 sub_die = modified_type_die (item_type,
13514 TYPE_QUALS_NO_ADDR_SPACE (item_type),
13515 reverse,
13516 context_die);
13517
13518 if (sub_die != NULL)
13519 add_AT_die_ref (mod_type_die, DW_AT_type, sub_die);
13520
13521 add_gnat_descriptive_type_attribute (mod_type_die, type, context_die);
13522 if (TYPE_ARTIFICIAL (type))
13523 add_AT_flag (mod_type_die, DW_AT_artificial, 1);
13524
13525 return mod_type_die;
13526 }
13527
13528 /* Generate DIEs for the generic parameters of T.
13529 T must be either a generic type or a generic function.
13530 See http://gcc.gnu.org/wiki/TemplateParmsDwarf for more. */
13531
13532 static void
13533 gen_generic_params_dies (tree t)
13534 {
13535 tree parms, args;
13536 int parms_num, i;
13537 dw_die_ref die = NULL;
13538 int non_default;
13539
13540 if (!t || (TYPE_P (t) && !COMPLETE_TYPE_P (t)))
13541 return;
13542
13543 if (TYPE_P (t))
13544 die = lookup_type_die (t);
13545 else if (DECL_P (t))
13546 die = lookup_decl_die (t);
13547
13548 gcc_assert (die);
13549
13550 parms = lang_hooks.get_innermost_generic_parms (t);
13551 if (!parms)
13552 /* T has no generic parameter. It means T is neither a generic type
13553 nor a generic function. End of story. */
13554 return;
13555
13556 parms_num = TREE_VEC_LENGTH (parms);
13557 args = lang_hooks.get_innermost_generic_args (t);
13558 if (TREE_CHAIN (args) && TREE_CODE (TREE_CHAIN (args)) == INTEGER_CST)
13559 non_default = int_cst_value (TREE_CHAIN (args));
13560 else
13561 non_default = TREE_VEC_LENGTH (args);
13562 for (i = 0; i < parms_num; i++)
13563 {
13564 tree parm, arg, arg_pack_elems;
13565 dw_die_ref parm_die;
13566
13567 parm = TREE_VEC_ELT (parms, i);
13568 arg = TREE_VEC_ELT (args, i);
13569 arg_pack_elems = lang_hooks.types.get_argument_pack_elems (arg);
13570 gcc_assert (parm && TREE_VALUE (parm) && arg);
13571
13572 if (parm && TREE_VALUE (parm) && arg)
13573 {
13574 /* If PARM represents a template parameter pack,
13575 emit a DW_TAG_GNU_template_parameter_pack DIE, followed
13576 by DW_TAG_template_*_parameter DIEs for the argument
13577 pack elements of ARG. Note that ARG would then be
13578 an argument pack. */
13579 if (arg_pack_elems)
13580 parm_die = template_parameter_pack_die (TREE_VALUE (parm),
13581 arg_pack_elems,
13582 die);
13583 else
13584 parm_die = generic_parameter_die (TREE_VALUE (parm), arg,
13585 true /* emit name */, die);
13586 if (i >= non_default)
13587 add_AT_flag (parm_die, DW_AT_default_value, 1);
13588 }
13589 }
13590 }
13591
13592 /* Create and return a DIE for PARM which should be
13593 the representation of a generic type parameter.
13594 For instance, in the C++ front end, PARM would be a template parameter.
13595 ARG is the argument to PARM.
13596 EMIT_NAME_P, if true, means the DIE will have a DW_AT_name attribute set
13597 to the name of the PARM.
13598 PARENT_DIE is the parent DIE which the new created DIE should be added to,
13599 as a child node. */
13600
13601 static dw_die_ref
13602 generic_parameter_die (tree parm, tree arg,
13603 bool emit_name_p,
13604 dw_die_ref parent_die)
13605 {
13606 dw_die_ref tmpl_die = NULL;
13607 const char *name = NULL;
13608
13609 /* C++2a accepts class literals as template parameters, and var
13610 decls with initializers represent them. The VAR_DECLs would be
13611 rejected, but we can take the DECL_INITIAL constructor and
13612 attempt to expand it. */
13613 if (arg && VAR_P (arg))
13614 arg = DECL_INITIAL (arg);
13615
13616 if (!parm || !DECL_NAME (parm) || !arg)
13617 return NULL;
13618
13619 /* We support non-type generic parameters and arguments,
13620 type generic parameters and arguments, as well as
13621 generic generic parameters (a.k.a. template template parameters in C++)
13622 and arguments. */
13623 if (TREE_CODE (parm) == PARM_DECL)
13624 /* PARM is a nontype generic parameter */
13625 tmpl_die = new_die (DW_TAG_template_value_param, parent_die, parm);
13626 else if (TREE_CODE (parm) == TYPE_DECL)
13627 /* PARM is a type generic parameter. */
13628 tmpl_die = new_die (DW_TAG_template_type_param, parent_die, parm);
13629 else if (lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13630 /* PARM is a generic generic parameter.
13631 Its DIE is a GNU extension. It shall have a
13632 DW_AT_name attribute to represent the name of the template template
13633 parameter, and a DW_AT_GNU_template_name attribute to represent the
13634 name of the template template argument. */
13635 tmpl_die = new_die (DW_TAG_GNU_template_template_param,
13636 parent_die, parm);
13637 else
13638 gcc_unreachable ();
13639
13640 if (tmpl_die)
13641 {
13642 tree tmpl_type;
13643
13644 /* If PARM is a generic parameter pack, it means we are
13645 emitting debug info for a template argument pack element.
13646 In other terms, ARG is a template argument pack element.
13647 In that case, we don't emit any DW_AT_name attribute for
13648 the die. */
13649 if (emit_name_p)
13650 {
13651 name = IDENTIFIER_POINTER (DECL_NAME (parm));
13652 gcc_assert (name);
13653 add_AT_string (tmpl_die, DW_AT_name, name);
13654 }
13655
13656 if (!lang_hooks.decls.generic_generic_parameter_decl_p (parm))
13657 {
13658 /* DWARF3, 5.6.8 says if PARM is a non-type generic parameter
13659 TMPL_DIE should have a child DW_AT_type attribute that is set
13660 to the type of the argument to PARM, which is ARG.
13661 If PARM is a type generic parameter, TMPL_DIE should have a
13662 child DW_AT_type that is set to ARG. */
13663 tmpl_type = TYPE_P (arg) ? arg : TREE_TYPE (arg);
13664 add_type_attribute (tmpl_die, tmpl_type,
13665 (TREE_THIS_VOLATILE (tmpl_type)
13666 ? TYPE_QUAL_VOLATILE : TYPE_UNQUALIFIED),
13667 false, parent_die);
13668 }
13669 else
13670 {
13671 /* So TMPL_DIE is a DIE representing a generic generic template
13672 parameter, a.k.a. a template template parameter in C++,
13673 and ARG is a template. */
13674
13675 /* The DW_AT_GNU_template_name attribute of the DIE must be set
13676 to the name of the argument. */
13677 name = dwarf2_name (TYPE_P (arg) ? TYPE_NAME (arg) : arg, 1);
13678 if (name)
13679 add_AT_string (tmpl_die, DW_AT_GNU_template_name, name);
13680 }
13681
13682 if (TREE_CODE (parm) == PARM_DECL)
13683 /* So PARM is a non-type generic parameter.
13684 DWARF3 5.6.8 says we must set a DW_AT_const_value child
13685 attribute of TMPL_DIE whose value represents the value
13686 of ARG.
13687 We must be careful here:
13688 the value of ARG might reference some function decls.
13689 We might currently be emitting debug info for a generic
13690 type, and since types are emitted before function decls, we
13691 don't know whether the function decls referenced by ARG will
13692 actually be emitted after the cgraph computations.
13693 So we must defer the generation of the DW_AT_const_value to
13694 after cgraph is ready. */
13695 append_entry_to_tmpl_value_parm_die_table (tmpl_die, arg);
13696 }
13697
13698 return tmpl_die;
13699 }
13700
13701 /* Generate and return a DW_TAG_GNU_template_parameter_pack DIE representing
13702 PARM_PACK, which must be a template parameter pack. The returned DIE
13703 will be a child DIE of PARENT_DIE. */
13704
13705 static dw_die_ref
13706 template_parameter_pack_die (tree parm_pack,
13707 tree parm_pack_args,
13708 dw_die_ref parent_die)
13709 {
13710 dw_die_ref die;
13711 int j;
13712
13713 gcc_assert (parent_die && parm_pack);
13714
13715 die = new_die (DW_TAG_GNU_template_parameter_pack, parent_die, parm_pack);
13716 add_name_and_src_coords_attributes (die, parm_pack);
13717 for (j = 0; j < TREE_VEC_LENGTH (parm_pack_args); j++)
13718 generic_parameter_die (parm_pack,
13719 TREE_VEC_ELT (parm_pack_args, j),
13720 false /* Don't emit DW_AT_name */,
13721 die);
13722 return die;
13723 }
13724
13725 /* Return the DBX register number described by a given RTL node. */
13726
13727 static unsigned int
13728 dbx_reg_number (const_rtx rtl)
13729 {
13730 unsigned regno = REGNO (rtl);
13731
13732 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
13733
13734 #ifdef LEAF_REG_REMAP
13735 if (crtl->uses_only_leaf_regs)
13736 {
13737 int leaf_reg = LEAF_REG_REMAP (regno);
13738 if (leaf_reg != -1)
13739 regno = (unsigned) leaf_reg;
13740 }
13741 #endif
13742
13743 regno = DBX_REGISTER_NUMBER (regno);
13744 gcc_assert (regno != INVALID_REGNUM);
13745 return regno;
13746 }
13747
13748 /* Optionally add a DW_OP_piece term to a location description expression.
13749 DW_OP_piece is only added if the location description expression does
13750 not already end with DW_OP_piece. */
13751
13752 static void
13753 add_loc_descr_op_piece (dw_loc_descr_ref *list_head, int size)
13754 {
13755 dw_loc_descr_ref loc;
13756
13757 if (*list_head != NULL)
13758 {
13759 /* Find the end of the chain. */
13760 for (loc = *list_head; loc->dw_loc_next != NULL; loc = loc->dw_loc_next)
13761 ;
13762
13763 if (loc->dw_loc_opc != DW_OP_piece)
13764 loc->dw_loc_next = new_loc_descr (DW_OP_piece, size, 0);
13765 }
13766 }
13767
13768 /* Return a location descriptor that designates a machine register or
13769 zero if there is none. */
13770
13771 static dw_loc_descr_ref
13772 reg_loc_descriptor (rtx rtl, enum var_init_status initialized)
13773 {
13774 rtx regs;
13775
13776 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
13777 return 0;
13778
13779 /* We only use "frame base" when we're sure we're talking about the
13780 post-prologue local stack frame. We do this by *not* running
13781 register elimination until this point, and recognizing the special
13782 argument pointer and soft frame pointer rtx's.
13783 Use DW_OP_fbreg offset DW_OP_stack_value in this case. */
13784 if ((rtl == arg_pointer_rtx || rtl == frame_pointer_rtx)
13785 && eliminate_regs (rtl, VOIDmode, NULL_RTX) != rtl)
13786 {
13787 dw_loc_descr_ref result = NULL;
13788
13789 if (dwarf_version >= 4 || !dwarf_strict)
13790 {
13791 result = mem_loc_descriptor (rtl, GET_MODE (rtl), VOIDmode,
13792 initialized);
13793 if (result)
13794 add_loc_descr (&result,
13795 new_loc_descr (DW_OP_stack_value, 0, 0));
13796 }
13797 return result;
13798 }
13799
13800 regs = targetm.dwarf_register_span (rtl);
13801
13802 if (REG_NREGS (rtl) > 1 || regs)
13803 return multiple_reg_loc_descriptor (rtl, regs, initialized);
13804 else
13805 {
13806 unsigned int dbx_regnum = dbx_reg_number (rtl);
13807 if (dbx_regnum == IGNORED_DWARF_REGNUM)
13808 return 0;
13809 return one_reg_loc_descriptor (dbx_regnum, initialized);
13810 }
13811 }
13812
13813 /* Return a location descriptor that designates a machine register for
13814 a given hard register number. */
13815
13816 static dw_loc_descr_ref
13817 one_reg_loc_descriptor (unsigned int regno, enum var_init_status initialized)
13818 {
13819 dw_loc_descr_ref reg_loc_descr;
13820
13821 if (regno <= 31)
13822 reg_loc_descr
13823 = new_loc_descr ((enum dwarf_location_atom) (DW_OP_reg0 + regno), 0, 0);
13824 else
13825 reg_loc_descr = new_loc_descr (DW_OP_regx, regno, 0);
13826
13827 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
13828 add_loc_descr (&reg_loc_descr, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13829
13830 return reg_loc_descr;
13831 }
13832
13833 /* Given an RTL of a register, return a location descriptor that
13834 designates a value that spans more than one register. */
13835
13836 static dw_loc_descr_ref
13837 multiple_reg_loc_descriptor (rtx rtl, rtx regs,
13838 enum var_init_status initialized)
13839 {
13840 int size, i;
13841 dw_loc_descr_ref loc_result = NULL;
13842
13843 /* Simple, contiguous registers. */
13844 if (regs == NULL_RTX)
13845 {
13846 unsigned reg = REGNO (rtl);
13847 int nregs;
13848
13849 #ifdef LEAF_REG_REMAP
13850 if (crtl->uses_only_leaf_regs)
13851 {
13852 int leaf_reg = LEAF_REG_REMAP (reg);
13853 if (leaf_reg != -1)
13854 reg = (unsigned) leaf_reg;
13855 }
13856 #endif
13857
13858 gcc_assert ((unsigned) DBX_REGISTER_NUMBER (reg) == dbx_reg_number (rtl));
13859 nregs = REG_NREGS (rtl);
13860
13861 /* At present we only track constant-sized pieces. */
13862 if (!GET_MODE_SIZE (GET_MODE (rtl)).is_constant (&size))
13863 return NULL;
13864 size /= nregs;
13865
13866 loc_result = NULL;
13867 while (nregs--)
13868 {
13869 dw_loc_descr_ref t;
13870
13871 t = one_reg_loc_descriptor (DBX_REGISTER_NUMBER (reg),
13872 VAR_INIT_STATUS_INITIALIZED);
13873 add_loc_descr (&loc_result, t);
13874 add_loc_descr_op_piece (&loc_result, size);
13875 ++reg;
13876 }
13877 return loc_result;
13878 }
13879
13880 /* Now onto stupid register sets in non-contiguous locations. */
13881
13882 gcc_assert (GET_CODE (regs) == PARALLEL);
13883
13884 /* At present we only track constant-sized pieces. */
13885 if (!GET_MODE_SIZE (GET_MODE (XVECEXP (regs, 0, 0))).is_constant (&size))
13886 return NULL;
13887 loc_result = NULL;
13888
13889 for (i = 0; i < XVECLEN (regs, 0); ++i)
13890 {
13891 dw_loc_descr_ref t;
13892
13893 t = one_reg_loc_descriptor (dbx_reg_number (XVECEXP (regs, 0, i)),
13894 VAR_INIT_STATUS_INITIALIZED);
13895 add_loc_descr (&loc_result, t);
13896 add_loc_descr_op_piece (&loc_result, size);
13897 }
13898
13899 if (loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
13900 add_loc_descr (&loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
13901 return loc_result;
13902 }
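/* For instance, a 16-byte value spread over two consecutive 8-byte hard
   registers is described by the contiguous case above as
   DW_OP_reg<r> DW_OP_piece 8 DW_OP_reg<r+1> DW_OP_piece 8,
   where <r> stands for the mapped DWARF register number (DW_OP_regx is
   used instead once that number exceeds 31).  */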
13903
13904 static unsigned long size_of_int_loc_descriptor (HOST_WIDE_INT);
13905
13906 /* Return a location descriptor that designates a constant i,
13907 as a compound operation from constant (i >> shift), constant shift
13908 and DW_OP_shl. */
13909
13910 static dw_loc_descr_ref
13911 int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
13912 {
13913 dw_loc_descr_ref ret = int_loc_descriptor (i >> shift);
13914 add_loc_descr (&ret, int_loc_descriptor (shift));
13915 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
13916 return ret;
13917 }
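/* For instance, int_loc_descriptor below uses this for i = 0x48000000,
   i.e. 18 << 26: the result is DW_OP_lit18 DW_OP_lit26 DW_OP_shl, only
   3 bytes, versus the 5 bytes a plain DW_OP_const4u would take.  */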
13918
13919 /* Return a location descriptor that designates constant POLY_I. */
13920
13921 static dw_loc_descr_ref
13922 int_loc_descriptor (poly_int64 poly_i)
13923 {
13924 enum dwarf_location_atom op;
13925
13926 HOST_WIDE_INT i;
13927 if (!poly_i.is_constant (&i))
13928 {
13929 /* Create location descriptions for the non-constant part and
13930 add any constant offset at the end. */
13931 dw_loc_descr_ref ret = NULL;
13932 HOST_WIDE_INT constant = poly_i.coeffs[0];
13933 for (unsigned int j = 1; j < NUM_POLY_INT_COEFFS; ++j)
13934 {
13935 HOST_WIDE_INT coeff = poly_i.coeffs[j];
13936 if (coeff != 0)
13937 {
13938 dw_loc_descr_ref start = ret;
13939 unsigned int factor;
13940 int bias;
13941 unsigned int regno = targetm.dwarf_poly_indeterminate_value
13942 (j, &factor, &bias);
13943
13944 /* Add COEFF * ((REGNO / FACTOR) - BIAS) to the value:
13945 add COEFF * (REGNO / FACTOR) now and subtract
13946 COEFF * BIAS from the final constant part. */
13947 constant -= coeff * bias;
13948 add_loc_descr (&ret, new_reg_loc_descr (regno, 0));
13949 if (coeff % factor == 0)
13950 coeff /= factor;
13951 else
13952 {
13953 int amount = exact_log2 (factor);
13954 gcc_assert (amount >= 0);
13955 add_loc_descr (&ret, int_loc_descriptor (amount));
13956 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
13957 }
13958 if (coeff != 1)
13959 {
13960 add_loc_descr (&ret, int_loc_descriptor (coeff));
13961 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
13962 }
13963 if (start)
13964 add_loc_descr (&ret, new_loc_descr (DW_OP_plus, 0, 0));
13965 }
13966 }
13967 loc_descr_plus_const (&ret, constant);
13968 return ret;
13969 }
13970
13971 /* Pick the smallest representation of a constant, rather than just
13972 defaulting to the LEB encoding. */
13973 if (i >= 0)
13974 {
13975 int clz = clz_hwi (i);
13976 int ctz = ctz_hwi (i);
13977 if (i <= 31)
13978 op = (enum dwarf_location_atom) (DW_OP_lit0 + i);
13979 else if (i <= 0xff)
13980 op = DW_OP_const1u;
13981 else if (i <= 0xffff)
13982 op = DW_OP_const2u;
13983 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
13984 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
13985 /* DW_OP_litX DW_OP_litY DW_OP_shl takes just 3 bytes and
13986 DW_OP_litX DW_OP_const1u Y DW_OP_shl takes just 4 bytes,
13987 while DW_OP_const4u is 5 bytes. */
13988 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 5);
13989 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
13990 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
13991 /* DW_OP_const1u X DW_OP_litY DW_OP_shl takes just 4 bytes,
13992 while DW_OP_const4u is 5 bytes. */
13993 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
13994
13995 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
13996 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
13997 <= 4)
13998 {
13999 /* As i >= 2**31, the double cast above will yield a negative number.
14000 Since wrapping is defined in DWARF expressions we can output big
14001 positive integers as small negative ones, regardless of the size
14002 of host wide ints.
14003
14004 Here, since the evaluator will handle 32-bit values and since i >=
14005 2**31, we know it's going to be interpreted as a negative literal:
14006 store it this way if we can do better than 5 bytes this way. */
14007 return int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14008 }
14009 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14010 op = DW_OP_const4u;
14011
14012 /* Past this point, i >= 0x100000000 and thus DW_OP_constu will take at
14013 least 6 bytes: see if we can do better before falling back to it. */
14014 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14015 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14016 /* DW_OP_const1u X DW_OP_const1u Y DW_OP_shl takes just 5 bytes. */
14017 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 8);
14018 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14019 && clz + 16 + (size_of_uleb128 (i) > 5 ? 255 : 31)
14020 >= HOST_BITS_PER_WIDE_INT)
14021 /* DW_OP_const2u X DW_OP_litY DW_OP_shl takes just 5 bytes,
14022 DW_OP_const2u X DW_OP_const1u Y DW_OP_shl takes 6 bytes. */
14023 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 16);
14024 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14025 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14026 && size_of_uleb128 (i) > 6)
14027 /* DW_OP_const4u X DW_OP_litY DW_OP_shl takes just 7 bytes. */
14028 return int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT - clz - 32);
14029 else
14030 op = DW_OP_constu;
14031 }
14032 else
14033 {
14034 if (i >= -0x80)
14035 op = DW_OP_const1s;
14036 else if (i >= -0x8000)
14037 op = DW_OP_const2s;
14038 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14039 {
14040 if (size_of_int_loc_descriptor (i) < 5)
14041 {
14042 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14043 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14044 return ret;
14045 }
14046 op = DW_OP_const4s;
14047 }
14048 else
14049 {
14050 if (size_of_int_loc_descriptor (i)
14051 < (unsigned long) 1 + size_of_sleb128 (i))
14052 {
14053 dw_loc_descr_ref ret = int_loc_descriptor (-i);
14054 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
14055 return ret;
14056 }
14057 op = DW_OP_consts;
14058 }
14059 }
14060
14061 return new_loc_descr (op, i, 0);
14062 }
14063
14064 /* Likewise, for unsigned constants. */
14065
14066 static dw_loc_descr_ref
14067 uint_loc_descriptor (unsigned HOST_WIDE_INT i)
14068 {
14069 const unsigned HOST_WIDE_INT max_int = INTTYPE_MAXIMUM (HOST_WIDE_INT);
14070 const unsigned HOST_WIDE_INT max_uint
14071 = INTTYPE_MAXIMUM (unsigned HOST_WIDE_INT);
14072
14073 /* If possible, use the clever signed constants handling. */
14074 if (i <= max_int)
14075 return int_loc_descriptor ((HOST_WIDE_INT) i);
14076
14077 /* Here, we are left with positive numbers that cannot be represented as
14078 HOST_WIDE_INT, i.e.:
14079 max (HOST_WIDE_INT) < i <= max (unsigned HOST_WIDE_INT)
14080
14081 Using a DW_OP_const4u/DW_OP_const8u operation to encode them consumes a
14082 lot of bytes, whereas it may be better to output a negative integer:
14083 thanks to integer wrapping, we know that:
14084 x = x - 2 ** (8 * DWARF2_ADDR_SIZE)
14085 = x - 2 * (max (HOST_WIDE_INT) + 1)
14086 So numbers close to max (unsigned HOST_WIDE_INT) can be represented as
14087 small negative integers. Let's try that in cases where it will clearly
14088 improve the encoding: there is no gain in turning DW_OP_const4u into
14089 DW_OP_const4s. */
14090 if (DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT
14091 && ((DWARF2_ADDR_SIZE == 4 && i > max_uint - 0x8000)
14092 || (DWARF2_ADDR_SIZE == 8 && i > max_uint - 0x80000000)))
14093 {
14094 const unsigned HOST_WIDE_INT first_shift = i - max_int - 1;
14095
14096 /* Now, -1 < first_shift <= max (HOST_WIDE_INT)
14097 i.e. 0 <= first_shift <= max (HOST_WIDE_INT). */
14098 const HOST_WIDE_INT second_shift
14099 = (HOST_WIDE_INT) first_shift - (HOST_WIDE_INT) max_int - 1;
14100
14101 /* So we finally have:
14102 -max (HOST_WIDE_INT) - 1 <= second_shift <= -1.
14103 i.e. min (HOST_WIDE_INT) <= second_shift < 0. */
14104 return int_loc_descriptor (second_shift);
14105 }
14106
14107 /* Last chance: fallback to a simple constant operation. */
14108 return new_loc_descr
14109 ((HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14110 ? DW_OP_const4u
14111 : DW_OP_const8u,
14112 i, 0);
14113 }
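/* For example, when DWARF2_ADDR_SIZE * 8 == HOST_BITS_PER_WIDE_INT == 64,
   the wrapping trick above lets 0xffffffffffffffff be emitted as
   DW_OP_const1s -1 (2 bytes) rather than DW_OP_const8u with a full
   8-byte operand (9 bytes).  */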
14114
14115 /* Generate and return a location description that computes the unsigned
14116 comparison of the two stack top entries (a OP b where b is the top-most
14117 entry and a is the second one). The KIND of comparison can be LT_EXPR,
14118 LE_EXPR, GT_EXPR or GE_EXPR. */
14119
14120 static dw_loc_descr_ref
14121 uint_comparison_loc_list (enum tree_code kind)
14122 {
14123 enum dwarf_location_atom op, flip_op;
14124 dw_loc_descr_ref ret, bra_node, jmp_node, tmp;
14125
14126 switch (kind)
14127 {
14128 case LT_EXPR:
14129 op = DW_OP_lt;
14130 break;
14131 case LE_EXPR:
14132 op = DW_OP_le;
14133 break;
14134 case GT_EXPR:
14135 op = DW_OP_gt;
14136 break;
14137 case GE_EXPR:
14138 op = DW_OP_ge;
14139 break;
14140 default:
14141 gcc_unreachable ();
14142 }
14143
14144 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14145 jmp_node = new_loc_descr (DW_OP_skip, 0, 0);
14146
14147 /* Until DWARFv4, operations all work on signed integers. It is nevertheless
14148 possible to perform unsigned comparisons: we just have to distinguish
14149 two cases:
14150
14151 1. when a and b have the same sign (as signed integers); then we should
14152 return: a OP(signed) b;
14153
14154 2. when a is a negative signed integer while b is a positive one, then a
14155 is a greater unsigned integer than b (and likewise with a and b's roles
14156 flipped); in that case the signed comparison has to be flipped.
14157
14158 So first, compare the sign of the two operands. */
14159 ret = new_loc_descr (DW_OP_over, 0, 0);
14160 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
14161 add_loc_descr (&ret, new_loc_descr (DW_OP_xor, 0, 0));
14162 /* If they have different signs (i.e. they have different sign bits), then
14163 the stack top value now has the sign bit set and is thus smaller than
14164 zero. */
14165 add_loc_descr (&ret, new_loc_descr (DW_OP_lit0, 0, 0));
14166 add_loc_descr (&ret, new_loc_descr (DW_OP_lt, 0, 0));
14167 add_loc_descr (&ret, bra_node);
14168
14169 /* We are in case 1. At this point, we know both operands have the same
14170 sign, so it's safe to use the built-in signed comparison. */
14171 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14172 add_loc_descr (&ret, jmp_node);
14173
14174 /* We are in case 2. Here, we know both operands do not have the same sign,
14175 so we have to flip the signed comparison. */
14176 flip_op = (kind == LT_EXPR || kind == LE_EXPR) ? DW_OP_gt : DW_OP_lt;
14177 tmp = new_loc_descr (flip_op, 0, 0);
14178 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14179 bra_node->dw_loc_oprnd1.v.val_loc = tmp;
14180 add_loc_descr (&ret, tmp);
14181
14182 /* This dummy operation is necessary to make the two branches join. */
14183 tmp = new_loc_descr (DW_OP_nop, 0, 0);
14184 jmp_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
14185 jmp_node->dw_loc_oprnd1.v.val_loc = tmp;
14186 add_loc_descr (&ret, tmp);
14187
14188 return ret;
14189 }
14190
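/* For example, uint_comparison_loc_list (LT_EXPR) produces
       DW_OP_over DW_OP_over DW_OP_xor DW_OP_lit0 DW_OP_lt DW_OP_bra <L1>
       DW_OP_lt DW_OP_skip <L2>
   L1: DW_OP_gt
   L2: DW_OP_nop
   i.e. a plain signed DW_OP_lt when the signs agree and the flipped
   DW_OP_gt when they differ.  */
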
14191 /* Likewise, but takes the location description lists (might be destructive on
14192 them). Return NULL if either is NULL or if concatenation fails. */
14193
14194 static dw_loc_list_ref
14195 loc_list_from_uint_comparison (dw_loc_list_ref left, dw_loc_list_ref right,
14196 enum tree_code kind)
14197 {
14198 if (left == NULL || right == NULL)
14199 return NULL;
14200
14201 add_loc_list (&left, right);
14202 if (left == NULL)
14203 return NULL;
14204
14205 add_loc_descr_to_each (left, uint_comparison_loc_list (kind));
14206 return left;
14207 }
14208
14209 /* Return size_of_locs (int_shift_loc_descriptor (i, shift))
14210 without actually allocating it. */
14211
14212 static unsigned long
14213 size_of_int_shift_loc_descriptor (HOST_WIDE_INT i, int shift)
14214 {
14215 return size_of_int_loc_descriptor (i >> shift)
14216 + size_of_int_loc_descriptor (shift)
14217 + 1;
14218 }
14219
14220 /* Return size_of_locs (int_loc_descriptor (i)) without
14221 actually allocating it. */
14222
14223 static unsigned long
14224 size_of_int_loc_descriptor (HOST_WIDE_INT i)
14225 {
14226 unsigned long s;
14227
14228 if (i >= 0)
14229 {
14230 int clz, ctz;
14231 if (i <= 31)
14232 return 1;
14233 else if (i <= 0xff)
14234 return 2;
14235 else if (i <= 0xffff)
14236 return 3;
14237 clz = clz_hwi (i);
14238 ctz = ctz_hwi (i);
14239 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 5
14240 && clz + 5 + 255 >= HOST_BITS_PER_WIDE_INT)
14241 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14242 - clz - 5);
14243 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14244 && clz + 8 + 31 >= HOST_BITS_PER_WIDE_INT)
14245 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14246 - clz - 8);
14247 else if (DWARF2_ADDR_SIZE == 4 && i > 0x7fffffff
14248 && size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i)
14249 <= 4)
14250 return size_of_int_loc_descriptor ((HOST_WIDE_INT) (int32_t) i);
14251 else if (HOST_BITS_PER_WIDE_INT == 32 || i <= 0xffffffff)
14252 return 5;
14253 s = size_of_uleb128 ((unsigned HOST_WIDE_INT) i);
14254 if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 8
14255 && clz + 8 + 255 >= HOST_BITS_PER_WIDE_INT)
14256 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14257 - clz - 8);
14258 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 16
14259 && clz + 16 + (s > 5 ? 255 : 31) >= HOST_BITS_PER_WIDE_INT)
14260 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14261 - clz - 16);
14262 else if (clz + ctz >= HOST_BITS_PER_WIDE_INT - 32
14263 && clz + 32 + 31 >= HOST_BITS_PER_WIDE_INT
14264 && s > 6)
14265 return size_of_int_shift_loc_descriptor (i, HOST_BITS_PER_WIDE_INT
14266 - clz - 32);
14267 else
14268 return 1 + s;
14269 }
14270 else
14271 {
14272 if (i >= -0x80)
14273 return 2;
14274 else if (i >= -0x8000)
14275 return 3;
14276 else if (HOST_BITS_PER_WIDE_INT == 32 || i >= -0x80000000)
14277 {
14278 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14279 {
14280 s = size_of_int_loc_descriptor (-i) + 1;
14281 if (s < 5)
14282 return s;
14283 }
14284 return 5;
14285 }
14286 else
14287 {
14288 unsigned long r = 1 + size_of_sleb128 (i);
14289 if (-(unsigned HOST_WIDE_INT) i != (unsigned HOST_WIDE_INT) i)
14290 {
14291 s = size_of_int_loc_descriptor (-i) + 1;
14292 if (s < r)
14293 return s;
14294 }
14295 return r;
14296 }
14297 }
14298 }
14299
14300 /* Return a location description representing the "address" of an integer
14301 value. This can appear only as a toplevel expression. */
14302
14303 static dw_loc_descr_ref
14304 address_of_int_loc_descriptor (int size, HOST_WIDE_INT i)
14305 {
14306 int litsize;
14307 dw_loc_descr_ref loc_result = NULL;
14308
14309 if (!(dwarf_version >= 4 || !dwarf_strict))
14310 return NULL;
14311
14312 litsize = size_of_int_loc_descriptor (i);
14313 /* Determine if DW_OP_stack_value or DW_OP_implicit_value
14314 is more compact. For DW_OP_stack_value we need:
14315 litsize + 1 (DW_OP_stack_value)
14316 and for DW_OP_implicit_value:
14317 1 (DW_OP_implicit_value) + 1 (length) + size. */
14318 if ((int) DWARF2_ADDR_SIZE >= size && litsize + 1 <= 1 + 1 + size)
14319 {
14320 loc_result = int_loc_descriptor (i);
14321 add_loc_descr (&loc_result,
14322 new_loc_descr (DW_OP_stack_value, 0, 0));
14323 return loc_result;
14324 }
14325
14326 loc_result = new_loc_descr (DW_OP_implicit_value,
14327 size, 0);
14328 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
14329 loc_result->dw_loc_oprnd2.v.val_int = i;
14330 return loc_result;
14331 }
14332
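/* For example, for i == 5 and size == 4 (assuming an address size of at
   least 4), litsize is 1 (DW_OP_lit5), so DW_OP_lit5 DW_OP_stack_value
   (2 bytes) is preferred over DW_OP_implicit_value 4 <4 data bytes>
   (6 bytes).  */
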
14333 /* Return a location descriptor that designates a base+offset location. */
14334
14335 static dw_loc_descr_ref
14336 based_loc_descr (rtx reg, poly_int64 offset,
14337 enum var_init_status initialized)
14338 {
14339 unsigned int regno;
14340 dw_loc_descr_ref result;
14341 dw_fde_ref fde = cfun->fde;
14342
14343 /* We only use "frame base" when we're sure we're talking about the
14344 post-prologue local stack frame. We do this by *not* running
14345 register elimination until this point, and recognizing the special
14346 argument pointer and soft frame pointer rtx's. */
14347 if (reg == arg_pointer_rtx || reg == frame_pointer_rtx)
14348 {
14349 rtx elim = (ira_use_lra_p
14350 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
14351 : eliminate_regs (reg, VOIDmode, NULL_RTX));
14352
14353 if (elim != reg)
14354 {
14355 /* Allow hard frame pointer here even if frame pointer
14356 isn't used since hard frame pointer is encoded with
14357 DW_OP_fbreg which uses the DW_AT_frame_base attribute,
14358 not hard frame pointer directly. */
14359 elim = strip_offset_and_add (elim, &offset);
14360 gcc_assert (elim == hard_frame_pointer_rtx
14361 || elim == stack_pointer_rtx);
14362
14363 /* If drap register is used to align stack, use frame
14364 pointer + offset to access stack variables. If stack
14365 is aligned without drap, use stack pointer + offset to
14366 access stack variables. */
14367 if (crtl->stack_realign_tried
14368 && reg == frame_pointer_rtx)
14369 {
14370 int base_reg
14371 = DWARF_FRAME_REGNUM ((fde && fde->drap_reg != INVALID_REGNUM)
14372 ? HARD_FRAME_POINTER_REGNUM
14373 : REGNO (elim));
14374 return new_reg_loc_descr (base_reg, offset);
14375 }
14376
14377 gcc_assert (frame_pointer_fb_offset_valid);
14378 offset += frame_pointer_fb_offset;
14379 HOST_WIDE_INT const_offset;
14380 if (offset.is_constant (&const_offset))
14381 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14382 else
14383 {
14384 dw_loc_descr_ref ret = new_loc_descr (DW_OP_fbreg, 0, 0);
14385 loc_descr_plus_const (&ret, offset);
14386 return ret;
14387 }
14388 }
14389 }
14390
14391 regno = REGNO (reg);
14392 #ifdef LEAF_REG_REMAP
14393 if (crtl->uses_only_leaf_regs)
14394 {
14395 int leaf_reg = LEAF_REG_REMAP (regno);
14396 if (leaf_reg != -1)
14397 regno = (unsigned) leaf_reg;
14398 }
14399 #endif
14400 regno = DWARF_FRAME_REGNUM (regno);
14401
14402 HOST_WIDE_INT const_offset;
14403 if (!optimize && fde
14404 && (fde->drap_reg == regno || fde->vdrap_reg == regno)
14405 && offset.is_constant (&const_offset))
14406 {
14407 /* Use cfa+offset to represent the location of arguments passed
14408 on the stack when drap is used to align the stack.
14409 Only do this when not optimizing; for optimized code var-tracking
14410 is supposed to track where the arguments live, and the register
14411 used as vdrap or drap in some spot might be used for something
14412 else in another part of the routine. */
14413 return new_loc_descr (DW_OP_fbreg, const_offset, 0);
14414 }
14415
14416 result = new_reg_loc_descr (regno, offset);
14417
14418 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
14419 add_loc_descr (&result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
14420
14421 return result;
14422 }
14423
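/* Illustrative examples, assuming x86_64 DWARF register numbering: a
   frame_pointer_rtx-based address that eliminates to
   hard_frame_pointer_rtx + 8 ends up as DW_OP_fbreg
   <8 + frame_pointer_fb_offset>, relying on DW_AT_frame_base, whereas a
   plain base register such as the stack pointer (DWARF register 7) yields
   DW_OP_breg7 <offset>.  */
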
14424 /* Return true if this RTL expression describes a base+offset calculation. */
14425
14426 static inline int
14427 is_based_loc (const_rtx rtl)
14428 {
14429 return (GET_CODE (rtl) == PLUS
14430 && ((REG_P (XEXP (rtl, 0))
14431 && REGNO (XEXP (rtl, 0)) < FIRST_PSEUDO_REGISTER
14432 && CONST_INT_P (XEXP (rtl, 1)))));
14433 }
14434
14435 /* Try to handle TLS MEMs, for which mem_loc_descriptor on XEXP (mem, 0)
14436 failed. */
14437
14438 static dw_loc_descr_ref
14439 tls_mem_loc_descriptor (rtx mem)
14440 {
14441 tree base;
14442 dw_loc_descr_ref loc_result;
14443
14444 if (MEM_EXPR (mem) == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
14445 return NULL;
14446
14447 base = get_base_address (MEM_EXPR (mem));
14448 if (base == NULL
14449 || !VAR_P (base)
14450 || !DECL_THREAD_LOCAL_P (base))
14451 return NULL;
14452
14453 loc_result = loc_descriptor_from_tree (MEM_EXPR (mem), 1, NULL);
14454 if (loc_result == NULL)
14455 return NULL;
14456
14457 if (maybe_ne (MEM_OFFSET (mem), 0))
14458 loc_descr_plus_const (&loc_result, MEM_OFFSET (mem));
14459
14460 return loc_result;
14461 }
14462
14463 /* Output debug info about the reason why we failed to expand an expression
14464 as a DWARF expression. */
14465
14466 static void
14467 expansion_failed (tree expr, rtx rtl, char const *reason)
14468 {
14469 if (dump_file && (dump_flags & TDF_DETAILS))
14470 {
14471 fprintf (dump_file, "Failed to expand as dwarf: ");
14472 if (expr)
14473 print_generic_expr (dump_file, expr, dump_flags);
14474 if (rtl)
14475 {
14476 fprintf (dump_file, "\n");
14477 print_rtl (dump_file, rtl);
14478 }
14479 fprintf (dump_file, "\nReason: %s\n", reason);
14480 }
14481 }
14482
14483 /* Helper function for const_ok_for_output. */
14484
14485 static bool
14486 const_ok_for_output_1 (rtx rtl)
14487 {
14488 if (targetm.const_not_ok_for_debug_p (rtl))
14489 {
14490 if (GET_CODE (rtl) != UNSPEC)
14491 {
14492 expansion_failed (NULL_TREE, rtl,
14493 "Expression rejected for debug by the backend.\n");
14494 return false;
14495 }
14496
14497 /* If delegitimize_address couldn't do anything with the UNSPEC, and
14498 the target hook doesn't explicitly allow it in debug info, assume
14499 we can't express it in the debug info. */
14500 /* Don't complain about TLS UNSPECs, those are just too hard to
14501 delegitimize. Note this could be a non-decl SYMBOL_REF such as
14502 one in a constant pool entry, so testing SYMBOL_REF_TLS_MODEL
14503 rather than DECL_THREAD_LOCAL_P is not just an optimization. */
14504 if (flag_checking
14505 && (XVECLEN (rtl, 0) == 0
14506 || GET_CODE (XVECEXP (rtl, 0, 0)) != SYMBOL_REF
14507 || SYMBOL_REF_TLS_MODEL (XVECEXP (rtl, 0, 0)) == TLS_MODEL_NONE))
14508 inform (current_function_decl
14509 ? DECL_SOURCE_LOCATION (current_function_decl)
14510 : UNKNOWN_LOCATION,
14511 #if NUM_UNSPEC_VALUES > 0
14512 "non-delegitimized UNSPEC %s (%d) found in variable location",
14513 ((XINT (rtl, 1) >= 0 && XINT (rtl, 1) < NUM_UNSPEC_VALUES)
14514 ? unspec_strings[XINT (rtl, 1)] : "unknown"),
14515 #else
14516 "non-delegitimized UNSPEC %d found in variable location",
14517 #endif
14518 XINT (rtl, 1));
14519 expansion_failed (NULL_TREE, rtl,
14520 "UNSPEC hasn't been delegitimized.\n");
14521 return false;
14522 }
14523
14524 if (CONST_POLY_INT_P (rtl))
14525 return false;
14526
14527 /* FIXME: Refer to PR60655. It is possible for simplification
14528 of rtl expressions in var tracking to produce such expressions.
14529 We should really identify / validate expressions
14530 enclosed in CONST that can be handled by assemblers on various
14531 targets and only handle legitimate cases here. */
14532 switch (GET_CODE (rtl))
14533 {
14534 case SYMBOL_REF:
14535 break;
14536 case NOT:
14537 case NEG:
14538 return false;
14539 case PLUS:
14540 {
14541 /* Make sure SYMBOL_REFs/UNSPECs appear in at most one of the
14542 two operands. */
14543 subrtx_var_iterator::array_type array;
14544 bool first = false;
14545 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14546 if (SYMBOL_REF_P (*iter)
14547 || LABEL_P (*iter)
14548 || GET_CODE (*iter) == UNSPEC)
14549 {
14550 first = true;
14551 break;
14552 }
14553 if (!first)
14554 return true;
14555 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
14556 if (SYMBOL_REF_P (*iter)
14557 || LABEL_P (*iter)
14558 || GET_CODE (*iter) == UNSPEC)
14559 return false;
14560 return true;
14561 }
14562 case MINUS:
14563 {
14564 /* Disallow negation of SYMBOL_REFs or UNSPECs when they
14565 appear in the second operand of MINUS. */
14566 subrtx_var_iterator::array_type array;
14567 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 1), ALL)
14568 if (SYMBOL_REF_P (*iter)
14569 || LABEL_P (*iter)
14570 || GET_CODE (*iter) == UNSPEC)
14571 return false;
14572 return true;
14573 }
14574 default:
14575 return true;
14576 }
14577
14578 if (CONSTANT_POOL_ADDRESS_P (rtl))
14579 {
14580 bool marked;
14581 get_pool_constant_mark (rtl, &marked);
14582 /* If all references to this pool constant were optimized away,
14583 it was not output and thus we can't represent it. */
14584 if (!marked)
14585 {
14586 expansion_failed (NULL_TREE, rtl,
14587 "Constant was removed from constant pool.\n");
14588 return false;
14589 }
14590 }
14591
14592 if (SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
14593 return false;
14594
14595 /* Avoid references to external symbols in debug info: on several targets
14596 the linker might even refuse to link when linking a shared library,
14597 and in many other cases the relocations for .debug_info/.debug_loc are
14598 dropped, so the address becomes zero anyway. Hidden symbols, guaranteed
14599 to be defined within the same shared library or executable, are fine. */
14600 if (SYMBOL_REF_EXTERNAL_P (rtl))
14601 {
14602 tree decl = SYMBOL_REF_DECL (rtl);
14603
14604 if (decl == NULL || !targetm.binds_local_p (decl))
14605 {
14606 expansion_failed (NULL_TREE, rtl,
14607 "Symbol not defined in current TU.\n");
14608 return false;
14609 }
14610 }
14611
14612 return true;
14613 }
14614
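/* For instance, (const (plus (symbol_ref "x") (const_int 16))) passes the
   checks above (assuming "x" is not TLS and binds locally), whereas
   (const (plus (symbol_ref "x") (symbol_ref "y"))) and
   (const (minus (const_int 16) (symbol_ref "x"))) are rejected, as is any
   NOT or NEG; mem_loc_descriptor may still handle a rejected CONST of
   NOT/NEG by applying DW_OP_not/DW_OP_neg to its operand.  */
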
14615 /* Return true if constant RTL can be emitted in DW_OP_addr or
14616 DW_AT_const_value. TLS SYMBOL_REFs, external SYMBOL_REFs or
14617 non-marked constant pool SYMBOL_REFs can't be referenced in it. */
14618
14619 static bool
14620 const_ok_for_output (rtx rtl)
14621 {
14622 if (GET_CODE (rtl) == SYMBOL_REF)
14623 return const_ok_for_output_1 (rtl);
14624
14625 if (GET_CODE (rtl) == CONST)
14626 {
14627 subrtx_var_iterator::array_type array;
14628 FOR_EACH_SUBRTX_VAR (iter, array, XEXP (rtl, 0), ALL)
14629 if (!const_ok_for_output_1 (*iter))
14630 return false;
14631 return true;
14632 }
14633
14634 return true;
14635 }
14636
14637 /* Return a reference to DW_TAG_base_type corresponding to MODE and UNSIGNEDP
14638 if possible, NULL otherwise. */
14639
14640 static dw_die_ref
14641 base_type_for_mode (machine_mode mode, bool unsignedp)
14642 {
14643 dw_die_ref type_die;
14644 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
14645
14646 if (type == NULL)
14647 return NULL;
14648 switch (TREE_CODE (type))
14649 {
14650 case INTEGER_TYPE:
14651 case REAL_TYPE:
14652 break;
14653 default:
14654 return NULL;
14655 }
14656 type_die = lookup_type_die (type);
14657 if (!type_die)
14658 type_die = modified_type_die (type, TYPE_UNQUALIFIED, false,
14659 comp_unit_die ());
14660 if (type_die == NULL || type_die->die_tag != DW_TAG_base_type)
14661 return NULL;
14662 return type_die;
14663 }
14664
14665 /* For an OP descriptor assumed to be in unsigned MODE, convert it to an
14666 unsigned type matching MODE, or, if MODE is narrower than or as wide as
14667 DWARF2_ADDR_SIZE, to an untyped value. Return NULL if the conversion is
14668 not possible. */
14669
14670 static dw_loc_descr_ref
14671 convert_descriptor_to_mode (scalar_int_mode mode, dw_loc_descr_ref op)
14672 {
14673 machine_mode outer_mode = mode;
14674 dw_die_ref type_die;
14675 dw_loc_descr_ref cvt;
14676
14677 if (GET_MODE_SIZE (mode) <= DWARF2_ADDR_SIZE)
14678 {
14679 add_loc_descr (&op, new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0));
14680 return op;
14681 }
14682 type_die = base_type_for_mode (outer_mode, 1);
14683 if (type_die == NULL)
14684 return NULL;
14685 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14686 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14687 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14688 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14689 add_loc_descr (&op, cvt);
14690 return op;
14691 }
14692
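/* For example, after a typed 128-bit operation on a target with an 8-byte
   address size, this appends DW_OP_convert referencing the unsigned base
   type DIE for the 128-bit mode, whereas for a mode no wider than
   DWARF2_ADDR_SIZE it appends DW_OP_convert with operand 0, i.e. a
   conversion back to the generic untyped stack value (the DW_OP_GNU_convert
   extension is used instead for pre-DWARF 5, via dwarf_OP).  */
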
14693 /* Return location descriptor for comparison OP with operands OP0 and OP1. */
14694
14695 static dw_loc_descr_ref
14696 compare_loc_descriptor (enum dwarf_location_atom op, dw_loc_descr_ref op0,
14697 dw_loc_descr_ref op1)
14698 {
14699 dw_loc_descr_ref ret = op0;
14700 add_loc_descr (&ret, op1);
14701 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14702 if (STORE_FLAG_VALUE != 1)
14703 {
14704 add_loc_descr (&ret, int_loc_descriptor (STORE_FLAG_VALUE));
14705 add_loc_descr (&ret, new_loc_descr (DW_OP_mul, 0, 0));
14706 }
14707 return ret;
14708 }
14709
14710 /* Subroutine of scompare_loc_descriptor for the case in which we're
14711 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14712 and in which OP_MODE is bigger than DWARF2_ADDR_SIZE. */
14713
14714 static dw_loc_descr_ref
14715 scompare_loc_descriptor_wide (enum dwarf_location_atom op,
14716 scalar_int_mode op_mode,
14717 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14718 {
14719 dw_die_ref type_die = base_type_for_mode (op_mode, 0);
14720 dw_loc_descr_ref cvt;
14721
14722 if (type_die == NULL)
14723 return NULL;
14724 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14725 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14726 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14727 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14728 add_loc_descr (&op0, cvt);
14729 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14730 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14731 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14732 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14733 add_loc_descr (&op1, cvt);
14734 return compare_loc_descriptor (op, op0, op1);
14735 }
14736
14737 /* Subroutine of scompare_loc_descriptor for the case in which we're
14738 comparing two scalar integer operands OP0 and OP1 that have mode OP_MODE,
14739 and in which OP_MODE is smaller than DWARF2_ADDR_SIZE. */
14740
14741 static dw_loc_descr_ref
14742 scompare_loc_descriptor_narrow (enum dwarf_location_atom op, rtx rtl,
14743 scalar_int_mode op_mode,
14744 dw_loc_descr_ref op0, dw_loc_descr_ref op1)
14745 {
14746 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (op_mode)) * BITS_PER_UNIT;
14747 /* For eq/ne, if the operands are known to be zero-extended,
14748 there is no need to do the fancy shifting up. */
14749 if (op == DW_OP_eq || op == DW_OP_ne)
14750 {
14751 dw_loc_descr_ref last0, last1;
14752 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14753 ;
14754 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14755 ;
14756 /* deref_size zero extends, and for constants we can check
14757 whether they are zero extended or not. */
14758 if (((last0->dw_loc_opc == DW_OP_deref_size
14759 && last0->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14760 || (CONST_INT_P (XEXP (rtl, 0))
14761 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 0))
14762 == (INTVAL (XEXP (rtl, 0)) & GET_MODE_MASK (op_mode))))
14763 && ((last1->dw_loc_opc == DW_OP_deref_size
14764 && last1->dw_loc_oprnd1.v.val_int <= GET_MODE_SIZE (op_mode))
14765 || (CONST_INT_P (XEXP (rtl, 1))
14766 && (unsigned HOST_WIDE_INT) INTVAL (XEXP (rtl, 1))
14767 == (INTVAL (XEXP (rtl, 1)) & GET_MODE_MASK (op_mode)))))
14768 return compare_loc_descriptor (op, op0, op1);
14769
14770 /* EQ/NE comparison against constant in narrower type than
14771 DWARF2_ADDR_SIZE can be performed either as
14772 DW_OP_const1u <shift> DW_OP_shl DW_OP_const* <cst << shift>
14773 DW_OP_{eq,ne}
14774 or
14775 DW_OP_const*u <mode_mask> DW_OP_and DW_OP_const* <cst & mode_mask>
14776 DW_OP_{eq,ne}. Pick whatever is shorter. */
14777 if (CONST_INT_P (XEXP (rtl, 1))
14778 && GET_MODE_BITSIZE (op_mode) < HOST_BITS_PER_WIDE_INT
14779 && (size_of_int_loc_descriptor (shift) + 1
14780 + size_of_int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift)
14781 >= size_of_int_loc_descriptor (GET_MODE_MASK (op_mode)) + 1
14782 + size_of_int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14783 & GET_MODE_MASK (op_mode))))
14784 {
14785 add_loc_descr (&op0, int_loc_descriptor (GET_MODE_MASK (op_mode)));
14786 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14787 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1))
14788 & GET_MODE_MASK (op_mode));
14789 return compare_loc_descriptor (op, op0, op1);
14790 }
14791 }
14792 add_loc_descr (&op0, int_loc_descriptor (shift));
14793 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14794 if (CONST_INT_P (XEXP (rtl, 1)))
14795 op1 = int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift);
14796 else
14797 {
14798 add_loc_descr (&op1, int_loc_descriptor (shift));
14799 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14800 }
14801 return compare_loc_descriptor (op, op0, op1);
14802 }
14803
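/* As an example of the generic path above: comparing two HImode values
   with DW_OP_lt when DWARF2_ADDR_SIZE is 8 uses shift == 48 and produces
       <op0> DW_OP_const1u 48 DW_OP_shl <op1> DW_OP_const1u 48 DW_OP_shl
       DW_OP_lt
   while a constant second operand is pushed already shifted, via
   int_loc_descriptor (UINTVAL (XEXP (rtl, 1)) << shift).  */
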
14804 /* Return location descriptor for signed comparison OP RTL. */
14805
14806 static dw_loc_descr_ref
14807 scompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14808 machine_mode mem_mode)
14809 {
14810 machine_mode op_mode = GET_MODE (XEXP (rtl, 0));
14811 dw_loc_descr_ref op0, op1;
14812
14813 if (op_mode == VOIDmode)
14814 op_mode = GET_MODE (XEXP (rtl, 1));
14815 if (op_mode == VOIDmode)
14816 return NULL;
14817
14818 scalar_int_mode int_op_mode;
14819 if (dwarf_strict
14820 && dwarf_version < 5
14821 && (!is_a <scalar_int_mode> (op_mode, &int_op_mode)
14822 || GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE))
14823 return NULL;
14824
14825 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14826 VAR_INIT_STATUS_INITIALIZED);
14827 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14828 VAR_INIT_STATUS_INITIALIZED);
14829
14830 if (op0 == NULL || op1 == NULL)
14831 return NULL;
14832
14833 if (is_a <scalar_int_mode> (op_mode, &int_op_mode))
14834 {
14835 if (GET_MODE_SIZE (int_op_mode) < DWARF2_ADDR_SIZE)
14836 return scompare_loc_descriptor_narrow (op, rtl, int_op_mode, op0, op1);
14837
14838 if (GET_MODE_SIZE (int_op_mode) > DWARF2_ADDR_SIZE)
14839 return scompare_loc_descriptor_wide (op, int_op_mode, op0, op1);
14840 }
14841 return compare_loc_descriptor (op, op0, op1);
14842 }
14843
14844 /* Return location descriptor for unsigned comparison OP RTL. */
14845
14846 static dw_loc_descr_ref
14847 ucompare_loc_descriptor (enum dwarf_location_atom op, rtx rtl,
14848 machine_mode mem_mode)
14849 {
14850 dw_loc_descr_ref op0, op1;
14851
14852 machine_mode test_op_mode = GET_MODE (XEXP (rtl, 0));
14853 if (test_op_mode == VOIDmode)
14854 test_op_mode = GET_MODE (XEXP (rtl, 1));
14855
14856 scalar_int_mode op_mode;
14857 if (!is_a <scalar_int_mode> (test_op_mode, &op_mode))
14858 return NULL;
14859
14860 if (dwarf_strict
14861 && dwarf_version < 5
14862 && GET_MODE_SIZE (op_mode) > DWARF2_ADDR_SIZE)
14863 return NULL;
14864
14865 op0 = mem_loc_descriptor (XEXP (rtl, 0), op_mode, mem_mode,
14866 VAR_INIT_STATUS_INITIALIZED);
14867 op1 = mem_loc_descriptor (XEXP (rtl, 1), op_mode, mem_mode,
14868 VAR_INIT_STATUS_INITIALIZED);
14869
14870 if (op0 == NULL || op1 == NULL)
14871 return NULL;
14872
14873 if (GET_MODE_SIZE (op_mode) < DWARF2_ADDR_SIZE)
14874 {
14875 HOST_WIDE_INT mask = GET_MODE_MASK (op_mode);
14876 dw_loc_descr_ref last0, last1;
14877 for (last0 = op0; last0->dw_loc_next != NULL; last0 = last0->dw_loc_next)
14878 ;
14879 for (last1 = op1; last1->dw_loc_next != NULL; last1 = last1->dw_loc_next)
14880 ;
14881 if (CONST_INT_P (XEXP (rtl, 0)))
14882 op0 = int_loc_descriptor (INTVAL (XEXP (rtl, 0)) & mask);
14883 /* deref_size zero extends, so no need to mask it again. */
14884 else if (last0->dw_loc_opc != DW_OP_deref_size
14885 || last0->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14886 {
14887 add_loc_descr (&op0, int_loc_descriptor (mask));
14888 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14889 }
14890 if (CONST_INT_P (XEXP (rtl, 1)))
14891 op1 = int_loc_descriptor (INTVAL (XEXP (rtl, 1)) & mask);
14892 /* deref_size zero extends, so no need to mask it again. */
14893 else if (last1->dw_loc_opc != DW_OP_deref_size
14894 || last1->dw_loc_oprnd1.v.val_int > GET_MODE_SIZE (op_mode))
14895 {
14896 add_loc_descr (&op1, int_loc_descriptor (mask));
14897 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14898 }
14899 }
14900 else if (GET_MODE_SIZE (op_mode) == DWARF2_ADDR_SIZE)
14901 {
14902 HOST_WIDE_INT bias = 1;
14903 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14904 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14905 if (CONST_INT_P (XEXP (rtl, 1)))
14906 op1 = int_loc_descriptor ((unsigned HOST_WIDE_INT) bias
14907 + INTVAL (XEXP (rtl, 1)));
14908 else
14909 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst,
14910 bias, 0));
14911 }
14912 return compare_loc_descriptor (op, op0, op1);
14913 }
14914
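/* The DWARF2_ADDR_SIZE-wide case above relies on the identity
       a <u b  ==  (a + 2**(N-1)) <s (b + 2**(N-1))   (mod 2**N)
   where N is the address size in bits: adding the bias flips the sign bit
   of both operands, so the signed comparison operators yield the unsigned
   ordering.  E.g. with an 8-byte address size each operand is followed by
   DW_OP_plus_uconst 0x8000000000000000 before the comparison.  */
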
14915 /* Return location descriptor for {U,S}{MIN,MAX}. */
14916
14917 static dw_loc_descr_ref
14918 minmax_loc_descriptor (rtx rtl, machine_mode mode,
14919 machine_mode mem_mode)
14920 {
14921 enum dwarf_location_atom op;
14922 dw_loc_descr_ref op0, op1, ret;
14923 dw_loc_descr_ref bra_node, drop_node;
14924
14925 scalar_int_mode int_mode;
14926 if (dwarf_strict
14927 && dwarf_version < 5
14928 && (!is_a <scalar_int_mode> (mode, &int_mode)
14929 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE))
14930 return NULL;
14931
14932 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
14933 VAR_INIT_STATUS_INITIALIZED);
14934 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
14935 VAR_INIT_STATUS_INITIALIZED);
14936
14937 if (op0 == NULL || op1 == NULL)
14938 return NULL;
14939
14940 add_loc_descr (&op0, new_loc_descr (DW_OP_dup, 0, 0));
14941 add_loc_descr (&op1, new_loc_descr (DW_OP_swap, 0, 0));
14942 add_loc_descr (&op1, new_loc_descr (DW_OP_over, 0, 0));
14943 if (GET_CODE (rtl) == UMIN || GET_CODE (rtl) == UMAX)
14944 {
14945 /* Checked by the caller. */
14946 int_mode = as_a <scalar_int_mode> (mode);
14947 if (GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14948 {
14949 HOST_WIDE_INT mask = GET_MODE_MASK (int_mode);
14950 add_loc_descr (&op0, int_loc_descriptor (mask));
14951 add_loc_descr (&op0, new_loc_descr (DW_OP_and, 0, 0));
14952 add_loc_descr (&op1, int_loc_descriptor (mask));
14953 add_loc_descr (&op1, new_loc_descr (DW_OP_and, 0, 0));
14954 }
14955 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
14956 {
14957 HOST_WIDE_INT bias = 1;
14958 bias <<= (DWARF2_ADDR_SIZE * BITS_PER_UNIT - 1);
14959 add_loc_descr (&op0, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14960 add_loc_descr (&op1, new_loc_descr (DW_OP_plus_uconst, bias, 0));
14961 }
14962 }
14963 else if (is_a <scalar_int_mode> (mode, &int_mode)
14964 && GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE)
14965 {
14966 int shift = (DWARF2_ADDR_SIZE - GET_MODE_SIZE (int_mode)) * BITS_PER_UNIT;
14967 add_loc_descr (&op0, int_loc_descriptor (shift));
14968 add_loc_descr (&op0, new_loc_descr (DW_OP_shl, 0, 0));
14969 add_loc_descr (&op1, int_loc_descriptor (shift));
14970 add_loc_descr (&op1, new_loc_descr (DW_OP_shl, 0, 0));
14971 }
14972 else if (is_a <scalar_int_mode> (mode, &int_mode)
14973 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
14974 {
14975 dw_die_ref type_die = base_type_for_mode (int_mode, 0);
14976 dw_loc_descr_ref cvt;
14977 if (type_die == NULL)
14978 return NULL;
14979 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14980 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14981 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14982 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14983 add_loc_descr (&op0, cvt);
14984 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
14985 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
14986 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
14987 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
14988 add_loc_descr (&op1, cvt);
14989 }
14990
14991 if (GET_CODE (rtl) == SMIN || GET_CODE (rtl) == UMIN)
14992 op = DW_OP_lt;
14993 else
14994 op = DW_OP_gt;
14995 ret = op0;
14996 add_loc_descr (&ret, op1);
14997 add_loc_descr (&ret, new_loc_descr (op, 0, 0));
14998 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
14999 add_loc_descr (&ret, bra_node);
15000 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15001 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
15002 add_loc_descr (&ret, drop_node);
15003 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
15004 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
15005 if ((GET_CODE (rtl) == SMIN || GET_CODE (rtl) == SMAX)
15006 && is_a <scalar_int_mode> (mode, &int_mode)
15007 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15008 ret = convert_descriptor_to_mode (int_mode, ret);
15009 return ret;
15010 }
15011
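/* The control flow above, e.g. for SMIN at the DWARF address size, is
       <a> DW_OP_dup <b> DW_OP_swap DW_OP_over DW_OP_lt DW_OP_bra <L>
       DW_OP_swap
   L:  DW_OP_drop
   which leaves a if a < b and b otherwise.  Note that the masking, biasing
   or conversion sequences are appended after DW_OP_dup / DW_OP_over, so
   they only affect the copies used for the comparison, not the value that
   is ultimately left on the stack.  */
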
15012 /* Helper function for mem_loc_descriptor. Perform the binary operation OP
15013 after converting both arguments to TYPE_DIE, then convert the result
15014 back to unsigned MODE. */
15015
15016 static dw_loc_descr_ref
15017 typed_binop (enum dwarf_location_atom op, rtx rtl, dw_die_ref type_die,
15018 scalar_int_mode mode, machine_mode mem_mode)
15019 {
15020 dw_loc_descr_ref cvt, op0, op1;
15021
15022 if (type_die == NULL)
15023 return NULL;
15024 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15025 VAR_INIT_STATUS_INITIALIZED);
15026 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15027 VAR_INIT_STATUS_INITIALIZED);
15028 if (op0 == NULL || op1 == NULL)
15029 return NULL;
15030 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15031 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15032 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15033 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15034 add_loc_descr (&op0, cvt);
15035 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15036 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15037 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15038 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15039 add_loc_descr (&op1, cvt);
15040 add_loc_descr (&op0, op1);
15041 add_loc_descr (&op0, new_loc_descr (op, 0, 0));
15042 return convert_descriptor_to_mode (mode, op0);
15043 }
15044
15045 /* CLZ (where constV is CLZ_DEFINED_VALUE_AT_ZERO computed value,
15046 const0 is DW_OP_lit0 or corresponding typed constant,
15047 const1 is DW_OP_lit1 or corresponding typed constant
15048 and constMSB is constant with just the MSB bit set
15049 for the mode):
15050 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
15051 L1: const0 DW_OP_swap
15052 L2: DW_OP_dup constMSB DW_OP_and DW_OP_bra <L3> const1 DW_OP_shl
15053 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15054 L3: DW_OP_drop
15055 L4: DW_OP_nop
15056
15057 CTZ is similar:
15058 DW_OP_dup DW_OP_bra <L1> DW_OP_drop constV DW_OP_skip <L4>
15059 L1: const0 DW_OP_swap
15060 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15061 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15062 L3: DW_OP_drop
15063 L4: DW_OP_nop
15064
15065 FFS is similar:
15066 DW_OP_dup DW_OP_bra <L1> DW_OP_drop const0 DW_OP_skip <L4>
15067 L1: const1 DW_OP_swap
15068 L2: DW_OP_dup const1 DW_OP_and DW_OP_bra <L3> const1 DW_OP_shr
15069 DW_OP_swap DW_OP_plus_uconst <1> DW_OP_swap DW_OP_skip <L2>
15070 L3: DW_OP_drop
15071 L4: DW_OP_nop */
15072
15073 static dw_loc_descr_ref
15074 clz_loc_descriptor (rtx rtl, scalar_int_mode mode,
15075 machine_mode mem_mode)
15076 {
15077 dw_loc_descr_ref op0, ret, tmp;
15078 HOST_WIDE_INT valv;
15079 dw_loc_descr_ref l1jump, l1label;
15080 dw_loc_descr_ref l2jump, l2label;
15081 dw_loc_descr_ref l3jump, l3label;
15082 dw_loc_descr_ref l4jump, l4label;
15083 rtx msb;
15084
15085 if (GET_MODE (XEXP (rtl, 0)) != mode)
15086 return NULL;
15087
15088 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15089 VAR_INIT_STATUS_INITIALIZED);
15090 if (op0 == NULL)
15091 return NULL;
15092 ret = op0;
15093 if (GET_CODE (rtl) == CLZ)
15094 {
15095 if (!CLZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15096 valv = GET_MODE_BITSIZE (mode);
15097 }
15098 else if (GET_CODE (rtl) == FFS)
15099 valv = 0;
15100 else if (!CTZ_DEFINED_VALUE_AT_ZERO (mode, valv))
15101 valv = GET_MODE_BITSIZE (mode);
15102 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15103 l1jump = new_loc_descr (DW_OP_bra, 0, 0);
15104 add_loc_descr (&ret, l1jump);
15105 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15106 tmp = mem_loc_descriptor (GEN_INT (valv), mode, mem_mode,
15107 VAR_INIT_STATUS_INITIALIZED);
15108 if (tmp == NULL)
15109 return NULL;
15110 add_loc_descr (&ret, tmp);
15111 l4jump = new_loc_descr (DW_OP_skip, 0, 0);
15112 add_loc_descr (&ret, l4jump);
15113 l1label = mem_loc_descriptor (GET_CODE (rtl) == FFS
15114 ? const1_rtx : const0_rtx,
15115 mode, mem_mode,
15116 VAR_INIT_STATUS_INITIALIZED);
15117 if (l1label == NULL)
15118 return NULL;
15119 add_loc_descr (&ret, l1label);
15120 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15121 l2label = new_loc_descr (DW_OP_dup, 0, 0);
15122 add_loc_descr (&ret, l2label);
15123 if (GET_CODE (rtl) != CLZ)
15124 msb = const1_rtx;
15125 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
15126 msb = GEN_INT (HOST_WIDE_INT_1U
15127 << (GET_MODE_BITSIZE (mode) - 1));
15128 else
15129 msb = immed_wide_int_const
15130 (wi::set_bit_in_zero (GET_MODE_PRECISION (mode) - 1,
15131 GET_MODE_PRECISION (mode)), mode);
15132 if (GET_CODE (msb) == CONST_INT && INTVAL (msb) < 0)
15133 tmp = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15134 ? DW_OP_const4u : HOST_BITS_PER_WIDE_INT == 64
15135 ? DW_OP_const8u : DW_OP_constu, INTVAL (msb), 0);
15136 else
15137 tmp = mem_loc_descriptor (msb, mode, mem_mode,
15138 VAR_INIT_STATUS_INITIALIZED);
15139 if (tmp == NULL)
15140 return NULL;
15141 add_loc_descr (&ret, tmp);
15142 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15143 l3jump = new_loc_descr (DW_OP_bra, 0, 0);
15144 add_loc_descr (&ret, l3jump);
15145 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15146 VAR_INIT_STATUS_INITIALIZED);
15147 if (tmp == NULL)
15148 return NULL;
15149 add_loc_descr (&ret, tmp);
15150 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == CLZ
15151 ? DW_OP_shl : DW_OP_shr, 0, 0));
15152 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15153 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst, 1, 0));
15154 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15155 l2jump = new_loc_descr (DW_OP_skip, 0, 0);
15156 add_loc_descr (&ret, l2jump);
15157 l3label = new_loc_descr (DW_OP_drop, 0, 0);
15158 add_loc_descr (&ret, l3label);
15159 l4label = new_loc_descr (DW_OP_nop, 0, 0);
15160 add_loc_descr (&ret, l4label);
15161 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15162 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15163 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15164 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15165 l3jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15166 l3jump->dw_loc_oprnd1.v.val_loc = l3label;
15167 l4jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15168 l4jump->dw_loc_oprnd1.v.val_loc = l4label;
15169 return ret;
15170 }
15171
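/* In the loop above, the stack at L2 always holds <count> below the
   remaining <value> (count starts as const0, or const1 for FFS); each
   iteration either terminates (MSB set for CLZ, LSB set for CTZ/FFS) or
   shifts the value by one bit and bumps the count.  For instance CLZ of
   0x10 in a 32-bit mode iterates 27 times and leaves 27, while the initial
   DW_OP_dup DW_OP_bra handles a zero input by pushing constV directly.  */
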
15172 /* POPCOUNT (const0 is DW_OP_lit0 or corresponding typed constant,
15173 const1 is DW_OP_lit1 or corresponding typed constant):
15174 const0 DW_OP_swap
15175 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15176 DW_OP_plus DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15177 L2: DW_OP_drop
15178
15179 PARITY is similar:
15180 L1: DW_OP_dup DW_OP_bra <L2> DW_OP_dup DW_OP_rot const1 DW_OP_and
15181 DW_OP_xor DW_OP_swap const1 DW_OP_shr DW_OP_skip <L1>
15182 L2: DW_OP_drop */
15183
15184 static dw_loc_descr_ref
15185 popcount_loc_descriptor (rtx rtl, scalar_int_mode mode,
15186 machine_mode mem_mode)
15187 {
15188 dw_loc_descr_ref op0, ret, tmp;
15189 dw_loc_descr_ref l1jump, l1label;
15190 dw_loc_descr_ref l2jump, l2label;
15191
15192 if (GET_MODE (XEXP (rtl, 0)) != mode)
15193 return NULL;
15194
15195 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15196 VAR_INIT_STATUS_INITIALIZED);
15197 if (op0 == NULL)
15198 return NULL;
15199 ret = op0;
15200 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15201 VAR_INIT_STATUS_INITIALIZED);
15202 if (tmp == NULL)
15203 return NULL;
15204 add_loc_descr (&ret, tmp);
15205 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15206 l1label = new_loc_descr (DW_OP_dup, 0, 0);
15207 add_loc_descr (&ret, l1label);
15208 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15209 add_loc_descr (&ret, l2jump);
15210 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15211 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15212 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15213 VAR_INIT_STATUS_INITIALIZED);
15214 if (tmp == NULL)
15215 return NULL;
15216 add_loc_descr (&ret, tmp);
15217 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15218 add_loc_descr (&ret, new_loc_descr (GET_CODE (rtl) == POPCOUNT
15219 ? DW_OP_plus : DW_OP_xor, 0, 0));
15220 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15221 tmp = mem_loc_descriptor (const1_rtx, mode, mem_mode,
15222 VAR_INIT_STATUS_INITIALIZED);
15223 add_loc_descr (&ret, tmp);
15224 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15225 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15226 add_loc_descr (&ret, l1jump);
15227 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15228 add_loc_descr (&ret, l2label);
15229 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15230 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15231 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15232 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15233 return ret;
15234 }
15235
15236 /* BSWAP (constS is initial shift count, either 56 or 24):
15237 constS const0
15238 L1: DW_OP_pick <2> constS DW_OP_pick <3> DW_OP_minus DW_OP_shr
15239 const255 DW_OP_and DW_OP_pick <2> DW_OP_shl DW_OP_or
15240 DW_OP_swap DW_OP_dup const0 DW_OP_eq DW_OP_bra <L2> const8
15241 DW_OP_minus DW_OP_swap DW_OP_skip <L1>
15242 L2: DW_OP_drop DW_OP_swap DW_OP_drop */
15243
15244 static dw_loc_descr_ref
15245 bswap_loc_descriptor (rtx rtl, scalar_int_mode mode,
15246 machine_mode mem_mode)
15247 {
15248 dw_loc_descr_ref op0, ret, tmp;
15249 dw_loc_descr_ref l1jump, l1label;
15250 dw_loc_descr_ref l2jump, l2label;
15251
15252 if (BITS_PER_UNIT != 8
15253 || (GET_MODE_BITSIZE (mode) != 32
15254 && GET_MODE_BITSIZE (mode) != 64))
15255 return NULL;
15256
15257 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15258 VAR_INIT_STATUS_INITIALIZED);
15259 if (op0 == NULL)
15260 return NULL;
15261
15262 ret = op0;
15263 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15264 mode, mem_mode,
15265 VAR_INIT_STATUS_INITIALIZED);
15266 if (tmp == NULL)
15267 return NULL;
15268 add_loc_descr (&ret, tmp);
15269 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15270 VAR_INIT_STATUS_INITIALIZED);
15271 if (tmp == NULL)
15272 return NULL;
15273 add_loc_descr (&ret, tmp);
15274 l1label = new_loc_descr (DW_OP_pick, 2, 0);
15275 add_loc_descr (&ret, l1label);
15276 tmp = mem_loc_descriptor (GEN_INT (GET_MODE_BITSIZE (mode) - 8),
15277 mode, mem_mode,
15278 VAR_INIT_STATUS_INITIALIZED);
15279 add_loc_descr (&ret, tmp);
15280 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 3, 0));
15281 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15282 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15283 tmp = mem_loc_descriptor (GEN_INT (255), mode, mem_mode,
15284 VAR_INIT_STATUS_INITIALIZED);
15285 if (tmp == NULL)
15286 return NULL;
15287 add_loc_descr (&ret, tmp);
15288 add_loc_descr (&ret, new_loc_descr (DW_OP_and, 0, 0));
15289 add_loc_descr (&ret, new_loc_descr (DW_OP_pick, 2, 0));
15290 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15291 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15292 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15293 add_loc_descr (&ret, new_loc_descr (DW_OP_dup, 0, 0));
15294 tmp = mem_loc_descriptor (const0_rtx, mode, mem_mode,
15295 VAR_INIT_STATUS_INITIALIZED);
15296 add_loc_descr (&ret, tmp);
15297 add_loc_descr (&ret, new_loc_descr (DW_OP_eq, 0, 0));
15298 l2jump = new_loc_descr (DW_OP_bra, 0, 0);
15299 add_loc_descr (&ret, l2jump);
15300 tmp = mem_loc_descriptor (GEN_INT (8), mode, mem_mode,
15301 VAR_INIT_STATUS_INITIALIZED);
15302 add_loc_descr (&ret, tmp);
15303 add_loc_descr (&ret, new_loc_descr (DW_OP_minus, 0, 0));
15304 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15305 l1jump = new_loc_descr (DW_OP_skip, 0, 0);
15306 add_loc_descr (&ret, l1jump);
15307 l2label = new_loc_descr (DW_OP_drop, 0, 0);
15308 add_loc_descr (&ret, l2label);
15309 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15310 add_loc_descr (&ret, new_loc_descr (DW_OP_drop, 0, 0));
15311 l1jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15312 l1jump->dw_loc_oprnd1.v.val_loc = l1label;
15313 l2jump->dw_loc_oprnd1.val_class = dw_val_class_loc;
15314 l2jump->dw_loc_oprnd1.v.val_loc = l2label;
15315 return ret;
15316 }
15317
15318 /* ROTATE (constMASK is mode mask, BITSIZE is bitsize of mode):
15319 DW_OP_over DW_OP_over DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15320 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_neg
15321 DW_OP_plus_uconst <BITSIZE> DW_OP_shr DW_OP_or
15322
15323 ROTATERT is similar:
15324 DW_OP_over DW_OP_over DW_OP_neg DW_OP_plus_uconst <BITSIZE>
15325 DW_OP_shl [ constMASK DW_OP_and ] DW_OP_rot
15326 [ DW_OP_swap constMASK DW_OP_and DW_OP_swap ] DW_OP_shr DW_OP_or */
15327
15328 static dw_loc_descr_ref
15329 rotate_loc_descriptor (rtx rtl, scalar_int_mode mode,
15330 machine_mode mem_mode)
15331 {
15332 rtx rtlop1 = XEXP (rtl, 1);
15333 dw_loc_descr_ref op0, op1, ret, mask[2] = { NULL, NULL };
15334 int i;
15335
15336 if (is_narrower_int_mode (GET_MODE (rtlop1), mode))
15337 rtlop1 = gen_rtx_ZERO_EXTEND (mode, rtlop1);
15338 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15339 VAR_INIT_STATUS_INITIALIZED);
15340 op1 = mem_loc_descriptor (rtlop1, mode, mem_mode,
15341 VAR_INIT_STATUS_INITIALIZED);
15342 if (op0 == NULL || op1 == NULL)
15343 return NULL;
15344 if (GET_MODE_SIZE (mode) < DWARF2_ADDR_SIZE)
15345 for (i = 0; i < 2; i++)
15346 {
15347 if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
15348 mask[i] = mem_loc_descriptor (GEN_INT (GET_MODE_MASK (mode)),
15349 mode, mem_mode,
15350 VAR_INIT_STATUS_INITIALIZED);
15351 else if (GET_MODE_BITSIZE (mode) == HOST_BITS_PER_WIDE_INT)
15352 mask[i] = new_loc_descr (HOST_BITS_PER_WIDE_INT == 32
15353 ? DW_OP_const4u
15354 : HOST_BITS_PER_WIDE_INT == 64
15355 ? DW_OP_const8u : DW_OP_constu,
15356 GET_MODE_MASK (mode), 0);
15357 else
15358 mask[i] = NULL;
15359 if (mask[i] == NULL)
15360 return NULL;
15361 add_loc_descr (&mask[i], new_loc_descr (DW_OP_and, 0, 0));
15362 }
15363 ret = op0;
15364 add_loc_descr (&ret, op1);
15365 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15366 add_loc_descr (&ret, new_loc_descr (DW_OP_over, 0, 0));
15367 if (GET_CODE (rtl) == ROTATERT)
15368 {
15369 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15370 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15371 GET_MODE_BITSIZE (mode), 0));
15372 }
15373 add_loc_descr (&ret, new_loc_descr (DW_OP_shl, 0, 0));
15374 if (mask[0] != NULL)
15375 add_loc_descr (&ret, mask[0]);
15376 add_loc_descr (&ret, new_loc_descr (DW_OP_rot, 0, 0));
15377 if (mask[1] != NULL)
15378 {
15379 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15380 add_loc_descr (&ret, mask[1]);
15381 add_loc_descr (&ret, new_loc_descr (DW_OP_swap, 0, 0));
15382 }
15383 if (GET_CODE (rtl) == ROTATE)
15384 {
15385 add_loc_descr (&ret, new_loc_descr (DW_OP_neg, 0, 0));
15386 add_loc_descr (&ret, new_loc_descr (DW_OP_plus_uconst,
15387 GET_MODE_BITSIZE (mode), 0));
15388 }
15389 add_loc_descr (&ret, new_loc_descr (DW_OP_shr, 0, 0));
15390 add_loc_descr (&ret, new_loc_descr (DW_OP_or, 0, 0));
15391 return ret;
15392 }
15393
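/* The bracketed constMASK DW_OP_and steps in the scheme above are emitted
   only when MODE is narrower than the DWARF address size (see the mask[]
   setup): the expression stack works on address-sized values, so both the
   left-shifted half and the value fed to the right shift are truncated to
   the mode first, keeping stray high bits out of the result.  Address-sized
   modes need no masking.  */
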
15394 /* Helper function for mem_loc_descriptor. Return DW_OP_GNU_parameter_ref
15395 for DEBUG_PARAMETER_REF RTL. */
15396
15397 static dw_loc_descr_ref
15398 parameter_ref_descriptor (rtx rtl)
15399 {
15400 dw_loc_descr_ref ret;
15401 dw_die_ref ref;
15402
15403 if (dwarf_strict)
15404 return NULL;
15405 gcc_assert (TREE_CODE (DEBUG_PARAMETER_REF_DECL (rtl)) == PARM_DECL);
15406 /* With LTO during LTRANS we get the late DIE that refers to the early
15407 DIE, thus we add another indirection here. This seems to confuse
15408 gdb enough to make gcc.dg/guality/pr68860-1.c FAIL with LTO. */
15409 ref = lookup_decl_die (DEBUG_PARAMETER_REF_DECL (rtl));
15410 ret = new_loc_descr (DW_OP_GNU_parameter_ref, 0, 0);
15411 if (ref)
15412 {
15413 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15414 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
15415 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
15416 }
15417 else
15418 {
15419 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
15420 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_PARAMETER_REF_DECL (rtl);
15421 }
15422 return ret;
15423 }
15424
15425 /* The following routine converts the RTL for a variable or parameter
15426 (resident in memory) into an equivalent Dwarf representation of a
15427 mechanism for getting the address of that same variable onto the top of a
15428 hypothetical "address evaluation" stack.
15429
15430 When creating memory location descriptors, we are effectively transforming
15431 the RTL for a memory-resident object into its Dwarf postfix expression
15432 equivalent. This routine recursively descends an RTL tree, turning
15433 it into Dwarf postfix code as it goes.
15434
15435 MODE is the mode that should be assumed for the rtl if it is VOIDmode.
15436
15437 MEM_MODE is the mode of the memory reference, needed to handle some
15438 autoincrement addressing modes.
15439
15440 Return 0 if we can't represent the location. */
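
/* For instance, a 4-byte stack slot accessed as
   (mem:SI (plus:DI (reg:DI rbp) (const_int -4))) on a 64-bit x86_64 target
   is typically rendered as the address computation (e.g. DW_OP_breg6 -4,
   or DW_OP_fbreg once the frame base applies) followed by
   DW_OP_deref_size 4, while a pointer-sized MEM uses plain DW_OP_deref;
   see the MEM case below.  */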
15441
15442 dw_loc_descr_ref
15443 mem_loc_descriptor (rtx rtl, machine_mode mode,
15444 machine_mode mem_mode,
15445 enum var_init_status initialized)
15446 {
15447 dw_loc_descr_ref mem_loc_result = NULL;
15448 enum dwarf_location_atom op;
15449 dw_loc_descr_ref op0, op1;
15450 rtx inner = NULL_RTX;
15451 poly_int64 offset;
15452
15453 if (mode == VOIDmode)
15454 mode = GET_MODE (rtl);
15455
15456 /* Note that for a dynamically sized array, the location we will generate a
15457 description of here will be the lowest numbered location which is
15458 actually within the array. That's *not* necessarily the same as the
15459 zeroth element of the array. */
15460
15461 rtl = targetm.delegitimize_address (rtl);
15462
15463 if (mode != GET_MODE (rtl) && GET_MODE (rtl) != VOIDmode)
15464 return NULL;
15465
15466 scalar_int_mode int_mode = BImode, inner_mode, op1_mode;
15467 switch (GET_CODE (rtl))
15468 {
15469 case POST_INC:
15470 case POST_DEC:
15471 case POST_MODIFY:
15472 return mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode, initialized);
15473
15474 case SUBREG:
15475 /* The case of a subreg may arise when we have a local (register)
15476 variable or a formal (register) parameter which doesn't quite fill
15477 up an entire register. For now, just assume that it is
15478 legitimate to make the Dwarf info refer to the whole register which
15479 contains the given subreg. */
15480 if (!subreg_lowpart_p (rtl))
15481 break;
15482 inner = SUBREG_REG (rtl);
15483 /* FALLTHRU */
15484 case TRUNCATE:
15485 if (inner == NULL_RTX)
15486 inner = XEXP (rtl, 0);
15487 if (is_a <scalar_int_mode> (mode, &int_mode)
15488 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15489 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15490 #ifdef POINTERS_EXTEND_UNSIGNED
15491 || (int_mode == Pmode && mem_mode != VOIDmode)
15492 #endif
15493 )
15494 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE)
15495 {
15496 mem_loc_result = mem_loc_descriptor (inner,
15497 inner_mode,
15498 mem_mode, initialized);
15499 break;
15500 }
15501 if (dwarf_strict && dwarf_version < 5)
15502 break;
15503 if (is_a <scalar_int_mode> (mode, &int_mode)
15504 && is_a <scalar_int_mode> (GET_MODE (inner), &inner_mode)
15505 ? GET_MODE_SIZE (int_mode) <= GET_MODE_SIZE (inner_mode)
15506 : known_eq (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15507 {
15508 dw_die_ref type_die;
15509 dw_loc_descr_ref cvt;
15510
15511 mem_loc_result = mem_loc_descriptor (inner,
15512 GET_MODE (inner),
15513 mem_mode, initialized);
15514 if (mem_loc_result == NULL)
15515 break;
15516 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15517 if (type_die == NULL)
15518 {
15519 mem_loc_result = NULL;
15520 break;
15521 }
15522 if (maybe_ne (GET_MODE_SIZE (mode), GET_MODE_SIZE (GET_MODE (inner))))
15523 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15524 else
15525 cvt = new_loc_descr (dwarf_OP (DW_OP_reinterpret), 0, 0);
15526 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15527 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
15528 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15529 add_loc_descr (&mem_loc_result, cvt);
15530 if (is_a <scalar_int_mode> (mode, &int_mode)
15531 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15532 {
15533 /* Convert it to untyped afterwards. */
15534 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15535 add_loc_descr (&mem_loc_result, cvt);
15536 }
15537 }
15538 break;
15539
15540 case REG:
15541 if (!is_a <scalar_int_mode> (mode, &int_mode)
15542 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15543 && rtl != arg_pointer_rtx
15544 && rtl != frame_pointer_rtx
15545 #ifdef POINTERS_EXTEND_UNSIGNED
15546 && (int_mode != Pmode || mem_mode == VOIDmode)
15547 #endif
15548 ))
15549 {
15550 dw_die_ref type_die;
15551 unsigned int dbx_regnum;
15552
15553 if (dwarf_strict && dwarf_version < 5)
15554 break;
15555 if (REGNO (rtl) >= FIRST_PSEUDO_REGISTER)
15556 break;
15557 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15558 if (type_die == NULL)
15559 break;
15560
15561 dbx_regnum = dbx_reg_number (rtl);
15562 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15563 break;
15564 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_regval_type),
15565 dbx_regnum, 0);
15566 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15567 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15568 mem_loc_result->dw_loc_oprnd2.v.val_die_ref.external = 0;
15569 break;
15570 }
15571 /* Whenever a register number forms a part of the description of the
15572 method for calculating the (dynamic) address of a memory resident
15573 object, DWARF rules require the register number be referred to as
15574 a "base register". This distinction is not based in any way upon
15575 what category of register the hardware believes the given register
15576 belongs to. This is strictly DWARF terminology we're dealing with
15577 here. Note that in cases where the location of a memory-resident
15578 data object could be expressed as: OP_ADD (OP_BASEREG (basereg),
15579 OP_CONST (0)) the actual DWARF location descriptor that we generate
15580 may just be OP_BASEREG (basereg). This may look deceptively like
15581 the object in question was allocated to a register (rather than in
15582 memory) so DWARF consumers need to be aware of the subtle
15583 distinction between OP_REG and OP_BASEREG. */
15584 if (REGNO (rtl) < FIRST_PSEUDO_REGISTER)
15585 mem_loc_result = based_loc_descr (rtl, 0, VAR_INIT_STATUS_INITIALIZED);
15586 else if (stack_realign_drap
15587 && crtl->drap_reg
15588 && crtl->args.internal_arg_pointer == rtl
15589 && REGNO (crtl->drap_reg) < FIRST_PSEUDO_REGISTER)
15590 {
15591 /* If RTL is internal_arg_pointer, which has been optimized
15592 out, use DRAP instead. */
15593 mem_loc_result = based_loc_descr (crtl->drap_reg, 0,
15594 VAR_INIT_STATUS_INITIALIZED);
15595 }
15596 break;
15597
15598 case SIGN_EXTEND:
15599 case ZERO_EXTEND:
15600 if (!is_a <scalar_int_mode> (mode, &int_mode)
15601 || !is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode))
15602 break;
15603 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
15604 mem_mode, VAR_INIT_STATUS_INITIALIZED);
15605 if (op0 == 0)
15606 break;
15607 else if (GET_CODE (rtl) == ZERO_EXTEND
15608 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15609 && GET_MODE_BITSIZE (inner_mode) < HOST_BITS_PER_WIDE_INT
15610 /* If DW_OP_const{1,2,4}u won't be used, it is shorter
15611 to expand zero extend as two shifts instead of
15612 masking. */
15613 && GET_MODE_SIZE (inner_mode) <= 4)
15614 {
15615 mem_loc_result = op0;
15616 add_loc_descr (&mem_loc_result,
15617 int_loc_descriptor (GET_MODE_MASK (inner_mode)));
15618 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_and, 0, 0));
15619 }
15620 else if (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE)
15621 {
15622 int shift = DWARF2_ADDR_SIZE - GET_MODE_SIZE (inner_mode);
15623 shift *= BITS_PER_UNIT;
15624 if (GET_CODE (rtl) == SIGN_EXTEND)
15625 op = DW_OP_shra;
15626 else
15627 op = DW_OP_shr;
15628 mem_loc_result = op0;
15629 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15630 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
15631 add_loc_descr (&mem_loc_result, int_loc_descriptor (shift));
15632 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15633 }
15634 else if (!dwarf_strict || dwarf_version >= 5)
15635 {
15636 dw_die_ref type_die1, type_die2;
15637 dw_loc_descr_ref cvt;
15638
15639 type_die1 = base_type_for_mode (inner_mode,
15640 GET_CODE (rtl) == ZERO_EXTEND);
15641 if (type_die1 == NULL)
15642 break;
15643 type_die2 = base_type_for_mode (int_mode, 1);
15644 if (type_die2 == NULL)
15645 break;
15646 mem_loc_result = op0;
15647 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15648 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15649 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die1;
15650 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15651 add_loc_descr (&mem_loc_result, cvt);
15652 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
15653 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
15654 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die2;
15655 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
15656 add_loc_descr (&mem_loc_result, cvt);
15657 }
15658 break;
15659
15660 case MEM:
15661 {
15662 rtx new_rtl = avoid_constant_pool_reference (rtl);
15663 if (new_rtl != rtl)
15664 {
15665 mem_loc_result = mem_loc_descriptor (new_rtl, mode, mem_mode,
15666 initialized);
15667 if (mem_loc_result != NULL)
15668 return mem_loc_result;
15669 }
15670 }
15671 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0),
15672 get_address_mode (rtl), mode,
15673 VAR_INIT_STATUS_INITIALIZED);
15674 if (mem_loc_result == NULL)
15675 mem_loc_result = tls_mem_loc_descriptor (rtl);
15676 if (mem_loc_result != NULL)
15677 {
15678 if (!is_a <scalar_int_mode> (mode, &int_mode)
15679 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15680 {
15681 dw_die_ref type_die;
15682 dw_loc_descr_ref deref;
15683 HOST_WIDE_INT size;
15684
15685 if (dwarf_strict && dwarf_version < 5)
15686 return NULL;
15687 if (!GET_MODE_SIZE (mode).is_constant (&size))
15688 return NULL;
15689 type_die
15690 = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
15691 if (type_die == NULL)
15692 return NULL;
15693 deref = new_loc_descr (dwarf_OP (DW_OP_deref_type), size, 0);
15694 deref->dw_loc_oprnd2.val_class = dw_val_class_die_ref;
15695 deref->dw_loc_oprnd2.v.val_die_ref.die = type_die;
15696 deref->dw_loc_oprnd2.v.val_die_ref.external = 0;
15697 add_loc_descr (&mem_loc_result, deref);
15698 }
15699 else if (GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE)
15700 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_deref, 0, 0));
15701 else
15702 add_loc_descr (&mem_loc_result,
15703 new_loc_descr (DW_OP_deref_size,
15704 GET_MODE_SIZE (int_mode), 0));
15705 }
15706 break;
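/* As a sketch of the MEM handling above: reading a 4-byte SImode value
   through an address held in a register would typically come out as
     DW_OP_breg<n> 0 DW_OP_deref_size 4
   on a target with DWARF2_ADDR_SIZE == 8, a full address-sized load uses
   plain DW_OP_deref, and wider or non-integral modes need the typed
   DW_OP_deref_type form.  */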
15707
15708 case LO_SUM:
15709 return mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode, initialized);
15710
15711 case LABEL_REF:
15712 /* Some ports can transform a symbol ref into a label ref, because
15713 the symbol ref is too far away and has to be dumped into a constant
15714 pool. */
15715 case CONST:
15716 case SYMBOL_REF:
15717 case UNSPEC:
15718 if (!is_a <scalar_int_mode> (mode, &int_mode)
15719 || (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE
15720 #ifdef POINTERS_EXTEND_UNSIGNED
15721 && (int_mode != Pmode || mem_mode == VOIDmode)
15722 #endif
15723 ))
15724 break;
15725
15726 if (GET_CODE (rtl) == UNSPEC)
15727 {
15728 /* If delegitimize_address couldn't do anything with the UNSPEC, we
15729 can't express it in the debug info. This can happen e.g. with some
15730 TLS UNSPECs. Allow UNSPECs that originally came from a CONST and
15731 that the backend approves. */
15732 bool not_ok = false;
15733 subrtx_var_iterator::array_type array;
15734 FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL)
15735 if (*iter != rtl && !CONSTANT_P (*iter))
15736 {
15737 not_ok = true;
15738 break;
15739 }
15740
15741 if (not_ok)
15742 break;
15743
15744 FOR_EACH_SUBRTX_VAR (iter, array, rtl, ALL)
15745 if (!const_ok_for_output_1 (*iter))
15746 {
15747 not_ok = true;
15748 break;
15749 }
15750
15751 if (not_ok)
15752 break;
15753
15754 rtl = gen_rtx_CONST (GET_MODE (rtl), rtl);
15755 goto symref;
15756 }
15757
15758 if (GET_CODE (rtl) == SYMBOL_REF
15759 && SYMBOL_REF_TLS_MODEL (rtl) != TLS_MODEL_NONE)
15760 {
15761 dw_loc_descr_ref temp;
15762
15763 /* If this is not defined, we have no way to emit the data. */
15764 if (!targetm.have_tls || !targetm.asm_out.output_dwarf_dtprel)
15765 break;
15766
15767 temp = new_addr_loc_descr (rtl, dtprel_true);
15768
15769 /* We check for DWARF 5 here because gdb did not implement
15770 DW_OP_form_tls_address until after 7.12. */
15771 mem_loc_result = new_loc_descr ((dwarf_version >= 5
15772 ? DW_OP_form_tls_address
15773 : DW_OP_GNU_push_tls_address),
15774 0, 0);
15775 add_loc_descr (&mem_loc_result, temp);
15776
15777 break;
15778 }
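/* For a thread-local SYMBOL_REF the branch above therefore emits, roughly,
     DW_OP_addr <dtprel offset> DW_OP_form_tls_address
   (DW_OP_GNU_push_tls_address before DWARF 5), leaving the consumer to
   combine the DTP-relative offset with the thread pointer.  */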
15779
15780 if (!const_ok_for_output (rtl))
15781 {
15782 if (GET_CODE (rtl) == CONST)
15783 switch (GET_CODE (XEXP (rtl, 0)))
15784 {
15785 case NOT:
15786 op = DW_OP_not;
15787 goto try_const_unop;
15788 case NEG:
15789 op = DW_OP_neg;
15790 goto try_const_unop;
15791 try_const_unop:
15792 rtx arg;
15793 arg = XEXP (XEXP (rtl, 0), 0);
15794 if (!CONSTANT_P (arg))
15795 arg = gen_rtx_CONST (int_mode, arg);
15796 op0 = mem_loc_descriptor (arg, int_mode, mem_mode,
15797 initialized);
15798 if (op0)
15799 {
15800 mem_loc_result = op0;
15801 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15802 }
15803 break;
15804 default:
15805 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), int_mode,
15806 mem_mode, initialized);
15807 break;
15808 }
15809 break;
15810 }
15811
15812 symref:
15813 mem_loc_result = new_addr_loc_descr (rtl, dtprel_false);
15814 vec_safe_push (used_rtx_array, rtl);
15815 break;
15816
15817 case CONCAT:
15818 case CONCATN:
15819 case VAR_LOCATION:
15820 case DEBUG_IMPLICIT_PTR:
15821 expansion_failed (NULL_TREE, rtl,
15822 "CONCAT/CONCATN/VAR_LOCATION is handled only by loc_descriptor");
15823 return 0;
15824
15825 case ENTRY_VALUE:
15826 if (dwarf_strict && dwarf_version < 5)
15827 return NULL;
15828 if (REG_P (ENTRY_VALUE_EXP (rtl)))
15829 {
15830 if (!is_a <scalar_int_mode> (mode, &int_mode)
15831 || GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15832 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15833 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15834 else
15835 {
15836 unsigned int dbx_regnum = dbx_reg_number (ENTRY_VALUE_EXP (rtl));
15837 if (dbx_regnum == IGNORED_DWARF_REGNUM)
15838 return NULL;
15839 op0 = one_reg_loc_descriptor (dbx_regnum,
15840 VAR_INIT_STATUS_INITIALIZED);
15841 }
15842 }
15843 else if (MEM_P (ENTRY_VALUE_EXP (rtl))
15844 && REG_P (XEXP (ENTRY_VALUE_EXP (rtl), 0)))
15845 {
15846 op0 = mem_loc_descriptor (ENTRY_VALUE_EXP (rtl), mode,
15847 VOIDmode, VAR_INIT_STATUS_INITIALIZED);
15848 if (op0 && op0->dw_loc_opc == DW_OP_fbreg)
15849 return NULL;
15850 }
15851 else
15852 gcc_unreachable ();
15853 if (op0 == NULL)
15854 return NULL;
15855 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_entry_value), 0, 0);
15856 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_loc;
15857 mem_loc_result->dw_loc_oprnd1.v.val_loc = op0;
15858 break;
15859
15860 case DEBUG_PARAMETER_REF:
15861 mem_loc_result = parameter_ref_descriptor (rtl);
15862 break;
15863
15864 case PRE_MODIFY:
15865 /* Extract the PLUS expression nested inside and fall into
15866 PLUS code below. */
15867 rtl = XEXP (rtl, 1);
15868 goto plus;
15869
15870 case PRE_INC:
15871 case PRE_DEC:
15872 /* Turn these into a PLUS expression and fall into the PLUS code
15873 below. */
15874 rtl = gen_rtx_PLUS (mode, XEXP (rtl, 0),
15875 gen_int_mode (GET_CODE (rtl) == PRE_INC
15876 ? GET_MODE_UNIT_SIZE (mem_mode)
15877 : -GET_MODE_UNIT_SIZE (mem_mode),
15878 mode));
15879
15880 /* fall through */
15881
15882 case PLUS:
15883 plus:
15884 if (is_based_loc (rtl)
15885 && is_a <scalar_int_mode> (mode, &int_mode)
15886 && (GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
15887 || XEXP (rtl, 0) == arg_pointer_rtx
15888 || XEXP (rtl, 0) == frame_pointer_rtx))
15889 mem_loc_result = based_loc_descr (XEXP (rtl, 0),
15890 INTVAL (XEXP (rtl, 1)),
15891 VAR_INIT_STATUS_INITIALIZED);
15892 else
15893 {
15894 mem_loc_result = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15895 VAR_INIT_STATUS_INITIALIZED);
15896 if (mem_loc_result == 0)
15897 break;
15898
15899 if (CONST_INT_P (XEXP (rtl, 1))
15900 && (GET_MODE_SIZE (as_a <scalar_int_mode> (mode))
15901 <= DWARF2_ADDR_SIZE))
15902 loc_descr_plus_const (&mem_loc_result, INTVAL (XEXP (rtl, 1)));
15903 else
15904 {
15905 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15906 VAR_INIT_STATUS_INITIALIZED);
15907 if (op1 == 0)
15908 return NULL;
15909 add_loc_descr (&mem_loc_result, op1);
15910 add_loc_descr (&mem_loc_result,
15911 new_loc_descr (DW_OP_plus, 0, 0));
15912 }
15913 }
15914 break;
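/* As a concrete sketch, (plus (reg fp) (const_int -16)) with an
   address-sized mode is normally folded by based_loc_descr into a single
   DW_OP_breg<fp> -16 (or DW_OP_fbreg -16), while a non-based sum falls
   back to <op0> <op1> DW_OP_plus, or to a DW_OP_plus_uconst-style constant
   addition via loc_descr_plus_const when the second operand is a
   CONST_INT.  */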
15915
15916 /* If a pseudo-reg is optimized away, it is possible for it to
15917 be replaced with a MEM containing a multiply or shift. */
15918 case MINUS:
15919 op = DW_OP_minus;
15920 goto do_binop;
15921
15922 case MULT:
15923 op = DW_OP_mul;
15924 goto do_binop;
15925
15926 case DIV:
15927 if ((!dwarf_strict || dwarf_version >= 5)
15928 && is_a <scalar_int_mode> (mode, &int_mode)
15929 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
15930 {
15931 mem_loc_result = typed_binop (DW_OP_div, rtl,
15932 base_type_for_mode (mode, 0),
15933 int_mode, mem_mode);
15934 break;
15935 }
15936 op = DW_OP_div;
15937 goto do_binop;
15938
15939 case UMOD:
15940 op = DW_OP_mod;
15941 goto do_binop;
15942
15943 case ASHIFT:
15944 op = DW_OP_shl;
15945 goto do_shift;
15946
15947 case ASHIFTRT:
15948 op = DW_OP_shra;
15949 goto do_shift;
15950
15951 case LSHIFTRT:
15952 op = DW_OP_shr;
15953 goto do_shift;
15954
15955 do_shift:
15956 if (!is_a <scalar_int_mode> (mode, &int_mode))
15957 break;
15958 op0 = mem_loc_descriptor (XEXP (rtl, 0), int_mode, mem_mode,
15959 VAR_INIT_STATUS_INITIALIZED);
15960 {
15961 rtx rtlop1 = XEXP (rtl, 1);
15962 if (is_a <scalar_int_mode> (GET_MODE (rtlop1), &op1_mode)
15963 && GET_MODE_BITSIZE (op1_mode) < GET_MODE_BITSIZE (int_mode))
15964 rtlop1 = gen_rtx_ZERO_EXTEND (int_mode, rtlop1);
15965 op1 = mem_loc_descriptor (rtlop1, int_mode, mem_mode,
15966 VAR_INIT_STATUS_INITIALIZED);
15967 }
15968
15969 if (op0 == 0 || op1 == 0)
15970 break;
15971
15972 mem_loc_result = op0;
15973 add_loc_descr (&mem_loc_result, op1);
15974 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
15975 break;
15976
15977 case AND:
15978 op = DW_OP_and;
15979 goto do_binop;
15980
15981 case IOR:
15982 op = DW_OP_or;
15983 goto do_binop;
15984
15985 case XOR:
15986 op = DW_OP_xor;
15987 goto do_binop;
15988
15989 do_binop:
15990 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
15991 VAR_INIT_STATUS_INITIALIZED);
15992 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
15993 VAR_INIT_STATUS_INITIALIZED);
15994
15995 if (op0 == 0 || op1 == 0)
15996 break;
15997
15998 mem_loc_result = op0;
15999 add_loc_descr (&mem_loc_result, op1);
16000 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16001 break;
16002
16003 case MOD:
16004 if ((!dwarf_strict || dwarf_version >= 5)
16005 && is_a <scalar_int_mode> (mode, &int_mode)
16006 && GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
16007 {
16008 mem_loc_result = typed_binop (DW_OP_mod, rtl,
16009 base_type_for_mode (mode, 0),
16010 int_mode, mem_mode);
16011 break;
16012 }
16013
16014 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
16015 VAR_INIT_STATUS_INITIALIZED);
16016 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16017 VAR_INIT_STATUS_INITIALIZED);
16018
16019 if (op0 == 0 || op1 == 0)
16020 break;
16021
16022 mem_loc_result = op0;
16023 add_loc_descr (&mem_loc_result, op1);
16024 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
16025 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_over, 0, 0));
16026 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_div, 0, 0));
16027 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_mul, 0, 0));
16028 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_minus, 0, 0));
16029 break;
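/* Worked stack example for the MOD expansion above, with a pushed first
   and b on top: the two DW_OP_over operations leave a b a b; DW_OP_div
   gives a b (a/b); DW_OP_mul gives a ((a/b)*b); DW_OP_minus leaves
   a - (a/b)*b, i.e. the signed remainder.  DW_OP_mod itself is reserved
   for the unsigned UMOD case above.  */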
16030
16031 case UDIV:
16032 if ((!dwarf_strict || dwarf_version >= 5)
16033 && is_a <scalar_int_mode> (mode, &int_mode))
16034 {
16035 if (GET_MODE_SIZE (int_mode) > DWARF2_ADDR_SIZE)
16036 {
16037 op = DW_OP_div;
16038 goto do_binop;
16039 }
16040 mem_loc_result = typed_binop (DW_OP_div, rtl,
16041 base_type_for_mode (int_mode, 1),
16042 int_mode, mem_mode);
16043 }
16044 break;
16045
16046 case NOT:
16047 op = DW_OP_not;
16048 goto do_unop;
16049
16050 case ABS:
16051 op = DW_OP_abs;
16052 goto do_unop;
16053
16054 case NEG:
16055 op = DW_OP_neg;
16056 goto do_unop;
16057
16058 do_unop:
16059 op0 = mem_loc_descriptor (XEXP (rtl, 0), mode, mem_mode,
16060 VAR_INIT_STATUS_INITIALIZED);
16061
16062 if (op0 == 0)
16063 break;
16064
16065 mem_loc_result = op0;
16066 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16067 break;
16068
16069 case CONST_INT:
16070 if (!is_a <scalar_int_mode> (mode, &int_mode)
16071 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16072 #ifdef POINTERS_EXTEND_UNSIGNED
16073 || (int_mode == Pmode
16074 && mem_mode != VOIDmode
16075 && trunc_int_for_mode (INTVAL (rtl), ptr_mode) == INTVAL (rtl))
16076 #endif
16077 )
16078 {
16079 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16080 break;
16081 }
16082 if ((!dwarf_strict || dwarf_version >= 5)
16083 && (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT
16084 || GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_DOUBLE_INT))
16085 {
16086 dw_die_ref type_die = base_type_for_mode (int_mode, 1);
16087 scalar_int_mode amode;
16088 if (type_die == NULL)
16089 return NULL;
16090 if (INTVAL (rtl) >= 0
16091 && (int_mode_for_size (DWARF2_ADDR_SIZE * BITS_PER_UNIT, 0)
16092 .exists (&amode))
16093 && trunc_int_for_mode (INTVAL (rtl), amode) == INTVAL (rtl)
16094 /* const DW_OP_convert <XXX> vs.
16095 DW_OP_const_type <XXX, 1, const>. */
16096 && size_of_int_loc_descriptor (INTVAL (rtl)) + 1 + 1
16097 < (unsigned long) 1 + 1 + 1 + GET_MODE_SIZE (int_mode))
16098 {
16099 mem_loc_result = int_loc_descriptor (INTVAL (rtl));
16100 op0 = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16101 op0->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16102 op0->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16103 op0->dw_loc_oprnd1.v.val_die_ref.external = 0;
16104 add_loc_descr (&mem_loc_result, op0);
16105 return mem_loc_result;
16106 }
16107 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0,
16108 INTVAL (rtl));
16109 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16110 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16111 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16112 if (GET_MODE_BITSIZE (int_mode) == HOST_BITS_PER_WIDE_INT)
16113 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_const;
16114 else
16115 {
16116 mem_loc_result->dw_loc_oprnd2.val_class
16117 = dw_val_class_const_double;
16118 mem_loc_result->dw_loc_oprnd2.v.val_double
16119 = double_int::from_shwi (INTVAL (rtl));
16120 }
16121 }
16122 break;
16123
16124 case CONST_DOUBLE:
16125 if (!dwarf_strict || dwarf_version >= 5)
16126 {
16127 dw_die_ref type_die;
16128
16129 /* Note that if TARGET_SUPPORTS_WIDE_INT == 0, a
16130 CONST_DOUBLE rtx could represent either a large integer
16131 or a floating-point constant. If TARGET_SUPPORTS_WIDE_INT != 0,
16132 the value is always a floating point constant.
16133
16134 When it is an integer, a CONST_DOUBLE is used whenever
16135 the constant requires 2 HWIs to be adequately represented.
16136 We output CONST_DOUBLEs as blocks. */
16137 if (mode == VOIDmode
16138 || (GET_MODE (rtl) == VOIDmode
16139 && maybe_ne (GET_MODE_BITSIZE (mode),
16140 HOST_BITS_PER_DOUBLE_INT)))
16141 break;
16142 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16143 if (type_die == NULL)
16144 return NULL;
16145 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16146 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16147 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16148 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16149 #if TARGET_SUPPORTS_WIDE_INT == 0
16150 if (!SCALAR_FLOAT_MODE_P (mode))
16151 {
16152 mem_loc_result->dw_loc_oprnd2.val_class
16153 = dw_val_class_const_double;
16154 mem_loc_result->dw_loc_oprnd2.v.val_double
16155 = rtx_to_double_int (rtl);
16156 }
16157 else
16158 #endif
16159 {
16160 scalar_float_mode float_mode = as_a <scalar_float_mode> (mode);
16161 unsigned int length = GET_MODE_SIZE (float_mode);
16162 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16163
16164 insert_float (rtl, array);
16165 mem_loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16166 mem_loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16167 mem_loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16168 mem_loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16169 }
16170 }
16171 break;
16172
16173 case CONST_WIDE_INT:
16174 if (!dwarf_strict || dwarf_version >= 5)
16175 {
16176 dw_die_ref type_die;
16177
16178 type_die = base_type_for_mode (mode, SCALAR_INT_MODE_P (mode));
16179 if (type_die == NULL)
16180 return NULL;
16181 mem_loc_result = new_loc_descr (dwarf_OP (DW_OP_const_type), 0, 0);
16182 mem_loc_result->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16183 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16184 mem_loc_result->dw_loc_oprnd1.v.val_die_ref.external = 0;
16185 mem_loc_result->dw_loc_oprnd2.val_class
16186 = dw_val_class_wide_int;
16187 mem_loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16188 *mem_loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, mode);
16189 }
16190 break;
16191
16192 case CONST_POLY_INT:
16193 mem_loc_result = int_loc_descriptor (rtx_to_poly_int64 (rtl));
16194 break;
16195
16196 case EQ:
16197 mem_loc_result = scompare_loc_descriptor (DW_OP_eq, rtl, mem_mode);
16198 break;
16199
16200 case GE:
16201 mem_loc_result = scompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16202 break;
16203
16204 case GT:
16205 mem_loc_result = scompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16206 break;
16207
16208 case LE:
16209 mem_loc_result = scompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16210 break;
16211
16212 case LT:
16213 mem_loc_result = scompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16214 break;
16215
16216 case NE:
16217 mem_loc_result = scompare_loc_descriptor (DW_OP_ne, rtl, mem_mode);
16218 break;
16219
16220 case GEU:
16221 mem_loc_result = ucompare_loc_descriptor (DW_OP_ge, rtl, mem_mode);
16222 break;
16223
16224 case GTU:
16225 mem_loc_result = ucompare_loc_descriptor (DW_OP_gt, rtl, mem_mode);
16226 break;
16227
16228 case LEU:
16229 mem_loc_result = ucompare_loc_descriptor (DW_OP_le, rtl, mem_mode);
16230 break;
16231
16232 case LTU:
16233 mem_loc_result = ucompare_loc_descriptor (DW_OP_lt, rtl, mem_mode);
16234 break;
16235
16236 case UMIN:
16237 case UMAX:
16238 if (!SCALAR_INT_MODE_P (mode))
16239 break;
16240 /* FALLTHRU */
16241 case SMIN:
16242 case SMAX:
16243 mem_loc_result = minmax_loc_descriptor (rtl, mode, mem_mode);
16244 break;
16245
16246 case ZERO_EXTRACT:
16247 case SIGN_EXTRACT:
16248 if (CONST_INT_P (XEXP (rtl, 1))
16249 && CONST_INT_P (XEXP (rtl, 2))
16250 && is_a <scalar_int_mode> (mode, &int_mode)
16251 && is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &inner_mode)
16252 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16253 && GET_MODE_SIZE (inner_mode) <= DWARF2_ADDR_SIZE
16254 && ((unsigned) INTVAL (XEXP (rtl, 1))
16255 + (unsigned) INTVAL (XEXP (rtl, 2))
16256 <= GET_MODE_BITSIZE (int_mode)))
16257 {
16258 int shift, size;
16259 op0 = mem_loc_descriptor (XEXP (rtl, 0), inner_mode,
16260 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16261 if (op0 == 0)
16262 break;
16263 if (GET_CODE (rtl) == SIGN_EXTRACT)
16264 op = DW_OP_shra;
16265 else
16266 op = DW_OP_shr;
16267 mem_loc_result = op0;
16268 size = INTVAL (XEXP (rtl, 1));
16269 shift = INTVAL (XEXP (rtl, 2));
16270 if (BITS_BIG_ENDIAN)
16271 shift = GET_MODE_BITSIZE (inner_mode) - shift - size;
16272 if (shift + size != (int) DWARF2_ADDR_SIZE)
16273 {
16274 add_loc_descr (&mem_loc_result,
16275 int_loc_descriptor (DWARF2_ADDR_SIZE
16276 - shift - size));
16277 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_shl, 0, 0));
16278 }
16279 if (size != (int) DWARF2_ADDR_SIZE)
16280 {
16281 add_loc_descr (&mem_loc_result,
16282 int_loc_descriptor (DWARF2_ADDR_SIZE - size));
16283 add_loc_descr (&mem_loc_result, new_loc_descr (op, 0, 0));
16284 }
16285 }
16286 break;
16287
16288 case IF_THEN_ELSE:
16289 {
16290 dw_loc_descr_ref op2, bra_node, drop_node;
16291 op0 = mem_loc_descriptor (XEXP (rtl, 0),
16292 GET_MODE (XEXP (rtl, 0)) == VOIDmode
16293 ? word_mode : GET_MODE (XEXP (rtl, 0)),
16294 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16295 op1 = mem_loc_descriptor (XEXP (rtl, 1), mode, mem_mode,
16296 VAR_INIT_STATUS_INITIALIZED);
16297 op2 = mem_loc_descriptor (XEXP (rtl, 2), mode, mem_mode,
16298 VAR_INIT_STATUS_INITIALIZED);
16299 if (op0 == NULL || op1 == NULL || op2 == NULL)
16300 break;
16301
16302 mem_loc_result = op1;
16303 add_loc_descr (&mem_loc_result, op2);
16304 add_loc_descr (&mem_loc_result, op0);
16305 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
16306 add_loc_descr (&mem_loc_result, bra_node);
16307 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_swap, 0, 0));
16308 drop_node = new_loc_descr (DW_OP_drop, 0, 0);
16309 add_loc_descr (&mem_loc_result, drop_node);
16310 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
16311 bra_node->dw_loc_oprnd1.v.val_loc = drop_node;
16312 }
16313 break;
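/* To follow the IF_THEN_ELSE sequence above on the DWARF stack: op1 (the
   "then" value) and op2 (the "else" value) are pushed, then the condition.
   DW_OP_bra consumes the condition; when it is nonzero, control jumps
   straight to the final DW_OP_drop, which discards op2 and leaves op1.
   When it is zero, the fall-through DW_OP_swap followed by the same
   DW_OP_drop discards op1 and leaves op2 instead.  */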
16314
16315 case FLOAT_EXTEND:
16316 case FLOAT_TRUNCATE:
16317 case FLOAT:
16318 case UNSIGNED_FLOAT:
16319 case FIX:
16320 case UNSIGNED_FIX:
16321 if (!dwarf_strict || dwarf_version >= 5)
16322 {
16323 dw_die_ref type_die;
16324 dw_loc_descr_ref cvt;
16325
16326 op0 = mem_loc_descriptor (XEXP (rtl, 0), GET_MODE (XEXP (rtl, 0)),
16327 mem_mode, VAR_INIT_STATUS_INITIALIZED);
16328 if (op0 == NULL)
16329 break;
16330 if (is_a <scalar_int_mode> (GET_MODE (XEXP (rtl, 0)), &int_mode)
16331 && (GET_CODE (rtl) == FLOAT
16332 || GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE))
16333 {
16334 type_die = base_type_for_mode (int_mode,
16335 GET_CODE (rtl) == UNSIGNED_FLOAT);
16336 if (type_die == NULL)
16337 break;
16338 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16339 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16340 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16341 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16342 add_loc_descr (&op0, cvt);
16343 }
16344 type_die = base_type_for_mode (mode, GET_CODE (rtl) == UNSIGNED_FIX);
16345 if (type_die == NULL)
16346 break;
16347 cvt = new_loc_descr (dwarf_OP (DW_OP_convert), 0, 0);
16348 cvt->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16349 cvt->dw_loc_oprnd1.v.val_die_ref.die = type_die;
16350 cvt->dw_loc_oprnd1.v.val_die_ref.external = 0;
16351 add_loc_descr (&op0, cvt);
16352 if (is_a <scalar_int_mode> (mode, &int_mode)
16353 && (GET_CODE (rtl) == FIX
16354 || GET_MODE_SIZE (int_mode) < DWARF2_ADDR_SIZE))
16355 {
16356 op0 = convert_descriptor_to_mode (int_mode, op0);
16357 if (op0 == NULL)
16358 break;
16359 }
16360 mem_loc_result = op0;
16361 }
16362 break;
16363
16364 case CLZ:
16365 case CTZ:
16366 case FFS:
16367 if (is_a <scalar_int_mode> (mode, &int_mode))
16368 mem_loc_result = clz_loc_descriptor (rtl, int_mode, mem_mode);
16369 break;
16370
16371 case POPCOUNT:
16372 case PARITY:
16373 if (is_a <scalar_int_mode> (mode, &int_mode))
16374 mem_loc_result = popcount_loc_descriptor (rtl, int_mode, mem_mode);
16375 break;
16376
16377 case BSWAP:
16378 if (is_a <scalar_int_mode> (mode, &int_mode))
16379 mem_loc_result = bswap_loc_descriptor (rtl, int_mode, mem_mode);
16380 break;
16381
16382 case ROTATE:
16383 case ROTATERT:
16384 if (is_a <scalar_int_mode> (mode, &int_mode))
16385 mem_loc_result = rotate_loc_descriptor (rtl, int_mode, mem_mode);
16386 break;
16387
16388 case COMPARE:
16389 /* In theory, we could implement the above. */
16390 /* DWARF cannot represent the unsigned compare operations
16391 natively. */
16392 case SS_MULT:
16393 case US_MULT:
16394 case SS_DIV:
16395 case US_DIV:
16396 case SS_PLUS:
16397 case US_PLUS:
16398 case SS_MINUS:
16399 case US_MINUS:
16400 case SS_NEG:
16401 case US_NEG:
16402 case SS_ABS:
16403 case SS_ASHIFT:
16404 case US_ASHIFT:
16405 case SS_TRUNCATE:
16406 case US_TRUNCATE:
16407 case UNORDERED:
16408 case ORDERED:
16409 case UNEQ:
16410 case UNGE:
16411 case UNGT:
16412 case UNLE:
16413 case UNLT:
16414 case LTGT:
16415 case FRACT_CONVERT:
16416 case UNSIGNED_FRACT_CONVERT:
16417 case SAT_FRACT:
16418 case UNSIGNED_SAT_FRACT:
16419 case SQRT:
16420 case ASM_OPERANDS:
16421 case VEC_MERGE:
16422 case VEC_SELECT:
16423 case VEC_CONCAT:
16424 case VEC_DUPLICATE:
16425 case VEC_SERIES:
16426 case HIGH:
16427 case FMA:
16428 case STRICT_LOW_PART:
16429 case CONST_VECTOR:
16430 case CONST_FIXED:
16431 case CLRSB:
16432 case CLOBBER:
16433 case CLOBBER_HIGH:
16434 break;
16435
16436 case CONST_STRING:
16437 resolve_one_addr (&rtl);
16438 goto symref;
16439
16440 /* RTL sequences inside a PARALLEL record a series of DWARF operations for
16441 the expression. An UNSPEC rtx represents a raw DWARF operation;
16442 new_loc_descr is called for it to build the operation directly.
16443 Otherwise mem_loc_descriptor is called recursively. */
16444 case PARALLEL:
16445 {
16446 int index = 0;
16447 dw_loc_descr_ref exp_result = NULL;
16448
16449 for (; index < XVECLEN (rtl, 0); index++)
16450 {
16451 rtx elem = XVECEXP (rtl, 0, index);
16452 if (GET_CODE (elem) == UNSPEC)
16453 {
16454 /* Each DWARF operation UNSPEC contains two operands; if
16455 an operand is not used for the operation, const0_rtx is
16456 passed. */
16457 gcc_assert (XVECLEN (elem, 0) == 2);
16458
16459 HOST_WIDE_INT dw_op = XINT (elem, 1);
16460 HOST_WIDE_INT oprnd1 = INTVAL (XVECEXP (elem, 0, 0));
16461 HOST_WIDE_INT oprnd2 = INTVAL (XVECEXP (elem, 0, 1));
16462 exp_result
16463 = new_loc_descr ((enum dwarf_location_atom) dw_op, oprnd1,
16464 oprnd2);
16465 }
16466 else
16467 exp_result
16468 = mem_loc_descriptor (elem, mode, mem_mode,
16469 VAR_INIT_STATUS_INITIALIZED);
16470
16471 if (!mem_loc_result)
16472 mem_loc_result = exp_result;
16473 else
16474 add_loc_descr (&mem_loc_result, exp_result);
16475 }
16476
16477 break;
16478 }
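/* A hypothetical element vector such as
     (parallel [(reg:DI 1)
		(unspec [(const_int 16) (const_int 0)] DW_OP_plus_uconst)])
   would thus append whatever mem_loc_descriptor produces for the register
   followed by a literal DW_OP_plus_uconst 16; the unspec number carries
   the DWARF opcode and the two constants its operands.  */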
16479
16480 default:
16481 if (flag_checking)
16482 {
16483 print_rtl (stderr, rtl);
16484 gcc_unreachable ();
16485 }
16486 break;
16487 }
16488
16489 if (mem_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16490 add_loc_descr (&mem_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16491
16492 return mem_loc_result;
16493 }
16494
16495 /* Return a descriptor that describes the concatenation of two locations.
16496 This is typically a complex variable. */
16497
16498 static dw_loc_descr_ref
16499 concat_loc_descriptor (rtx x0, rtx x1, enum var_init_status initialized)
16500 {
16501 /* At present we only track constant-sized pieces. */
16502 unsigned int size0, size1;
16503 if (!GET_MODE_SIZE (GET_MODE (x0)).is_constant (&size0)
16504 || !GET_MODE_SIZE (GET_MODE (x1)).is_constant (&size1))
16505 return 0;
16506
16507 dw_loc_descr_ref cc_loc_result = NULL;
16508 dw_loc_descr_ref x0_ref
16509 = loc_descriptor (x0, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16510 dw_loc_descr_ref x1_ref
16511 = loc_descriptor (x1, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16512
16513 if (x0_ref == 0 || x1_ref == 0)
16514 return 0;
16515
16516 cc_loc_result = x0_ref;
16517 add_loc_descr_op_piece (&cc_loc_result, size0);
16518
16519 add_loc_descr (&cc_loc_result, x1_ref);
16520 add_loc_descr_op_piece (&cc_loc_result, size1);
16521
16522 if (initialized == VAR_INIT_STATUS_UNINITIALIZED)
16523 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16524
16525 return cc_loc_result;
16526 }
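/* For instance, a complex double whose real and imaginary parts live in
   two registers comes out roughly as
     DW_OP_reg<r0> DW_OP_piece 8 DW_OP_reg<r1> DW_OP_piece 8
   with each DW_OP_piece sized by the mode of the corresponding half.  */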
16527
16528 /* Return a descriptor that describes the concatenation of N
16529 locations. */
16530
16531 static dw_loc_descr_ref
16532 concatn_loc_descriptor (rtx concatn, enum var_init_status initialized)
16533 {
16534 unsigned int i;
16535 dw_loc_descr_ref cc_loc_result = NULL;
16536 unsigned int n = XVECLEN (concatn, 0);
16537 unsigned int size;
16538
16539 for (i = 0; i < n; ++i)
16540 {
16541 dw_loc_descr_ref ref;
16542 rtx x = XVECEXP (concatn, 0, i);
16543
16544 /* At present we only track constant-sized pieces. */
16545 if (!GET_MODE_SIZE (GET_MODE (x)).is_constant (&size))
16546 return NULL;
16547
16548 ref = loc_descriptor (x, VOIDmode, VAR_INIT_STATUS_INITIALIZED);
16549 if (ref == NULL)
16550 return NULL;
16551
16552 add_loc_descr (&cc_loc_result, ref);
16553 add_loc_descr_op_piece (&cc_loc_result, size);
16554 }
16555
16556 if (cc_loc_result && initialized == VAR_INIT_STATUS_UNINITIALIZED)
16557 add_loc_descr (&cc_loc_result, new_loc_descr (DW_OP_GNU_uninit, 0, 0));
16558
16559 return cc_loc_result;
16560 }
16561
16562 /* Helper function for loc_descriptor. Return DW_OP_implicit_pointer
16563 for DEBUG_IMPLICIT_PTR RTL. */
16564
16565 static dw_loc_descr_ref
16566 implicit_ptr_descriptor (rtx rtl, HOST_WIDE_INT offset)
16567 {
16568 dw_loc_descr_ref ret;
16569 dw_die_ref ref;
16570
16571 if (dwarf_strict && dwarf_version < 5)
16572 return NULL;
16573 gcc_assert (TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == VAR_DECL
16574 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == PARM_DECL
16575 || TREE_CODE (DEBUG_IMPLICIT_PTR_DECL (rtl)) == RESULT_DECL);
16576 ref = lookup_decl_die (DEBUG_IMPLICIT_PTR_DECL (rtl));
16577 ret = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
16578 ret->dw_loc_oprnd2.val_class = dw_val_class_const;
16579 if (ref)
16580 {
16581 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
16582 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
16583 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
16584 }
16585 else
16586 {
16587 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
16588 ret->dw_loc_oprnd1.v.val_decl_ref = DEBUG_IMPLICIT_PTR_DECL (rtl);
16589 }
16590 return ret;
16591 }
16592
16593 /* Output a proper Dwarf location descriptor for a variable or parameter
16594 which is either allocated in a register or in a memory location. For a
16595 register, we just generate an OP_REG and the register number. For a
16596 memory location we provide a Dwarf postfix expression describing how to
16597 generate the (dynamic) address of the object onto the address stack.
16598
16599 MODE is mode of the decl if this loc_descriptor is going to be used in
16600 .debug_loc section where DW_OP_stack_value and DW_OP_implicit_value are
16601 allowed, VOIDmode otherwise.
16602
16603 If we don't know how to describe it, return 0. */
16604
16605 static dw_loc_descr_ref
16606 loc_descriptor (rtx rtl, machine_mode mode,
16607 enum var_init_status initialized)
16608 {
16609 dw_loc_descr_ref loc_result = NULL;
16610 scalar_int_mode int_mode;
16611
16612 switch (GET_CODE (rtl))
16613 {
16614 case SUBREG:
16615 /* The case of a subreg may arise when we have a local (register)
16616 variable or a formal (register) parameter which doesn't quite fill
16617 up an entire register. For now, just assume that it is
16618 legitimate to make the Dwarf info refer to the whole register which
16619 contains the given subreg. */
16620 if (REG_P (SUBREG_REG (rtl)) && subreg_lowpart_p (rtl))
16621 loc_result = loc_descriptor (SUBREG_REG (rtl),
16622 GET_MODE (SUBREG_REG (rtl)), initialized);
16623 else
16624 goto do_default;
16625 break;
16626
16627 case REG:
16628 loc_result = reg_loc_descriptor (rtl, initialized);
16629 break;
16630
16631 case MEM:
16632 loc_result = mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
16633 GET_MODE (rtl), initialized);
16634 if (loc_result == NULL)
16635 loc_result = tls_mem_loc_descriptor (rtl);
16636 if (loc_result == NULL)
16637 {
16638 rtx new_rtl = avoid_constant_pool_reference (rtl);
16639 if (new_rtl != rtl)
16640 loc_result = loc_descriptor (new_rtl, mode, initialized);
16641 }
16642 break;
16643
16644 case CONCAT:
16645 loc_result = concat_loc_descriptor (XEXP (rtl, 0), XEXP (rtl, 1),
16646 initialized);
16647 break;
16648
16649 case CONCATN:
16650 loc_result = concatn_loc_descriptor (rtl, initialized);
16651 break;
16652
16653 case VAR_LOCATION:
16654 /* Single part. */
16655 if (GET_CODE (PAT_VAR_LOCATION_LOC (rtl)) != PARALLEL)
16656 {
16657 rtx loc = PAT_VAR_LOCATION_LOC (rtl);
16658 if (GET_CODE (loc) == EXPR_LIST)
16659 loc = XEXP (loc, 0);
16660 loc_result = loc_descriptor (loc, mode, initialized);
16661 break;
16662 }
16663
16664 rtl = XEXP (rtl, 1);
16665 /* FALLTHRU */
16666
16667 case PARALLEL:
16668 {
16669 rtvec par_elems = XVEC (rtl, 0);
16670 int num_elem = GET_NUM_ELEM (par_elems);
16671 machine_mode mode;
16672 int i, size;
16673
16674 /* Create the first one, so we have something to add to. */
16675 loc_result = loc_descriptor (XEXP (RTVEC_ELT (par_elems, 0), 0),
16676 VOIDmode, initialized);
16677 if (loc_result == NULL)
16678 return NULL;
16679 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, 0), 0));
16680 /* At present we only track constant-sized pieces. */
16681 if (!GET_MODE_SIZE (mode).is_constant (&size))
16682 return NULL;
16683 add_loc_descr_op_piece (&loc_result, size);
16684 for (i = 1; i < num_elem; i++)
16685 {
16686 dw_loc_descr_ref temp;
16687
16688 temp = loc_descriptor (XEXP (RTVEC_ELT (par_elems, i), 0),
16689 VOIDmode, initialized);
16690 if (temp == NULL)
16691 return NULL;
16692 add_loc_descr (&loc_result, temp);
16693 mode = GET_MODE (XEXP (RTVEC_ELT (par_elems, i), 0));
16694 /* At present we only track constant-sized pieces. */
16695 if (!GET_MODE_SIZE (mode).is_constant (&size))
16696 return NULL;
16697 add_loc_descr_op_piece (&loc_result, size);
16698 }
16699 }
16700 break;
16701
16702 case CONST_INT:
16703 if (mode != VOIDmode && mode != BLKmode)
16704 {
16705 int_mode = as_a <scalar_int_mode> (mode);
16706 loc_result = address_of_int_loc_descriptor (GET_MODE_SIZE (int_mode),
16707 INTVAL (rtl));
16708 }
16709 break;
16710
16711 case CONST_DOUBLE:
16712 if (mode == VOIDmode)
16713 mode = GET_MODE (rtl);
16714
16715 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16716 {
16717 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16718
16719 /* Note that a CONST_DOUBLE rtx could represent either an integer
16720 or a floating-point constant. A CONST_DOUBLE is used whenever
16721 the constant requires more than one word in order to be
16722 adequately represented. We output CONST_DOUBLEs as blocks. */
16723 scalar_mode smode = as_a <scalar_mode> (mode);
16724 loc_result = new_loc_descr (DW_OP_implicit_value,
16725 GET_MODE_SIZE (smode), 0);
16726 #if TARGET_SUPPORTS_WIDE_INT == 0
16727 if (!SCALAR_FLOAT_MODE_P (smode))
16728 {
16729 loc_result->dw_loc_oprnd2.val_class = dw_val_class_const_double;
16730 loc_result->dw_loc_oprnd2.v.val_double
16731 = rtx_to_double_int (rtl);
16732 }
16733 else
16734 #endif
16735 {
16736 unsigned int length = GET_MODE_SIZE (smode);
16737 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
16738
16739 insert_float (rtl, array);
16740 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16741 loc_result->dw_loc_oprnd2.v.val_vec.length = length / 4;
16742 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = 4;
16743 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16744 }
16745 }
16746 break;
16747
16748 case CONST_WIDE_INT:
16749 if (mode == VOIDmode)
16750 mode = GET_MODE (rtl);
16751
16752 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16753 {
16754 int_mode = as_a <scalar_int_mode> (mode);
16755 loc_result = new_loc_descr (DW_OP_implicit_value,
16756 GET_MODE_SIZE (int_mode), 0);
16757 loc_result->dw_loc_oprnd2.val_class = dw_val_class_wide_int;
16758 loc_result->dw_loc_oprnd2.v.val_wide = ggc_alloc<wide_int> ();
16759 *loc_result->dw_loc_oprnd2.v.val_wide = rtx_mode_t (rtl, int_mode);
16760 }
16761 break;
16762
16763 case CONST_VECTOR:
16764 if (mode == VOIDmode)
16765 mode = GET_MODE (rtl);
16766
16767 if (mode != VOIDmode && (dwarf_version >= 4 || !dwarf_strict))
16768 {
16769 unsigned int length;
16770 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
16771 return NULL;
16772
16773 unsigned int elt_size = GET_MODE_UNIT_SIZE (GET_MODE (rtl));
16774 unsigned char *array
16775 = ggc_vec_alloc<unsigned char> (length * elt_size);
16776 unsigned int i;
16777 unsigned char *p;
16778 machine_mode imode = GET_MODE_INNER (mode);
16779
16780 gcc_assert (mode == GET_MODE (rtl) || VOIDmode == GET_MODE (rtl));
16781 switch (GET_MODE_CLASS (mode))
16782 {
16783 case MODE_VECTOR_INT:
16784 for (i = 0, p = array; i < length; i++, p += elt_size)
16785 {
16786 rtx elt = CONST_VECTOR_ELT (rtl, i);
16787 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
16788 }
16789 break;
16790
16791 case MODE_VECTOR_FLOAT:
16792 for (i = 0, p = array; i < length; i++, p += elt_size)
16793 {
16794 rtx elt = CONST_VECTOR_ELT (rtl, i);
16795 insert_float (elt, p);
16796 }
16797 break;
16798
16799 default:
16800 gcc_unreachable ();
16801 }
16802
16803 loc_result = new_loc_descr (DW_OP_implicit_value,
16804 length * elt_size, 0);
16805 loc_result->dw_loc_oprnd2.val_class = dw_val_class_vec;
16806 loc_result->dw_loc_oprnd2.v.val_vec.length = length;
16807 loc_result->dw_loc_oprnd2.v.val_vec.elt_size = elt_size;
16808 loc_result->dw_loc_oprnd2.v.val_vec.array = array;
16809 }
16810 break;
16811
16812 case CONST:
16813 if (mode == VOIDmode
16814 || CONST_SCALAR_INT_P (XEXP (rtl, 0))
16815 || CONST_DOUBLE_AS_FLOAT_P (XEXP (rtl, 0))
16816 || GET_CODE (XEXP (rtl, 0)) == CONST_VECTOR)
16817 {
16818 loc_result = loc_descriptor (XEXP (rtl, 0), mode, initialized);
16819 break;
16820 }
16821 /* FALLTHROUGH */
16822 case SYMBOL_REF:
16823 if (!const_ok_for_output (rtl))
16824 break;
16825 /* FALLTHROUGH */
16826 case LABEL_REF:
16827 if (is_a <scalar_int_mode> (mode, &int_mode)
16828 && GET_MODE_SIZE (int_mode) == DWARF2_ADDR_SIZE
16829 && (dwarf_version >= 4 || !dwarf_strict))
16830 {
16831 loc_result = new_addr_loc_descr (rtl, dtprel_false);
16832 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
16833 vec_safe_push (used_rtx_array, rtl);
16834 }
16835 break;
16836
16837 case DEBUG_IMPLICIT_PTR:
16838 loc_result = implicit_ptr_descriptor (rtl, 0);
16839 break;
16840
16841 case PLUS:
16842 if (GET_CODE (XEXP (rtl, 0)) == DEBUG_IMPLICIT_PTR
16843 && CONST_INT_P (XEXP (rtl, 1)))
16844 {
16845 loc_result
16846 = implicit_ptr_descriptor (XEXP (rtl, 0), INTVAL (XEXP (rtl, 1)));
16847 break;
16848 }
16849 /* FALLTHRU */
16850 do_default:
16851 default:
16852 if ((is_a <scalar_int_mode> (mode, &int_mode)
16853 && GET_MODE (rtl) == int_mode
16854 && GET_MODE_SIZE (int_mode) <= DWARF2_ADDR_SIZE
16855 && dwarf_version >= 4)
16856 || (!dwarf_strict && mode != VOIDmode && mode != BLKmode))
16857 {
16858 /* Value expression. */
16859 loc_result = mem_loc_descriptor (rtl, mode, VOIDmode, initialized);
16860 if (loc_result)
16861 add_loc_descr (&loc_result,
16862 new_loc_descr (DW_OP_stack_value, 0, 0));
16863 }
16864 break;
16865 }
16866
16867 return loc_result;
16868 }
16869
16870 /* We need to figure out what section we should use as the base for the
16871 address ranges where a given location is valid.
16872 1. If this particular DECL has a section associated with it, use that.
16873 2. If this function has a section associated with it, use that.
16874 3. Otherwise, use the text section.
16875 XXX: If you split a variable across multiple sections, we won't notice. */
16876
16877 static const char *
16878 secname_for_decl (const_tree decl)
16879 {
16880 const char *secname;
16881
16882 if (VAR_OR_FUNCTION_DECL_P (decl)
16883 && (DECL_EXTERNAL (decl) || TREE_PUBLIC (decl) || TREE_STATIC (decl))
16884 && DECL_SECTION_NAME (decl))
16885 secname = DECL_SECTION_NAME (decl);
16886 else if (current_function_decl && DECL_SECTION_NAME (current_function_decl))
16887 {
16888 if (in_cold_section_p)
16889 {
16890 section *sec = current_function_section ();
16891 if (sec->common.flags & SECTION_NAMED)
16892 return sec->named.name;
16893 }
16894 secname = DECL_SECTION_NAME (current_function_decl);
16895 }
16896 else if (cfun && in_cold_section_p)
16897 secname = crtl->subsections.cold_section_label;
16898 else
16899 secname = text_section_label;
16900
16901 return secname;
16902 }
16903
16904 /* Return true when DECL_BY_REFERENCE is defined and set for DECL. */
16905
16906 static bool
16907 decl_by_reference_p (tree decl)
16908 {
16909 return ((TREE_CODE (decl) == PARM_DECL || TREE_CODE (decl) == RESULT_DECL
16910 || VAR_P (decl))
16911 && DECL_BY_REFERENCE (decl));
16912 }
16913
16914 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
16915 for VARLOC. */
16916
16917 static dw_loc_descr_ref
16918 dw_loc_list_1 (tree loc, rtx varloc, int want_address,
16919 enum var_init_status initialized)
16920 {
16921 int have_address = 0;
16922 dw_loc_descr_ref descr;
16923 machine_mode mode;
16924
16925 if (want_address != 2)
16926 {
16927 gcc_assert (GET_CODE (varloc) == VAR_LOCATION);
16928 /* Single part. */
16929 if (GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
16930 {
16931 varloc = PAT_VAR_LOCATION_LOC (varloc);
16932 if (GET_CODE (varloc) == EXPR_LIST)
16933 varloc = XEXP (varloc, 0);
16934 mode = GET_MODE (varloc);
16935 if (MEM_P (varloc))
16936 {
16937 rtx addr = XEXP (varloc, 0);
16938 descr = mem_loc_descriptor (addr, get_address_mode (varloc),
16939 mode, initialized);
16940 if (descr)
16941 have_address = 1;
16942 else
16943 {
16944 rtx x = avoid_constant_pool_reference (varloc);
16945 if (x != varloc)
16946 descr = mem_loc_descriptor (x, mode, VOIDmode,
16947 initialized);
16948 }
16949 }
16950 else
16951 descr = mem_loc_descriptor (varloc, mode, VOIDmode, initialized);
16952 }
16953 else
16954 return 0;
16955 }
16956 else
16957 {
16958 if (GET_CODE (varloc) == VAR_LOCATION)
16959 mode = DECL_MODE (PAT_VAR_LOCATION_DECL (varloc));
16960 else
16961 mode = DECL_MODE (loc);
16962 descr = loc_descriptor (varloc, mode, initialized);
16963 have_address = 1;
16964 }
16965
16966 if (!descr)
16967 return 0;
16968
16969 if (want_address == 2 && !have_address
16970 && (dwarf_version >= 4 || !dwarf_strict))
16971 {
16972 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
16973 {
16974 expansion_failed (loc, NULL_RTX,
16975 "DWARF address size mismatch");
16976 return 0;
16977 }
16978 add_loc_descr (&descr, new_loc_descr (DW_OP_stack_value, 0, 0));
16979 have_address = 1;
16980 }
16981 /* Show if we can't fill the request for an address. */
16982 if (want_address && !have_address)
16983 {
16984 expansion_failed (loc, NULL_RTX,
16985 "Want address and only have value");
16986 return 0;
16987 }
16988
16989 /* If we've got an address and don't want one, dereference. */
16990 if (!want_address && have_address)
16991 {
16992 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
16993 enum dwarf_location_atom op;
16994
16995 if (size > DWARF2_ADDR_SIZE || size == -1)
16996 {
16997 expansion_failed (loc, NULL_RTX,
16998 "DWARF address size mismatch");
16999 return 0;
17000 }
17001 else if (size == DWARF2_ADDR_SIZE)
17002 op = DW_OP_deref;
17003 else
17004 op = DW_OP_deref_size;
17005
17006 add_loc_descr (&descr, new_loc_descr (op, size, 0));
17007 }
17008
17009 return descr;
17010 }
17011
17012 /* Create a DW_OP_piece or DW_OP_bit_piece for bitsize, or return NULL
17013 if it is not possible. */
17014
17015 static dw_loc_descr_ref
17016 new_loc_descr_op_bit_piece (HOST_WIDE_INT bitsize, HOST_WIDE_INT offset)
17017 {
17018 if ((bitsize % BITS_PER_UNIT) == 0 && offset == 0)
17019 return new_loc_descr (DW_OP_piece, bitsize / BITS_PER_UNIT, 0);
17020 else if (dwarf_version >= 3 || !dwarf_strict)
17021 return new_loc_descr (DW_OP_bit_piece, bitsize, offset);
17022 else
17023 return NULL;
17024 }
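/* E.g. a byte-aligned 32-bit piece becomes DW_OP_piece 4, while an
   unaligned 12-bit slice at bit offset 4 needs DW_OP_bit_piece 12 4,
   which the function refuses to emit for strict pre-DWARF-3 output.  */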
17025
17026 /* Helper function for dw_loc_list. Compute proper Dwarf location descriptor
17027 for the piece list LOC of variable DECL, which has been optimized by SRA. */
17028
17029 static dw_loc_descr_ref
17030 dw_sra_loc_expr (tree decl, rtx loc)
17031 {
17032 rtx p;
17033 unsigned HOST_WIDE_INT padsize = 0;
17034 dw_loc_descr_ref descr, *descr_tail;
17035 unsigned HOST_WIDE_INT decl_size;
17036 rtx varloc;
17037 enum var_init_status initialized;
17038
17039 if (DECL_SIZE (decl) == NULL
17040 || !tree_fits_uhwi_p (DECL_SIZE (decl)))
17041 return NULL;
17042
17043 decl_size = tree_to_uhwi (DECL_SIZE (decl));
17044 descr = NULL;
17045 descr_tail = &descr;
17046
17047 for (p = loc; p; p = XEXP (p, 1))
17048 {
17049 unsigned HOST_WIDE_INT bitsize = decl_piece_bitsize (p);
17050 rtx loc_note = *decl_piece_varloc_ptr (p);
17051 dw_loc_descr_ref cur_descr;
17052 dw_loc_descr_ref *tail, last = NULL;
17053 unsigned HOST_WIDE_INT opsize = 0;
17054
17055 if (loc_note == NULL_RTX
17056 || NOTE_VAR_LOCATION_LOC (loc_note) == NULL_RTX)
17057 {
17058 padsize += bitsize;
17059 continue;
17060 }
17061 initialized = NOTE_VAR_LOCATION_STATUS (loc_note);
17062 varloc = NOTE_VAR_LOCATION (loc_note);
17063 cur_descr = dw_loc_list_1 (decl, varloc, 2, initialized);
17064 if (cur_descr == NULL)
17065 {
17066 padsize += bitsize;
17067 continue;
17068 }
17069
17070 /* Check that cur_descr either doesn't use
17071 DW_OP_*piece operations, or their sum is equal
17072 to bitsize. Otherwise we can't embed it. */
17073 for (tail = &cur_descr; *tail != NULL;
17074 tail = &(*tail)->dw_loc_next)
17075 if ((*tail)->dw_loc_opc == DW_OP_piece)
17076 {
17077 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned
17078 * BITS_PER_UNIT;
17079 last = *tail;
17080 }
17081 else if ((*tail)->dw_loc_opc == DW_OP_bit_piece)
17082 {
17083 opsize += (*tail)->dw_loc_oprnd1.v.val_unsigned;
17084 last = *tail;
17085 }
17086
17087 if (last != NULL && opsize != bitsize)
17088 {
17089 padsize += bitsize;
17090 /* Discard the current piece of the descriptor and release any
17091 addr_table entries it uses. */
17092 remove_loc_list_addr_table_entries (cur_descr);
17093 continue;
17094 }
17095
17096 /* If there is a hole, add a DW_OP_*piece after an empty DWARF
17097 expression, which means that those bits are optimized out. */
17098 if (padsize)
17099 {
17100 if (padsize > decl_size)
17101 {
17102 remove_loc_list_addr_table_entries (cur_descr);
17103 goto discard_descr;
17104 }
17105 decl_size -= padsize;
17106 *descr_tail = new_loc_descr_op_bit_piece (padsize, 0);
17107 if (*descr_tail == NULL)
17108 {
17109 remove_loc_list_addr_table_entries (cur_descr);
17110 goto discard_descr;
17111 }
17112 descr_tail = &(*descr_tail)->dw_loc_next;
17113 padsize = 0;
17114 }
17115 *descr_tail = cur_descr;
17116 descr_tail = tail;
17117 if (bitsize > decl_size)
17118 goto discard_descr;
17119 decl_size -= bitsize;
17120 if (last == NULL)
17121 {
17122 HOST_WIDE_INT offset = 0;
17123 if (GET_CODE (varloc) == VAR_LOCATION
17124 && GET_CODE (PAT_VAR_LOCATION_LOC (varloc)) != PARALLEL)
17125 {
17126 varloc = PAT_VAR_LOCATION_LOC (varloc);
17127 if (GET_CODE (varloc) == EXPR_LIST)
17128 varloc = XEXP (varloc, 0);
17129 }
17130 do
17131 {
17132 if (GET_CODE (varloc) == CONST
17133 || GET_CODE (varloc) == SIGN_EXTEND
17134 || GET_CODE (varloc) == ZERO_EXTEND)
17135 varloc = XEXP (varloc, 0);
17136 else if (GET_CODE (varloc) == SUBREG)
17137 varloc = SUBREG_REG (varloc);
17138 else
17139 break;
17140 }
17141 while (1);
17142 /* The DW_OP_bit_piece offset should be zero for register
17143 or implicit location descriptions and empty location
17144 descriptions, but for memory addresses it needs big-endian
17145 adjustment. */
17146 if (MEM_P (varloc))
17147 {
17148 unsigned HOST_WIDE_INT memsize;
17149 if (!poly_uint64 (MEM_SIZE (varloc)).is_constant (&memsize))
17150 goto discard_descr;
17151 memsize *= BITS_PER_UNIT;
17152 if (memsize != bitsize)
17153 {
17154 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
17155 && (memsize > BITS_PER_WORD || bitsize > BITS_PER_WORD))
17156 goto discard_descr;
17157 if (memsize < bitsize)
17158 goto discard_descr;
17159 if (BITS_BIG_ENDIAN)
17160 offset = memsize - bitsize;
17161 }
17162 }
17163
17164 *descr_tail = new_loc_descr_op_bit_piece (bitsize, offset);
17165 if (*descr_tail == NULL)
17166 goto discard_descr;
17167 descr_tail = &(*descr_tail)->dw_loc_next;
17168 }
17169 }
17170
17171 /* If there were any non-empty expressions, add padding till the end of
17172 the decl. */
17173 if (descr != NULL && decl_size != 0)
17174 {
17175 *descr_tail = new_loc_descr_op_bit_piece (decl_size, 0);
17176 if (*descr_tail == NULL)
17177 goto discard_descr;
17178 }
17179 return descr;
17180
17181 discard_descr:
17182 /* Discard the descriptor and release any addr_table entries it uses. */
17183 remove_loc_list_addr_table_entries (descr);
17184 return NULL;
17185 }
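/* A sketch of the output of dw_sra_loc_expr: for an 8-byte variable whose
   first half was optimized out and whose second half lives in a register,
   the loop above produces something like
     DW_OP_piece 4 DW_OP_reg<n> DW_OP_piece 4
   where the leading piece with no preceding expression marks the missing
   bits, and a trailing padding piece is appended just before returning if
   any bits remain uncovered.  */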
17186
17187 /* Return the dwarf representation of the location list LOC_LIST of
17188 DECL. WANT_ADDRESS has the same meaning as in loc_list_from_tree
17189 function. */
17190
17191 static dw_loc_list_ref
17192 dw_loc_list (var_loc_list *loc_list, tree decl, int want_address)
17193 {
17194 const char *endname, *secname;
17195 var_loc_view endview;
17196 rtx varloc;
17197 enum var_init_status initialized;
17198 struct var_loc_node *node;
17199 dw_loc_descr_ref descr;
17200 char label_id[MAX_ARTIFICIAL_LABEL_BYTES];
17201 dw_loc_list_ref list = NULL;
17202 dw_loc_list_ref *listp = &list;
17203
17204 /* Now that we know what section we are using for a base,
17205 actually construct the list of locations.
17206 The first location information is what is passed to the
17207 function that creates the location list, and the remaining
17208 locations just get added on to that list.
17209 Note that we only know the start address for a location
17210 (i.e. where the location changes), so to build the range, we use
17211 the range [current location start, next location start].
17212 This means we have to special case the last node, and generate
17213 a range of [last location start, end of function label]. */
17214
17215 if (cfun && crtl->has_bb_partition)
17216 {
17217 bool save_in_cold_section_p = in_cold_section_p;
17218 in_cold_section_p = first_function_block_is_cold;
17219 if (loc_list->last_before_switch == NULL)
17220 in_cold_section_p = !in_cold_section_p;
17221 secname = secname_for_decl (decl);
17222 in_cold_section_p = save_in_cold_section_p;
17223 }
17224 else
17225 secname = secname_for_decl (decl);
17226
17227 for (node = loc_list->first; node; node = node->next)
17228 {
17229 bool range_across_switch = false;
17230 if (GET_CODE (node->loc) == EXPR_LIST
17231 || NOTE_VAR_LOCATION_LOC (node->loc) != NULL_RTX)
17232 {
17233 if (GET_CODE (node->loc) == EXPR_LIST)
17234 {
17235 descr = NULL;
17236 /* This requires DW_OP_{,bit_}piece, which is not usable
17237 inside DWARF expressions. */
17238 if (want_address == 2)
17239 descr = dw_sra_loc_expr (decl, node->loc);
17240 }
17241 else
17242 {
17243 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17244 varloc = NOTE_VAR_LOCATION (node->loc);
17245 descr = dw_loc_list_1 (decl, varloc, want_address, initialized);
17246 }
17247 if (descr)
17248 {
17249 /* If a section switch happens between node->label
17250 and node->next->label (or the end of the function) and
17251 we can't emit it as a single entry list,
17252 emit two ranges, the first one ending at the end
17253 of the first partition and the second one starting at the
17254 beginning of the second partition. */
17255 if (node == loc_list->last_before_switch
17256 && (node != loc_list->first || loc_list->first->next
17257 /* If we are to emit a view number, we will emit
17258 a loclist rather than a single location
17259 expression for the entire function (see
17260 loc_list_has_views), so we have to split the
17261 range that straddles across partitions. */
17262 || !ZERO_VIEW_P (node->view))
17263 && current_function_decl)
17264 {
17265 endname = cfun->fde->dw_fde_end;
17266 endview = 0;
17267 range_across_switch = true;
17268 }
17269 /* The variable has a location between NODE->LABEL and
17270 NODE->NEXT->LABEL. */
17271 else if (node->next)
17272 endname = node->next->label, endview = node->next->view;
17273 /* If the variable has a location at the last label
17274 it keeps its location until the end of function. */
17275 else if (!current_function_decl)
17276 endname = text_end_label, endview = 0;
17277 else
17278 {
17279 ASM_GENERATE_INTERNAL_LABEL (label_id, FUNC_END_LABEL,
17280 current_function_funcdef_no);
17281 endname = ggc_strdup (label_id);
17282 endview = 0;
17283 }
17284
17285 *listp = new_loc_list (descr, node->label, node->view,
17286 endname, endview, secname);
17287 if (TREE_CODE (decl) == PARM_DECL
17288 && node == loc_list->first
17289 && NOTE_P (node->loc)
17290 && strcmp (node->label, endname) == 0)
17291 (*listp)->force = true;
17292 listp = &(*listp)->dw_loc_next;
17293 }
17294 }
17295
17296 if (cfun
17297 && crtl->has_bb_partition
17298 && node == loc_list->last_before_switch)
17299 {
17300 bool save_in_cold_section_p = in_cold_section_p;
17301 in_cold_section_p = !first_function_block_is_cold;
17302 secname = secname_for_decl (decl);
17303 in_cold_section_p = save_in_cold_section_p;
17304 }
17305
17306 if (range_across_switch)
17307 {
17308 if (GET_CODE (node->loc) == EXPR_LIST)
17309 descr = dw_sra_loc_expr (decl, node->loc);
17310 else
17311 {
17312 initialized = NOTE_VAR_LOCATION_STATUS (node->loc);
17313 varloc = NOTE_VAR_LOCATION (node->loc);
17314 descr = dw_loc_list_1 (decl, varloc, want_address,
17315 initialized);
17316 }
17317 gcc_assert (descr);
17318 /* The variable has a location between NODE->LABEL and
17319 NODE->NEXT->LABEL. */
17320 if (node->next)
17321 endname = node->next->label, endview = node->next->view;
17322 else
17323 endname = cfun->fde->dw_fde_second_end, endview = 0;
17324 *listp = new_loc_list (descr, cfun->fde->dw_fde_second_begin, 0,
17325 endname, endview, secname);
17326 listp = &(*listp)->dw_loc_next;
17327 }
17328 }
17329
17330 /* Try to avoid the overhead of a location list by emitting a location
17331 expression instead, but only if we didn't have more than one
17332 location entry in the first place. If some entries were not
17333 representable, we don't want to pretend that a single entry that
17334 was representable applies to the entire scope in which the
17335 variable is available. */
17336 if (list && loc_list->first->next)
17337 gen_llsym (list);
17338 else
17339 maybe_gen_llsym (list);
17340
17341 return list;
17342 }
17343
17344 /* Return true if the loc_list has only a single element and thus can be
17345 represented as a location description. */
17346
17347 static bool
17348 single_element_loc_list_p (dw_loc_list_ref list)
17349 {
17350 gcc_assert (!list->dw_loc_next || list->ll_symbol);
17351 return !list->ll_symbol;
17352 }
17353
17354 /* Duplicate a single element of location list. */
17355
17356 static inline dw_loc_descr_ref
17357 copy_loc_descr (dw_loc_descr_ref ref)
17358 {
17359 dw_loc_descr_ref copy = ggc_alloc<dw_loc_descr_node> ();
17360 memcpy (copy, ref, sizeof (dw_loc_descr_node));
17361 return copy;
17362 }
17363
17364 /* To each location in list LIST append loc descr REF. */
17365
17366 static void
17367 add_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17368 {
17369 dw_loc_descr_ref copy;
17370 add_loc_descr (&list->expr, ref);
17371 list = list->dw_loc_next;
17372 while (list)
17373 {
17374 copy = copy_loc_descr (ref);
17375 add_loc_descr (&list->expr, copy);
17376 while (copy->dw_loc_next)
17377 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17378 list = list->dw_loc_next;
17379 }
17380 }
17381
17382 /* To each location in list LIST prepend loc descr REF. */
17383
17384 static void
17385 prepend_loc_descr_to_each (dw_loc_list_ref list, dw_loc_descr_ref ref)
17386 {
17387 dw_loc_descr_ref copy;
17388 dw_loc_descr_ref ref_end = list->expr;
17389 add_loc_descr (&ref, list->expr);
17390 list->expr = ref;
17391 list = list->dw_loc_next;
17392 while (list)
17393 {
17394 dw_loc_descr_ref end = list->expr;
17395 list->expr = copy = copy_loc_descr (ref);
17396 while (copy->dw_loc_next != ref_end)
17397 copy = copy->dw_loc_next = copy_loc_descr (copy->dw_loc_next);
17398 copy->dw_loc_next = end;
17399 list = list->dw_loc_next;
17400 }
17401 }
17402
17403 /* Given two lists RET and LIST, produce the location list that results
17404 from adding the expression in LIST to the expression in RET at each
17405 position in the program.
17406 Might be destructive on both RET and LIST.
17407
17408 TODO: We handle only the simple cases of RET or LIST having at most one
17409 element. The general case would involve sorting the lists in program
17410 order and merging them, which will need some additional work.
17411 Adding that will improve the quality of debug info, especially for
17412 SRA-ed structures. */
17413
17414 static void
17415 add_loc_list (dw_loc_list_ref *ret, dw_loc_list_ref list)
17416 {
17417 if (!list)
17418 return;
17419 if (!*ret)
17420 {
17421 *ret = list;
17422 return;
17423 }
17424 if (!list->dw_loc_next)
17425 {
17426 add_loc_descr_to_each (*ret, list->expr);
17427 return;
17428 }
17429 if (!(*ret)->dw_loc_next)
17430 {
17431 prepend_loc_descr_to_each (list, (*ret)->expr);
17432 *ret = list;
17433 return;
17434 }
17435 expansion_failed (NULL_TREE, NULL_RTX,
17436 "Don't know how to merge two non-trivial"
17437 " location lists.\n");
17438 *ret = NULL;
17439 return;
17440 }
17441
17442 /* LOC is a constant expression. Try looking it up in the constant
17443 pool and return the loc_descr of its address. */
17444
17445 static dw_loc_descr_ref
17446 cst_pool_loc_descr (tree loc)
17447 {
17448 /* Get an RTL for this, if something has been emitted. */
17449 rtx rtl = lookup_constant_def (loc);
17450
17451 if (!rtl || !MEM_P (rtl))
17452 {
17453 gcc_assert (!rtl);
17454 return 0;
17455 }
17456 gcc_assert (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF);
17457
17458 /* TODO: We might get more coverage if we were actually delaying expansion
17459 of all expressions until the end of compilation, when constant pools are
17460 fully populated. */
17461 if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (XEXP (rtl, 0))))
17462 {
17463 expansion_failed (loc, NULL_RTX,
17464 "CST value in contant pool but not marked.");
17465 return 0;
17466 }
17467 return mem_loc_descriptor (XEXP (rtl, 0), get_address_mode (rtl),
17468 GET_MODE (rtl), VAR_INIT_STATUS_INITIALIZED);
17469 }
17470
17471 /* Return a dw_loc_list representing the address of the ADDR_EXPR LOC
17472 by looking for an inner INDIRECT_REF expression and turning
17473 it into simple arithmetic.
17474
17475 See loc_list_from_tree for the meaning of CONTEXT. */
17476
17477 static dw_loc_list_ref
17478 loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev,
17479 loc_descr_context *context)
17480 {
17481 tree obj, offset;
17482 poly_int64 bitsize, bitpos, bytepos;
17483 machine_mode mode;
17484 int unsignedp, reversep, volatilep = 0;
17485 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
17486
17487 obj = get_inner_reference (TREE_OPERAND (loc, 0),
17488 &bitsize, &bitpos, &offset, &mode,
17489 &unsignedp, &reversep, &volatilep);
17490 STRIP_NOPS (obj);
17491 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
17492 {
17493 expansion_failed (loc, NULL_RTX, "bitfield access");
17494 return 0;
17495 }
17496 if (!INDIRECT_REF_P (obj))
17497 {
17498 expansion_failed (obj,
17499 NULL_RTX, "no indirect ref in inner refrence");
17500 return 0;
17501 }
17502 if (!offset && known_eq (bitpos, 0))
17503 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), toplev ? 2 : 1,
17504 context);
17505 else if (toplev
17506 && int_size_in_bytes (TREE_TYPE (loc)) <= DWARF2_ADDR_SIZE
17507 && (dwarf_version >= 4 || !dwarf_strict))
17508 {
17509 list_ret = loc_list_from_tree (TREE_OPERAND (obj, 0), 0, context);
17510 if (!list_ret)
17511 return 0;
17512 if (offset)
17513 {
17514 /* Variable offset. */
17515 list_ret1 = loc_list_from_tree (offset, 0, context);
17516 if (list_ret1 == 0)
17517 return 0;
17518 add_loc_list (&list_ret, list_ret1);
17519 if (!list_ret)
17520 return 0;
17521 add_loc_descr_to_each (list_ret,
17522 new_loc_descr (DW_OP_plus, 0, 0));
17523 }
17524 HOST_WIDE_INT value;
17525 if (bytepos.is_constant (&value) && value > 0)
17526 add_loc_descr_to_each (list_ret,
17527 new_loc_descr (DW_OP_plus_uconst, value, 0));
17528 else if (maybe_ne (bytepos, 0))
17529 loc_list_plus_const (list_ret, bytepos);
17530 add_loc_descr_to_each (list_ret,
17531 new_loc_descr (DW_OP_stack_value, 0, 0));
17532 }
17533 return list_ret;
17534 }
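
/* Editorial illustration (a hedged sketch with a hypothetical layout): for
   an expression like "&p->f", where "f" lives at byte offset 8 of "*p",
   the inner reference decomposes into the INDIRECT_REF "*p" plus a
   constant byte offset, so the function can return the location list that
   computes "p", followed by "DW_OP_plus_uconst 8" and, in the top-level
   DWARF 4 case handled above, a trailing "DW_OP_stack_value" to mark the
   result as a value rather than a memory location.  */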
17535
17536 /* Set LOC to the next operation that is not a DW_OP_nop operation. In case
17537 all operations from LOC are nops, move to the last one. Insert into NOPS all
17538 operations that are skipped. */
17539
17540 static void
17541 loc_descr_to_next_no_nop (dw_loc_descr_ref &loc,
17542 hash_set<dw_loc_descr_ref> &nops)
17543 {
17544 while (loc->dw_loc_next != NULL && loc->dw_loc_opc == DW_OP_nop)
17545 {
17546 nops.add (loc);
17547 loc = loc->dw_loc_next;
17548 }
17549 }
17550
17551 /* Helper for loc_descr_without_nops: free the location description operation
17552 P. */
17553
17554 bool
17555 free_loc_descr (const dw_loc_descr_ref &loc, void *data ATTRIBUTE_UNUSED)
17556 {
17557 ggc_free (loc);
17558 return true;
17559 }
17560
17561 /* Remove all DW_OP_nop operations from LOC except, if it exists, the one that
17562 finishes LOC. */
17563
17564 static void
17565 loc_descr_without_nops (dw_loc_descr_ref &loc)
17566 {
17567 if (loc->dw_loc_opc == DW_OP_nop && loc->dw_loc_next == NULL)
17568 return;
17569
17570 /* Set of all DW_OP_nop operations we remove. */
17571 hash_set<dw_loc_descr_ref> nops;
17572
17573 /* First, strip all prefix NOP operations in order to keep the head of the
17574 operations list. */
17575 loc_descr_to_next_no_nop (loc, nops);
17576
17577 for (dw_loc_descr_ref cur = loc; cur != NULL;)
17578 {
17579 /* For control flow operations: strip "prefix" nops in destination
17580 labels. */
17581 if (cur->dw_loc_oprnd1.val_class == dw_val_class_loc)
17582 loc_descr_to_next_no_nop (cur->dw_loc_oprnd1.v.val_loc, nops);
17583 if (cur->dw_loc_oprnd2.val_class == dw_val_class_loc)
17584 loc_descr_to_next_no_nop (cur->dw_loc_oprnd2.v.val_loc, nops);
17585
17586 /* Do the same for the operations that follow, then move to the next
17587 iteration. */
17588 if (cur->dw_loc_next != NULL)
17589 loc_descr_to_next_no_nop (cur->dw_loc_next, nops);
17590 cur = cur->dw_loc_next;
17591 }
17592
17593 nops.traverse<void *, free_loc_descr> (NULL);
17594 }
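
/* Editorial note (a hedged sketch): the COND_EXPR translation further below
   uses a trailing DW_OP_nop merely as a landing pad for DW_OP_skip.  When
   such an expression later gains more operations (for instance the DWARF
   procedure epilogue), that nop is no longer the terminator, and the pass
   above both retargets the branch operands to the next real operation and
   frees the skipped nops.  */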
17595
17596
17597 struct dwarf_procedure_info;
17598
17599 /* Helper structure for location description generation. */
17600 struct loc_descr_context
17601 {
17602 /* The type that is implicitly referenced by DW_OP_push_object_address, or
17603 NULL_TREE if DW_OP_push_object_address is invalid for this location
17604 description. This is used when processing PLACEHOLDER_EXPR nodes. */
17605 tree context_type;
17606 /* The ..._DECL node that should be translated as a
17607 DW_OP_push_object_address operation. */
17608 tree base_decl;
17609 /* Information about the DWARF procedure we are currently generating. NULL if
17610 we are not generating a DWARF procedure. */
17611 struct dwarf_procedure_info *dpi;
17612 /* True if integral PLACEHOLDER_EXPR stands for the first argument passed
17613 by the consumer. Used for DW_TAG_generic_subrange attributes. */
17614 bool placeholder_arg;
17615 /* True if PLACEHOLDER_EXPR has been seen. */
17616 bool placeholder_seen;
17617 };
17618
17619 /* DWARF procedures generation
17620
17621 DWARF expressions (aka. location descriptions) are used to encode variable
17622 quantities such as sizes or offsets. Such computations can have redundant parts
17623 that can be factorized in order to reduce the size of the output debug
17624 information. This is the whole point of DWARF procedures.
17625
17626 Thanks to stor-layout.c, size and offset expressions in GENERIC trees are
17627 already factorized into functions ("size functions") in order to handle very
17628 big and complex types. Such functions are quite simple: they have integral
17629 arguments, they return an integral result and their body contains only a
17630 return statement with arithmetic expressions. This is the only kind of
17631 function we are interested in translating into DWARF procedures, here.
17632
17633 DWARF expressions and DWARF procedures are executed using a stack, so we have
17634 to define some calling convention for them to interact. Let's say that:
17635
17636 - Before calling a DWARF procedure, DWARF expressions must push on the stack
17637 all arguments in reverse order (right-to-left) so that when the DWARF
17638 procedure execution starts, the first argument is the top of the stack.
17639
17640 - Then, when returning, the DWARF procedure must have consumed all arguments
17641 on the stack, must have pushed the result and touched nothing else.
17642
17643 - Each integral argument and the result have integral types and can be held
17644 in a single stack slot.
17645
17646 - We call "frame offset" the number of stack slots that are "under DWARF
17647 procedure control": it includes the arguments slots, the temporaries and
17648 the result slot. Thus, it is equal to the number of arguments when the
17649 procedure execution starts and must be equal to one (the result) when it
17650 returns. */
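
/* Editorial illustration (a hedged sketch): for a hypothetical size function

     size_t sz (size_t a, size_t b) { return a + b; }

   the caller pushes "b" then "a", so the stack seen by the DWARF procedure
   on entry is [... b a] (frame offset 2, with "a", the first argument, on
   top).  The procedure body computes "a + b" and the generated epilogue
   drops the two incoming arguments, so on return the stack is [... a+b]
   (frame offset 1), as required by the convention above.  */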
17651
17652 /* Helper structure used when generating operations for a DWARF procedure. */
17653 struct dwarf_procedure_info
17654 {
17655 /* The FUNCTION_DECL node corresponding to the DWARF procedure that is
17656 currently translated. */
17657 tree fndecl;
17658 /* The number of arguments FNDECL takes. */
17659 unsigned args_count;
17660 };
17661
17662 /* Return a pointer to a newly created DIE node for a DWARF procedure. Add
17663 LOCATION as its DW_AT_location attribute. If FNDECL is not NULL_TREE,
17664 equate it to this DIE. */
17665
17666 static dw_die_ref
17667 new_dwarf_proc_die (dw_loc_descr_ref location, tree fndecl,
17668 dw_die_ref parent_die)
17669 {
17670 dw_die_ref dwarf_proc_die;
17671
17672 if ((dwarf_version < 3 && dwarf_strict)
17673 || location == NULL)
17674 return NULL;
17675
17676 dwarf_proc_die = new_die (DW_TAG_dwarf_procedure, parent_die, fndecl);
17677 if (fndecl)
17678 equate_decl_number_to_die (fndecl, dwarf_proc_die);
17679 add_AT_loc (dwarf_proc_die, DW_AT_location, location);
17680 return dwarf_proc_die;
17681 }
17682
17683 /* Return whether TYPE is a supported type as a DWARF procedure argument
17684 type or return type (we handle only scalar types and pointer types that
17685 aren't wider than the DWARF expression evaluation stack). */
17686
17687 static bool
17688 is_handled_procedure_type (tree type)
17689 {
17690 return ((INTEGRAL_TYPE_P (type)
17691 || TREE_CODE (type) == OFFSET_TYPE
17692 || TREE_CODE (type) == POINTER_TYPE)
17693 && int_size_in_bytes (type) <= DWARF2_ADDR_SIZE);
17694 }
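
/* Editorial illustration (a hedged sketch): on a target where
   DWARF2_ADDR_SIZE is 8, types such as "int", "long" and any pointer type
   are accepted, while a 16-byte integer or a floating-point type is
   rejected, so a size function taking or returning such a type is simply
   never turned into a DWARF procedure.  */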
17695
17696 /* Helper for resolve_args_picking: do the same but stop when coming across
17697 visited nodes. For each node we visit, register in FRAME_OFFSETS the frame
17698 offset *before* evaluating the corresponding operation. */
17699
17700 static bool
17701 resolve_args_picking_1 (dw_loc_descr_ref loc, unsigned initial_frame_offset,
17702 struct dwarf_procedure_info *dpi,
17703 hash_map<dw_loc_descr_ref, unsigned> &frame_offsets)
17704 {
17705 /* The "frame_offset" identifier is already used to name a macro... */
17706 unsigned frame_offset_ = initial_frame_offset;
17707 dw_loc_descr_ref l;
17708
17709 for (l = loc; l != NULL;)
17710 {
17711 bool existed;
17712 unsigned &l_frame_offset = frame_offsets.get_or_insert (l, &existed);
17713
17714 /* If we already met this node, there is nothing to compute anymore. */
17715 if (existed)
17716 {
17717 /* Make sure that the stack size is consistent wherever the execution
17718 flow comes from. */
17719 gcc_assert ((unsigned) l_frame_offset == frame_offset_);
17720 break;
17721 }
17722 l_frame_offset = frame_offset_;
17723
17724 /* If needed, relocate the picking offset with respect to the frame
17725 offset. */
17726 if (l->frame_offset_rel)
17727 {
17728 unsigned HOST_WIDE_INT off;
17729 switch (l->dw_loc_opc)
17730 {
17731 case DW_OP_pick:
17732 off = l->dw_loc_oprnd1.v.val_unsigned;
17733 break;
17734 case DW_OP_dup:
17735 off = 0;
17736 break;
17737 case DW_OP_over:
17738 off = 1;
17739 break;
17740 default:
17741 gcc_unreachable ();
17742 }
17743 /* frame_offset_ is the size of the current stack frame, including
17744 incoming arguments. Besides, the arguments are pushed
17745 right-to-left. Thus, in order to access the Nth argument from
17746 this operation node, the picking has to skip temporaries *plus*
17747 one stack slot per argument (0 for the first one, 1 for the second
17748 one, etc.).
17749
17750 The targeted argument number (N) is already set as the operand,
17751 and the number of temporaries can be computed with:
17752 frame_offset_ - dpi->args_count */
17753 off += frame_offset_ - dpi->args_count;
17754
17755 /* DW_OP_pick handles only offsets from 0 to 255 (inclusive)... */
17756 if (off > 255)
17757 return false;
17758
17759 if (off == 0)
17760 {
17761 l->dw_loc_opc = DW_OP_dup;
17762 l->dw_loc_oprnd1.v.val_unsigned = 0;
17763 }
17764 else if (off == 1)
17765 {
17766 l->dw_loc_opc = DW_OP_over;
17767 l->dw_loc_oprnd1.v.val_unsigned = 0;
17768 }
17769 else
17770 {
17771 l->dw_loc_opc = DW_OP_pick;
17772 l->dw_loc_oprnd1.v.val_unsigned = off;
17773 }
17774 }
17775
17776 /* Update frame_offset according to the effect the current operation has
17777 on the stack. */
17778 switch (l->dw_loc_opc)
17779 {
17780 case DW_OP_deref:
17781 case DW_OP_swap:
17782 case DW_OP_rot:
17783 case DW_OP_abs:
17784 case DW_OP_neg:
17785 case DW_OP_not:
17786 case DW_OP_plus_uconst:
17787 case DW_OP_skip:
17788 case DW_OP_reg0:
17789 case DW_OP_reg1:
17790 case DW_OP_reg2:
17791 case DW_OP_reg3:
17792 case DW_OP_reg4:
17793 case DW_OP_reg5:
17794 case DW_OP_reg6:
17795 case DW_OP_reg7:
17796 case DW_OP_reg8:
17797 case DW_OP_reg9:
17798 case DW_OP_reg10:
17799 case DW_OP_reg11:
17800 case DW_OP_reg12:
17801 case DW_OP_reg13:
17802 case DW_OP_reg14:
17803 case DW_OP_reg15:
17804 case DW_OP_reg16:
17805 case DW_OP_reg17:
17806 case DW_OP_reg18:
17807 case DW_OP_reg19:
17808 case DW_OP_reg20:
17809 case DW_OP_reg21:
17810 case DW_OP_reg22:
17811 case DW_OP_reg23:
17812 case DW_OP_reg24:
17813 case DW_OP_reg25:
17814 case DW_OP_reg26:
17815 case DW_OP_reg27:
17816 case DW_OP_reg28:
17817 case DW_OP_reg29:
17818 case DW_OP_reg30:
17819 case DW_OP_reg31:
17820 case DW_OP_bregx:
17821 case DW_OP_piece:
17822 case DW_OP_deref_size:
17823 case DW_OP_nop:
17824 case DW_OP_bit_piece:
17825 case DW_OP_implicit_value:
17826 case DW_OP_stack_value:
17827 break;
17828
17829 case DW_OP_addr:
17830 case DW_OP_const1u:
17831 case DW_OP_const1s:
17832 case DW_OP_const2u:
17833 case DW_OP_const2s:
17834 case DW_OP_const4u:
17835 case DW_OP_const4s:
17836 case DW_OP_const8u:
17837 case DW_OP_const8s:
17838 case DW_OP_constu:
17839 case DW_OP_consts:
17840 case DW_OP_dup:
17841 case DW_OP_over:
17842 case DW_OP_pick:
17843 case DW_OP_lit0:
17844 case DW_OP_lit1:
17845 case DW_OP_lit2:
17846 case DW_OP_lit3:
17847 case DW_OP_lit4:
17848 case DW_OP_lit5:
17849 case DW_OP_lit6:
17850 case DW_OP_lit7:
17851 case DW_OP_lit8:
17852 case DW_OP_lit9:
17853 case DW_OP_lit10:
17854 case DW_OP_lit11:
17855 case DW_OP_lit12:
17856 case DW_OP_lit13:
17857 case DW_OP_lit14:
17858 case DW_OP_lit15:
17859 case DW_OP_lit16:
17860 case DW_OP_lit17:
17861 case DW_OP_lit18:
17862 case DW_OP_lit19:
17863 case DW_OP_lit20:
17864 case DW_OP_lit21:
17865 case DW_OP_lit22:
17866 case DW_OP_lit23:
17867 case DW_OP_lit24:
17868 case DW_OP_lit25:
17869 case DW_OP_lit26:
17870 case DW_OP_lit27:
17871 case DW_OP_lit28:
17872 case DW_OP_lit29:
17873 case DW_OP_lit30:
17874 case DW_OP_lit31:
17875 case DW_OP_breg0:
17876 case DW_OP_breg1:
17877 case DW_OP_breg2:
17878 case DW_OP_breg3:
17879 case DW_OP_breg4:
17880 case DW_OP_breg5:
17881 case DW_OP_breg6:
17882 case DW_OP_breg7:
17883 case DW_OP_breg8:
17884 case DW_OP_breg9:
17885 case DW_OP_breg10:
17886 case DW_OP_breg11:
17887 case DW_OP_breg12:
17888 case DW_OP_breg13:
17889 case DW_OP_breg14:
17890 case DW_OP_breg15:
17891 case DW_OP_breg16:
17892 case DW_OP_breg17:
17893 case DW_OP_breg18:
17894 case DW_OP_breg19:
17895 case DW_OP_breg20:
17896 case DW_OP_breg21:
17897 case DW_OP_breg22:
17898 case DW_OP_breg23:
17899 case DW_OP_breg24:
17900 case DW_OP_breg25:
17901 case DW_OP_breg26:
17902 case DW_OP_breg27:
17903 case DW_OP_breg28:
17904 case DW_OP_breg29:
17905 case DW_OP_breg30:
17906 case DW_OP_breg31:
17907 case DW_OP_fbreg:
17908 case DW_OP_push_object_address:
17909 case DW_OP_call_frame_cfa:
17910 case DW_OP_GNU_variable_value:
17911 case DW_OP_GNU_addr_index:
17912 case DW_OP_GNU_const_index:
17913 ++frame_offset_;
17914 break;
17915
17916 case DW_OP_drop:
17917 case DW_OP_xderef:
17918 case DW_OP_and:
17919 case DW_OP_div:
17920 case DW_OP_minus:
17921 case DW_OP_mod:
17922 case DW_OP_mul:
17923 case DW_OP_or:
17924 case DW_OP_plus:
17925 case DW_OP_shl:
17926 case DW_OP_shr:
17927 case DW_OP_shra:
17928 case DW_OP_xor:
17929 case DW_OP_bra:
17930 case DW_OP_eq:
17931 case DW_OP_ge:
17932 case DW_OP_gt:
17933 case DW_OP_le:
17934 case DW_OP_lt:
17935 case DW_OP_ne:
17936 case DW_OP_regx:
17937 case DW_OP_xderef_size:
17938 --frame_offset_;
17939 break;
17940
17941 case DW_OP_call2:
17942 case DW_OP_call4:
17943 case DW_OP_call_ref:
17944 {
17945 dw_die_ref dwarf_proc = l->dw_loc_oprnd1.v.val_die_ref.die;
17946 int *stack_usage = dwarf_proc_stack_usage_map->get (dwarf_proc);
17947
17948 if (stack_usage == NULL)
17949 return false;
17950 frame_offset_ += *stack_usage;
17951 break;
17952 }
17953
17954 case DW_OP_implicit_pointer:
17955 case DW_OP_entry_value:
17956 case DW_OP_const_type:
17957 case DW_OP_regval_type:
17958 case DW_OP_deref_type:
17959 case DW_OP_convert:
17960 case DW_OP_reinterpret:
17961 case DW_OP_form_tls_address:
17962 case DW_OP_GNU_push_tls_address:
17963 case DW_OP_GNU_uninit:
17964 case DW_OP_GNU_encoded_addr:
17965 case DW_OP_GNU_implicit_pointer:
17966 case DW_OP_GNU_entry_value:
17967 case DW_OP_GNU_const_type:
17968 case DW_OP_GNU_regval_type:
17969 case DW_OP_GNU_deref_type:
17970 case DW_OP_GNU_convert:
17971 case DW_OP_GNU_reinterpret:
17972 case DW_OP_GNU_parameter_ref:
17973 /* loc_list_from_tree will probably not output these operations for
17974 size functions, so assume they will not appear here. */
17975 /* Fall through... */
17976
17977 default:
17978 gcc_unreachable ();
17979 }
17980
17981 /* Now, follow the control flow (except subroutine calls). */
17982 switch (l->dw_loc_opc)
17983 {
17984 case DW_OP_bra:
17985 if (!resolve_args_picking_1 (l->dw_loc_next, frame_offset_, dpi,
17986 frame_offsets))
17987 return false;
17988 /* Fall through. */
17989
17990 case DW_OP_skip:
17991 l = l->dw_loc_oprnd1.v.val_loc;
17992 break;
17993
17994 case DW_OP_stack_value:
17995 return true;
17996
17997 default:
17998 l = l->dw_loc_next;
17999 break;
18000 }
18001 }
18002
18003 return true;
18004 }
18005
18006 /* Make a DFS over operations reachable through LOC (i.e. follow branch
18007 operations) in order to resolve the operand of DW_OP_pick operations that
18008 target DWARF procedure arguments (DPI). INITIAL_FRAME_OFFSET is the frame
18009 offset *before* LOC is executed. Return whether all relocations were
18010 successful. */
18011
18012 static bool
18013 resolve_args_picking (dw_loc_descr_ref loc, unsigned initial_frame_offset,
18014 struct dwarf_procedure_info *dpi)
18015 {
18016 /* Associate to all visited operations the frame offset *before* evaluating
18017 this operation. */
18018 hash_map<dw_loc_descr_ref, unsigned> frame_offsets;
18019
18020 return resolve_args_picking_1 (loc, initial_frame_offset, dpi,
18021 frame_offsets);
18022 }
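
/* Editorial illustration (a hedged sketch), continuing the hypothetical
   "sz (a, b) = a + b" example: the body produced by loc_descriptor_from_tree
   is roughly "DW_OP_pick 0; DW_OP_pick 1; DW_OP_plus", both picks marked
   frame_offset_rel, followed by the epilogue "DW_OP_swap; DW_OP_drop;
   DW_OP_swap; DW_OP_drop".  With an initial frame offset of 2 (the argument
   count), the first pick sees frame offset 2, so off = 0 + (2 - 2) = 0 and
   it becomes DW_OP_dup; the second sees frame offset 3, so
   off = 1 + (3 - 2) = 2 and it stays DW_OP_pick 2.  The frame offset then
   drops from 4 to 3 across DW_OP_plus and ends at 1 after the epilogue,
   matching the single result slot the convention requires.  */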
18023
18024 /* Try to generate a DWARF procedure that computes the same result as FNDECL.
18025 Return NULL if it is not possible. */
18026
18027 static dw_die_ref
18028 function_to_dwarf_procedure (tree fndecl)
18029 {
18030 struct loc_descr_context ctx;
18031 struct dwarf_procedure_info dpi;
18032 dw_die_ref dwarf_proc_die;
18033 tree tree_body = DECL_SAVED_TREE (fndecl);
18034 dw_loc_descr_ref loc_body, epilogue;
18035
18036 tree cursor;
18037 unsigned i;
18038
18039 /* Do not generate multiple DWARF procedures for the same function
18040 declaration. */
18041 dwarf_proc_die = lookup_decl_die (fndecl);
18042 if (dwarf_proc_die != NULL)
18043 return dwarf_proc_die;
18044
18045 /* DWARF procedures are available starting with the DWARFv3 standard. */
18046 if (dwarf_version < 3 && dwarf_strict)
18047 return NULL;
18048
18049 /* We handle only functions for which we still have a body, that return a
18050 supported type and that take arguments with supported types. Note that
18051 there is no point translating functions that return nothing. */
18052 if (tree_body == NULL_TREE
18053 || DECL_RESULT (fndecl) == NULL_TREE
18054 || !is_handled_procedure_type (TREE_TYPE (DECL_RESULT (fndecl))))
18055 return NULL;
18056
18057 for (cursor = DECL_ARGUMENTS (fndecl);
18058 cursor != NULL_TREE;
18059 cursor = TREE_CHAIN (cursor))
18060 if (!is_handled_procedure_type (TREE_TYPE (cursor)))
18061 return NULL;
18062
18063 /* Match only "expr" in: RETURN_EXPR (MODIFY_EXPR (RESULT_DECL, expr)). */
18064 if (TREE_CODE (tree_body) != RETURN_EXPR)
18065 return NULL;
18066 tree_body = TREE_OPERAND (tree_body, 0);
18067 if (TREE_CODE (tree_body) != MODIFY_EXPR
18068 || TREE_OPERAND (tree_body, 0) != DECL_RESULT (fndecl))
18069 return NULL;
18070 tree_body = TREE_OPERAND (tree_body, 1);
18071
18072 /* Try to translate the body expression itself. Note that this will probably
18073 cause an infinite recursion if its call graph has a cycle. This is very
18074 unlikely for size functions, however, so don't bother with such things at
18075 the moment. */
18076 ctx.context_type = NULL_TREE;
18077 ctx.base_decl = NULL_TREE;
18078 ctx.dpi = &dpi;
18079 ctx.placeholder_arg = false;
18080 ctx.placeholder_seen = false;
18081 dpi.fndecl = fndecl;
18082 dpi.args_count = list_length (DECL_ARGUMENTS (fndecl));
18083 loc_body = loc_descriptor_from_tree (tree_body, 0, &ctx);
18084 if (!loc_body)
18085 return NULL;
18086
18087 /* After evaluating all operands in "loc_body", we should still have on the
18088 stack all arguments plus the desired function result (top of the stack).
18089 Generate code in order to keep only the result in our stack frame. */
18090 epilogue = NULL;
18091 for (i = 0; i < dpi.args_count; ++i)
18092 {
18093 dw_loc_descr_ref op_couple = new_loc_descr (DW_OP_swap, 0, 0);
18094 op_couple->dw_loc_next = new_loc_descr (DW_OP_drop, 0, 0);
18095 op_couple->dw_loc_next->dw_loc_next = epilogue;
18096 epilogue = op_couple;
18097 }
18098 add_loc_descr (&loc_body, epilogue);
18099 if (!resolve_args_picking (loc_body, dpi.args_count, &dpi))
18100 return NULL;
18101
18102 /* Trailing nops from loc_descriptor_from_tree (if any) could not be removed
18103 earlier because they were considered useful. Now that there is an epilogue,
18104 they are not anymore, so give it another try. */
18105 loc_descr_without_nops (loc_body);
18106
18107 /* fndecl may be used both as a regular DW_TAG_subprogram DIE and as
18108 a DW_TAG_dwarf_procedure, so we may have a conflict here. It's unlikely,
18109 though, given that size functions do not come from source, so they should
18110 not have a dedicated DW_TAG_subprogram DIE. */
18111 dwarf_proc_die
18112 = new_dwarf_proc_die (loc_body, fndecl,
18113 get_context_die (DECL_CONTEXT (fndecl)));
18114
18115 /* The called DWARF procedure consumes one stack slot per argument and
18116 returns one stack slot. */
18117 dwarf_proc_stack_usage_map->put (dwarf_proc_die, 1 - dpi.args_count);
18118
18119 return dwarf_proc_die;
18120 }
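
/* Editorial illustration (a hedged sketch): for the hypothetical
   two-argument size function used above, this yields a
   DW_TAG_dwarf_procedure DIE whose DW_AT_location holds the resolved body
   plus epilogue, and the stack usage map records 1 - 2 = -1: a call to the
   procedure is a net pop of one stack slot (two arguments consumed, one
   result pushed).  */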
18121
18122
18123 /* Generate a DWARF location list representing LOC.
18124 If WANT_ADDRESS is 0, an expression computing the value of LOC is returned.
18125 If WANT_ADDRESS is 1, an expression computing the address of LOC is returned.
18126 If WANT_ADDRESS is 2, an expression computing an address usable in a
18127 location description is returned (i.e. DW_OP_reg can be used
18128 to refer to register values).
18129
18130 CONTEXT provides information to customize the location descriptions
18131 generation. Its context_type field specifies what type is implicitly
18132 referenced by DW_OP_push_object_address. If it is NULL_TREE, this operation
18133 will not be generated.
18134
18135 Its DPI field determines whether we are generating a DWARF expression for a
18136 DWARF procedure, so PARM_DECL references are processed specifically.
18137
18138 If CONTEXT is NULL, the behavior is the same as if context_type, base_decl
18139 and dpi fields were null. */
18140
18141 static dw_loc_list_ref
18142 loc_list_from_tree_1 (tree loc, int want_address,
18143 struct loc_descr_context *context)
18144 {
18145 dw_loc_descr_ref ret = NULL, ret1 = NULL;
18146 dw_loc_list_ref list_ret = NULL, list_ret1 = NULL;
18147 int have_address = 0;
18148 enum dwarf_location_atom op;
18149
18150 /* ??? Most of the time we do not take proper care for sign/zero
18151 extending the values properly. Hopefully this won't be a real
18152 problem... */
18153
18154 if (context != NULL
18155 && context->base_decl == loc
18156 && want_address == 0)
18157 {
18158 if (dwarf_version >= 3 || !dwarf_strict)
18159 return new_loc_list (new_loc_descr (DW_OP_push_object_address, 0, 0),
18160 NULL, 0, NULL, 0, NULL);
18161 else
18162 return NULL;
18163 }
18164
18165 switch (TREE_CODE (loc))
18166 {
18167 case ERROR_MARK:
18168 expansion_failed (loc, NULL_RTX, "ERROR_MARK");
18169 return 0;
18170
18171 case PLACEHOLDER_EXPR:
18172 /* This case involves extracting fields from an object to determine the
18173 position of other fields. It is supposed to appear only as the first
18174 operand of COMPONENT_REF nodes and to reference precisely the type
18175 that the context allows. */
18176 if (context != NULL
18177 && TREE_TYPE (loc) == context->context_type
18178 && want_address >= 1)
18179 {
18180 if (dwarf_version >= 3 || !dwarf_strict)
18181 {
18182 ret = new_loc_descr (DW_OP_push_object_address, 0, 0);
18183 have_address = 1;
18184 break;
18185 }
18186 else
18187 return NULL;
18188 }
18189 /* For DW_TAG_generic_subrange attributes, PLACEHOLDER_EXPR stands for
18190 the single argument passed by the consumer. */
18191 else if (context != NULL
18192 && context->placeholder_arg
18193 && INTEGRAL_TYPE_P (TREE_TYPE (loc))
18194 && want_address == 0)
18195 {
18196 ret = new_loc_descr (DW_OP_pick, 0, 0);
18197 ret->frame_offset_rel = 1;
18198 context->placeholder_seen = true;
18199 break;
18200 }
18201 else
18202 expansion_failed (loc, NULL_RTX,
18203 "PLACEHOLDER_EXPR for an unexpected type");
18204 break;
18205
18206 case CALL_EXPR:
18207 {
18208 const int nargs = call_expr_nargs (loc);
18209 tree callee = get_callee_fndecl (loc);
18210 int i;
18211 dw_die_ref dwarf_proc;
18212
18213 if (callee == NULL_TREE)
18214 goto call_expansion_failed;
18215
18216 /* We handle only functions that return an integer. */
18217 if (!is_handled_procedure_type (TREE_TYPE (TREE_TYPE (callee))))
18218 goto call_expansion_failed;
18219
18220 dwarf_proc = function_to_dwarf_procedure (callee);
18221 if (dwarf_proc == NULL)
18222 goto call_expansion_failed;
18223
18224 /* Evaluate arguments right-to-left so that the first argument will
18225 be the top-most one on the stack. */
18226 for (i = nargs - 1; i >= 0; --i)
18227 {
18228 dw_loc_descr_ref loc_descr
18229 = loc_descriptor_from_tree (CALL_EXPR_ARG (loc, i), 0,
18230 context);
18231
18232 if (loc_descr == NULL)
18233 goto call_expansion_failed;
18234
18235 add_loc_descr (&ret, loc_descr);
18236 }
18237
18238 ret1 = new_loc_descr (DW_OP_call4, 0, 0);
18239 ret1->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18240 ret1->dw_loc_oprnd1.v.val_die_ref.die = dwarf_proc;
18241 ret1->dw_loc_oprnd1.v.val_die_ref.external = 0;
18242 add_loc_descr (&ret, ret1);
18243 break;
18244
18245 call_expansion_failed:
18246 expansion_failed (loc, NULL_RTX, "CALL_EXPR");
18247 /* There are no opcodes for these operations. */
18248 return 0;
18249 }
18250
18251 case PREINCREMENT_EXPR:
18252 case PREDECREMENT_EXPR:
18253 case POSTINCREMENT_EXPR:
18254 case POSTDECREMENT_EXPR:
18255 expansion_failed (loc, NULL_RTX, "PRE/POST INDCREMENT/DECREMENT");
18256 /* There are no opcodes for these operations. */
18257 return 0;
18258
18259 case ADDR_EXPR:
18260 /* If we already want an address, see if there is INDIRECT_REF inside
18261 e.g. for &this->field. */
18262 if (want_address)
18263 {
18264 list_ret = loc_list_for_address_of_addr_expr_of_indirect_ref
18265 (loc, want_address == 2, context);
18266 if (list_ret)
18267 have_address = 1;
18268 else if (decl_address_ip_invariant_p (TREE_OPERAND (loc, 0))
18269 && (ret = cst_pool_loc_descr (loc)))
18270 have_address = 1;
18271 }
18272 /* Otherwise, process the argument and look for the address. */
18273 if (!list_ret && !ret)
18274 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 1, context);
18275 else
18276 {
18277 if (want_address)
18278 expansion_failed (loc, NULL_RTX, "need address of ADDR_EXPR");
18279 return NULL;
18280 }
18281 break;
18282
18283 case VAR_DECL:
18284 if (DECL_THREAD_LOCAL_P (loc))
18285 {
18286 rtx rtl;
18287 enum dwarf_location_atom tls_op;
18288 enum dtprel_bool dtprel = dtprel_false;
18289
18290 if (targetm.have_tls)
18291 {
18292 /* If this is not defined, we have no way to emit the
18293 data. */
18294 if (!targetm.asm_out.output_dwarf_dtprel)
18295 return 0;
18296
18297 /* The way DW_OP_GNU_push_tls_address is specified, we
18298 can only look up addresses of objects in the current
18299 module. We used DW_OP_addr as first op, but that's
18300 wrong, because DW_OP_addr is relocated by the debug
18301 info consumer, while DW_OP_GNU_push_tls_address
18302 operand shouldn't be. */
18303 if (DECL_EXTERNAL (loc) && !targetm.binds_local_p (loc))
18304 return 0;
18305 dtprel = dtprel_true;
18306 /* We check for DWARF 5 here because gdb did not implement
18307 DW_OP_form_tls_address until after 7.12. */
18308 tls_op = (dwarf_version >= 5 ? DW_OP_form_tls_address
18309 : DW_OP_GNU_push_tls_address);
18310 }
18311 else
18312 {
18313 if (!targetm.emutls.debug_form_tls_address
18314 || !(dwarf_version >= 3 || !dwarf_strict))
18315 return 0;
18316 /* We stuffed the control variable into the DECL_VALUE_EXPR
18317 to signal (via DECL_HAS_VALUE_EXPR_P) that the decl should
18318 no longer appear in gimple code. We used the control
18319 variable specifically so that we could pick it up here. */
18320 loc = DECL_VALUE_EXPR (loc);
18321 tls_op = DW_OP_form_tls_address;
18322 }
18323
18324 rtl = rtl_for_decl_location (loc);
18325 if (rtl == NULL_RTX)
18326 return 0;
18327
18328 if (!MEM_P (rtl))
18329 return 0;
18330 rtl = XEXP (rtl, 0);
18331 if (! CONSTANT_P (rtl))
18332 return 0;
18333
18334 ret = new_addr_loc_descr (rtl, dtprel);
18335 ret1 = new_loc_descr (tls_op, 0, 0);
18336 add_loc_descr (&ret, ret1);
18337
18338 have_address = 1;
18339 break;
18340 }
18341 /* FALLTHRU */
18342
18343 case PARM_DECL:
18344 if (context != NULL && context->dpi != NULL
18345 && DECL_CONTEXT (loc) == context->dpi->fndecl)
18346 {
18347 /* We are generating code for a DWARF procedure and we want to access
18348 one of its arguments: find the appropriate argument offset and let
18349 the resolve_args_picking pass compute the offset that complies
18350 with the stack frame size. */
18351 unsigned i = 0;
18352 tree cursor;
18353
18354 for (cursor = DECL_ARGUMENTS (context->dpi->fndecl);
18355 cursor != NULL_TREE && cursor != loc;
18356 cursor = TREE_CHAIN (cursor), ++i)
18357 ;
18358 /* If we are translating a DWARF procedure, all referenced parameters
18359 must belong to the current function. */
18360 gcc_assert (cursor != NULL_TREE);
18361
18362 ret = new_loc_descr (DW_OP_pick, i, 0);
18363 ret->frame_offset_rel = 1;
18364 break;
18365 }
18366 /* FALLTHRU */
18367
18368 case RESULT_DECL:
18369 if (DECL_HAS_VALUE_EXPR_P (loc))
18370 return loc_list_from_tree_1 (DECL_VALUE_EXPR (loc),
18371 want_address, context);
18372 /* FALLTHRU */
18373
18374 case FUNCTION_DECL:
18375 {
18376 rtx rtl;
18377 var_loc_list *loc_list = lookup_decl_loc (loc);
18378
18379 if (loc_list && loc_list->first)
18380 {
18381 list_ret = dw_loc_list (loc_list, loc, want_address);
18382 have_address = want_address != 0;
18383 break;
18384 }
18385 rtl = rtl_for_decl_location (loc);
18386 if (rtl == NULL_RTX)
18387 {
18388 if (TREE_CODE (loc) != FUNCTION_DECL
18389 && early_dwarf
18390 && current_function_decl
18391 && want_address != 1
18392 && ! DECL_IGNORED_P (loc)
18393 && (INTEGRAL_TYPE_P (TREE_TYPE (loc))
18394 || POINTER_TYPE_P (TREE_TYPE (loc)))
18395 && DECL_CONTEXT (loc) == current_function_decl
18396 && (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (TREE_TYPE (loc)))
18397 <= DWARF2_ADDR_SIZE))
18398 {
18399 dw_die_ref ref = lookup_decl_die (loc);
18400 ret = new_loc_descr (DW_OP_GNU_variable_value, 0, 0);
18401 if (ref)
18402 {
18403 ret->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
18404 ret->dw_loc_oprnd1.v.val_die_ref.die = ref;
18405 ret->dw_loc_oprnd1.v.val_die_ref.external = 0;
18406 }
18407 else
18408 {
18409 ret->dw_loc_oprnd1.val_class = dw_val_class_decl_ref;
18410 ret->dw_loc_oprnd1.v.val_decl_ref = loc;
18411 }
18412 break;
18413 }
18414 expansion_failed (loc, NULL_RTX, "DECL has no RTL");
18415 return 0;
18416 }
18417 else if (CONST_INT_P (rtl))
18418 {
18419 HOST_WIDE_INT val = INTVAL (rtl);
18420 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18421 val &= GET_MODE_MASK (DECL_MODE (loc));
18422 ret = int_loc_descriptor (val);
18423 }
18424 else if (GET_CODE (rtl) == CONST_STRING)
18425 {
18426 expansion_failed (loc, NULL_RTX, "CONST_STRING");
18427 return 0;
18428 }
18429 else if (CONSTANT_P (rtl) && const_ok_for_output (rtl))
18430 ret = new_addr_loc_descr (rtl, dtprel_false);
18431 else
18432 {
18433 machine_mode mode, mem_mode;
18434
18435 /* Certain constructs can only be represented at top-level. */
18436 if (want_address == 2)
18437 {
18438 ret = loc_descriptor (rtl, VOIDmode,
18439 VAR_INIT_STATUS_INITIALIZED);
18440 have_address = 1;
18441 }
18442 else
18443 {
18444 mode = GET_MODE (rtl);
18445 mem_mode = VOIDmode;
18446 if (MEM_P (rtl))
18447 {
18448 mem_mode = mode;
18449 mode = get_address_mode (rtl);
18450 rtl = XEXP (rtl, 0);
18451 have_address = 1;
18452 }
18453 ret = mem_loc_descriptor (rtl, mode, mem_mode,
18454 VAR_INIT_STATUS_INITIALIZED);
18455 }
18456 if (!ret)
18457 expansion_failed (loc, rtl,
18458 "failed to produce loc descriptor for rtl");
18459 }
18460 }
18461 break;
18462
18463 case MEM_REF:
18464 if (!integer_zerop (TREE_OPERAND (loc, 1)))
18465 {
18466 have_address = 1;
18467 goto do_plus;
18468 }
18469 /* Fallthru. */
18470 case INDIRECT_REF:
18471 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18472 have_address = 1;
18473 break;
18474
18475 case TARGET_MEM_REF:
18476 case SSA_NAME:
18477 case DEBUG_EXPR_DECL:
18478 return NULL;
18479
18480 case COMPOUND_EXPR:
18481 return loc_list_from_tree_1 (TREE_OPERAND (loc, 1), want_address,
18482 context);
18483
18484 CASE_CONVERT:
18485 case VIEW_CONVERT_EXPR:
18486 case SAVE_EXPR:
18487 case MODIFY_EXPR:
18488 case NON_LVALUE_EXPR:
18489 return loc_list_from_tree_1 (TREE_OPERAND (loc, 0), want_address,
18490 context);
18491
18492 case COMPONENT_REF:
18493 case BIT_FIELD_REF:
18494 case ARRAY_REF:
18495 case ARRAY_RANGE_REF:
18496 case REALPART_EXPR:
18497 case IMAGPART_EXPR:
18498 {
18499 tree obj, offset;
18500 poly_int64 bitsize, bitpos, bytepos;
18501 machine_mode mode;
18502 int unsignedp, reversep, volatilep = 0;
18503
18504 obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
18505 &unsignedp, &reversep, &volatilep);
18506
18507 gcc_assert (obj != loc);
18508
18509 list_ret = loc_list_from_tree_1 (obj,
18510 want_address == 2
18511 && known_eq (bitpos, 0)
18512 && !offset ? 2 : 1,
18513 context);
18514 /* TODO: We can extract value of the small expression via shifting even
18515 for nonzero bitpos. */
18516 if (list_ret == 0)
18517 return 0;
18518 if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos)
18519 || !multiple_p (bitsize, BITS_PER_UNIT))
18520 {
18521 expansion_failed (loc, NULL_RTX,
18522 "bitfield access");
18523 return 0;
18524 }
18525
18526 if (offset != NULL_TREE)
18527 {
18528 /* Variable offset. */
18529 list_ret1 = loc_list_from_tree_1 (offset, 0, context);
18530 if (list_ret1 == 0)
18531 return 0;
18532 add_loc_list (&list_ret, list_ret1);
18533 if (!list_ret)
18534 return 0;
18535 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus, 0, 0));
18536 }
18537
18538 HOST_WIDE_INT value;
18539 if (bytepos.is_constant (&value) && value > 0)
18540 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_plus_uconst,
18541 value, 0));
18542 else if (maybe_ne (bytepos, 0))
18543 loc_list_plus_const (list_ret, bytepos);
18544
18545 have_address = 1;
18546 break;
18547 }
18548
18549 case INTEGER_CST:
18550 if ((want_address || !tree_fits_shwi_p (loc))
18551 && (ret = cst_pool_loc_descr (loc)))
18552 have_address = 1;
18553 else if (want_address == 2
18554 && tree_fits_shwi_p (loc)
18555 && (ret = address_of_int_loc_descriptor
18556 (int_size_in_bytes (TREE_TYPE (loc)),
18557 tree_to_shwi (loc))))
18558 have_address = 1;
18559 else if (tree_fits_shwi_p (loc))
18560 ret = int_loc_descriptor (tree_to_shwi (loc));
18561 else if (tree_fits_uhwi_p (loc))
18562 ret = uint_loc_descriptor (tree_to_uhwi (loc));
18563 else
18564 {
18565 expansion_failed (loc, NULL_RTX,
18566 "Integer operand is not host integer");
18567 return 0;
18568 }
18569 break;
18570
18571 case POLY_INT_CST:
18572 {
18573 if (want_address)
18574 {
18575 expansion_failed (loc, NULL_RTX,
18576 "constant address with a runtime component");
18577 return 0;
18578 }
18579 poly_int64 value;
18580 if (!poly_int_tree_p (loc, &value))
18581 {
18582 expansion_failed (loc, NULL_RTX, "constant too big");
18583 return 0;
18584 }
18585 ret = int_loc_descriptor (value);
18586 }
18587 break;
18588
18589 case CONSTRUCTOR:
18590 case REAL_CST:
18591 case STRING_CST:
18592 case COMPLEX_CST:
18593 if ((ret = cst_pool_loc_descr (loc)))
18594 have_address = 1;
18595 else if (TREE_CODE (loc) == CONSTRUCTOR)
18596 {
18597 tree type = TREE_TYPE (loc);
18598 unsigned HOST_WIDE_INT size = int_size_in_bytes (type);
18599 unsigned HOST_WIDE_INT offset = 0;
18600 unsigned HOST_WIDE_INT cnt;
18601 constructor_elt *ce;
18602
18603 if (TREE_CODE (type) == RECORD_TYPE)
18604 {
18605 /* This is very limited, but it's enough to output
18606 pointers to member functions, as long as the
18607 referenced function is defined in the current
18608 translation unit. */
18609 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (loc), cnt, ce)
18610 {
18611 tree val = ce->value;
18612
18613 tree field = ce->index;
18614
18615 if (val)
18616 STRIP_NOPS (val);
18617
18618 if (!field || DECL_BIT_FIELD (field))
18619 {
18620 expansion_failed (loc, NULL_RTX,
18621 "bitfield in record type constructor");
18622 size = offset = (unsigned HOST_WIDE_INT)-1;
18623 ret = NULL;
18624 break;
18625 }
18626
18627 HOST_WIDE_INT fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
18628 unsigned HOST_WIDE_INT pos = int_byte_position (field);
18629 gcc_assert (pos + fieldsize <= size);
18630 if (pos < offset)
18631 {
18632 expansion_failed (loc, NULL_RTX,
18633 "out-of-order fields in record constructor");
18634 size = offset = (unsigned HOST_WIDE_INT)-1;
18635 ret = NULL;
18636 break;
18637 }
18638 if (pos > offset)
18639 {
18640 ret1 = new_loc_descr (DW_OP_piece, pos - offset, 0);
18641 add_loc_descr (&ret, ret1);
18642 offset = pos;
18643 }
18644 if (val && fieldsize != 0)
18645 {
18646 ret1 = loc_descriptor_from_tree (val, want_address, context);
18647 if (!ret1)
18648 {
18649 expansion_failed (loc, NULL_RTX,
18650 "unsupported expression in field");
18651 size = offset = (unsigned HOST_WIDE_INT)-1;
18652 ret = NULL;
18653 break;
18654 }
18655 add_loc_descr (&ret, ret1);
18656 }
18657 if (fieldsize)
18658 {
18659 ret1 = new_loc_descr (DW_OP_piece, fieldsize, 0);
18660 add_loc_descr (&ret, ret1);
18661 offset = pos + fieldsize;
18662 }
18663 }
18664
18665 if (offset != size)
18666 {
18667 ret1 = new_loc_descr (DW_OP_piece, size - offset, 0);
18668 add_loc_descr (&ret, ret1);
18669 offset = size;
18670 }
18671
18672 have_address = !!want_address;
18673 }
18674 else
18675 expansion_failed (loc, NULL_RTX,
18676 "constructor of non-record type");
18677 }
18678 else
18679 /* We can construct small constants here using int_loc_descriptor. */
18680 expansion_failed (loc, NULL_RTX,
18681 "constructor or constant not in constant pool");
18682 break;
18683
18684 case TRUTH_AND_EXPR:
18685 case TRUTH_ANDIF_EXPR:
18686 case BIT_AND_EXPR:
18687 op = DW_OP_and;
18688 goto do_binop;
18689
18690 case TRUTH_XOR_EXPR:
18691 case BIT_XOR_EXPR:
18692 op = DW_OP_xor;
18693 goto do_binop;
18694
18695 case TRUTH_OR_EXPR:
18696 case TRUTH_ORIF_EXPR:
18697 case BIT_IOR_EXPR:
18698 op = DW_OP_or;
18699 goto do_binop;
18700
18701 case FLOOR_DIV_EXPR:
18702 case CEIL_DIV_EXPR:
18703 case ROUND_DIV_EXPR:
18704 case TRUNC_DIV_EXPR:
18705 case EXACT_DIV_EXPR:
18706 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18707 return 0;
18708 op = DW_OP_div;
18709 goto do_binop;
18710
18711 case MINUS_EXPR:
18712 op = DW_OP_minus;
18713 goto do_binop;
18714
18715 case FLOOR_MOD_EXPR:
18716 case CEIL_MOD_EXPR:
18717 case ROUND_MOD_EXPR:
18718 case TRUNC_MOD_EXPR:
18719 if (TYPE_UNSIGNED (TREE_TYPE (loc)))
18720 {
18721 op = DW_OP_mod;
18722 goto do_binop;
18723 }
18724 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18725 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18726 if (list_ret == 0 || list_ret1 == 0)
18727 return 0;
18728
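/* Editorial note (a hedged sketch): for signed operands the code below
   avoids DW_OP_mod and instead computes op0 - (op0 / op1) * op1.  With op0
   then op1 on the stack, "over; over; div; mul; minus" leaves exactly that
   value on top:
   [a b] -> [a b a] -> [a b a b] -> [a b a/b] -> [a b*(a/b)] -> [a - b*(a/b)].  */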
18729 add_loc_list (&list_ret, list_ret1);
18730 if (list_ret == 0)
18731 return 0;
18732 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18733 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_over, 0, 0));
18734 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_div, 0, 0));
18735 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_mul, 0, 0));
18736 add_loc_descr_to_each (list_ret, new_loc_descr (DW_OP_minus, 0, 0));
18737 break;
18738
18739 case MULT_EXPR:
18740 op = DW_OP_mul;
18741 goto do_binop;
18742
18743 case LSHIFT_EXPR:
18744 op = DW_OP_shl;
18745 goto do_binop;
18746
18747 case RSHIFT_EXPR:
18748 op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
18749 goto do_binop;
18750
18751 case POINTER_PLUS_EXPR:
18752 case PLUS_EXPR:
18753 do_plus:
18754 if (tree_fits_shwi_p (TREE_OPERAND (loc, 1)))
18755 {
18756 /* Big unsigned numbers can fit in HOST_WIDE_INT but it may be
18757 smarter to encode their opposite. The DW_OP_plus_uconst operation
18758 takes 1 + X bytes, X being the size of the ULEB128 addend. On the
18759 other hand, a "<push literal>; DW_OP_minus" pattern takes 1 + Y
18760 bytes, Y being the size of the operation that pushes the opposite
18761 of the addend. So let's choose the smallest representation. */
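/* Editorial example (hedged, assuming a 64-bit address size): for an addend
   of -1, DW_OP_plus_uconst needs a 10-byte ULEB128 for the wrapped
   constant (11 bytes in total), whereas pushing the opposite with
   DW_OP_lit1 followed by DW_OP_minus costs only 2 bytes, so the second
   form wins below.  */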
18762 const tree tree_addend = TREE_OPERAND (loc, 1);
18763 offset_int wi_addend;
18764 HOST_WIDE_INT shwi_addend;
18765 dw_loc_descr_ref loc_naddend;
18766
18767 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18768 if (list_ret == 0)
18769 return 0;
18770
18771 /* Try to get the literal to push. It is the opposite of the addend,
18772 so as we rely on wrapping during DWARF evaluation, first decode
18773 the literal as a "DWARF-sized" signed number. */
18774 wi_addend = wi::to_offset (tree_addend);
18775 wi_addend = wi::sext (wi_addend, DWARF2_ADDR_SIZE * 8);
18776 shwi_addend = wi_addend.to_shwi ();
18777 loc_naddend = (shwi_addend != INTTYPE_MINIMUM (HOST_WIDE_INT))
18778 ? int_loc_descriptor (-shwi_addend)
18779 : NULL;
18780
18781 if (loc_naddend != NULL
18782 && ((unsigned) size_of_uleb128 (shwi_addend)
18783 > size_of_loc_descr (loc_naddend)))
18784 {
18785 add_loc_descr_to_each (list_ret, loc_naddend);
18786 add_loc_descr_to_each (list_ret,
18787 new_loc_descr (DW_OP_minus, 0, 0));
18788 }
18789 else
18790 {
18791 for (dw_loc_descr_ref loc_cur = loc_naddend; loc_cur != NULL; )
18792 {
18793 loc_naddend = loc_cur;
18794 loc_cur = loc_cur->dw_loc_next;
18795 ggc_free (loc_naddend);
18796 }
18797 loc_list_plus_const (list_ret, wi_addend.to_shwi ());
18798 }
18799 break;
18800 }
18801
18802 op = DW_OP_plus;
18803 goto do_binop;
18804
18805 case LE_EXPR:
18806 op = DW_OP_le;
18807 goto do_comp_binop;
18808
18809 case GE_EXPR:
18810 op = DW_OP_ge;
18811 goto do_comp_binop;
18812
18813 case LT_EXPR:
18814 op = DW_OP_lt;
18815 goto do_comp_binop;
18816
18817 case GT_EXPR:
18818 op = DW_OP_gt;
18819 goto do_comp_binop;
18820
18821 do_comp_binop:
18822 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (loc, 0))))
18823 {
18824 list_ret = loc_list_from_tree (TREE_OPERAND (loc, 0), 0, context);
18825 list_ret1 = loc_list_from_tree (TREE_OPERAND (loc, 1), 0, context);
18826 list_ret = loc_list_from_uint_comparison (list_ret, list_ret1,
18827 TREE_CODE (loc));
18828 break;
18829 }
18830 else
18831 goto do_binop;
18832
18833 case EQ_EXPR:
18834 op = DW_OP_eq;
18835 goto do_binop;
18836
18837 case NE_EXPR:
18838 op = DW_OP_ne;
18839 goto do_binop;
18840
18841 do_binop:
18842 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18843 list_ret1 = loc_list_from_tree_1 (TREE_OPERAND (loc, 1), 0, context);
18844 if (list_ret == 0 || list_ret1 == 0)
18845 return 0;
18846
18847 add_loc_list (&list_ret, list_ret1);
18848 if (list_ret == 0)
18849 return 0;
18850 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18851 break;
18852
18853 case TRUTH_NOT_EXPR:
18854 case BIT_NOT_EXPR:
18855 op = DW_OP_not;
18856 goto do_unop;
18857
18858 case ABS_EXPR:
18859 op = DW_OP_abs;
18860 goto do_unop;
18861
18862 case NEGATE_EXPR:
18863 op = DW_OP_neg;
18864 goto do_unop;
18865
18866 do_unop:
18867 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18868 if (list_ret == 0)
18869 return 0;
18870
18871 add_loc_descr_to_each (list_ret, new_loc_descr (op, 0, 0));
18872 break;
18873
18874 case MIN_EXPR:
18875 case MAX_EXPR:
18876 {
18877 const enum tree_code code =
18878 TREE_CODE (loc) == MIN_EXPR ? GT_EXPR : LT_EXPR;
18879
18880 loc = build3 (COND_EXPR, TREE_TYPE (loc),
18881 build2 (code, integer_type_node,
18882 TREE_OPERAND (loc, 0), TREE_OPERAND (loc, 1)),
18883 TREE_OPERAND (loc, 1), TREE_OPERAND (loc, 0));
18884 }
18885
18886 /* fall through */
18887
18888 case COND_EXPR:
18889 {
18890 dw_loc_descr_ref lhs
18891 = loc_descriptor_from_tree (TREE_OPERAND (loc, 1), 0, context);
18892 dw_loc_list_ref rhs
18893 = loc_list_from_tree_1 (TREE_OPERAND (loc, 2), 0, context);
18894 dw_loc_descr_ref bra_node, jump_node, tmp;
18895
18896 list_ret = loc_list_from_tree_1 (TREE_OPERAND (loc, 0), 0, context);
18897 if (list_ret == 0 || lhs == 0 || rhs == 0)
18898 return 0;
18899
18900 bra_node = new_loc_descr (DW_OP_bra, 0, 0);
18901 add_loc_descr_to_each (list_ret, bra_node);
18902
18903 add_loc_list (&list_ret, rhs);
18904 jump_node = new_loc_descr (DW_OP_skip, 0, 0);
18905 add_loc_descr_to_each (list_ret, jump_node);
18906
18907 add_loc_descr_to_each (list_ret, lhs);
18908 bra_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18909 bra_node->dw_loc_oprnd1.v.val_loc = lhs;
18910
18911 /* ??? Need a node to point the skip at. Use a nop. */
18912 tmp = new_loc_descr (DW_OP_nop, 0, 0);
18913 add_loc_descr_to_each (list_ret, tmp);
18914 jump_node->dw_loc_oprnd1.val_class = dw_val_class_loc;
18915 jump_node->dw_loc_oprnd1.v.val_loc = tmp;
18916 }
18917 break;
18918
18919 case FIX_TRUNC_EXPR:
18920 return 0;
18921
18922 default:
18923 /* Leave front-end specific codes as simply unknown. This comes
18924 up, for instance, with the C STMT_EXPR. */
18925 if ((unsigned int) TREE_CODE (loc)
18926 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE)
18927 {
18928 expansion_failed (loc, NULL_RTX,
18929 "language specific tree node");
18930 return 0;
18931 }
18932
18933 /* Otherwise this is a generic code; we should just list all of
18934 these explicitly. We forgot one. */
18935 if (flag_checking)
18936 gcc_unreachable ();
18937
18938 /* In a release build, we want to degrade gracefully: better to
18939 generate incomplete debugging information than to crash. */
18940 return NULL;
18941 }
18942
18943 if (!ret && !list_ret)
18944 return 0;
18945
18946 if (want_address == 2 && !have_address
18947 && (dwarf_version >= 4 || !dwarf_strict))
18948 {
18949 if (int_size_in_bytes (TREE_TYPE (loc)) > DWARF2_ADDR_SIZE)
18950 {
18951 expansion_failed (loc, NULL_RTX,
18952 "DWARF address size mismatch");
18953 return 0;
18954 }
18955 if (ret)
18956 add_loc_descr (&ret, new_loc_descr (DW_OP_stack_value, 0, 0));
18957 else
18958 add_loc_descr_to_each (list_ret,
18959 new_loc_descr (DW_OP_stack_value, 0, 0));
18960 have_address = 1;
18961 }
18962 /* Show if we can't fill the request for an address. */
18963 if (want_address && !have_address)
18964 {
18965 expansion_failed (loc, NULL_RTX,
18966 "Want address and only have value");
18967 return 0;
18968 }
18969
18970 gcc_assert (!ret || !list_ret);
18971
18972 /* If we've got an address and don't want one, dereference. */
18973 if (!want_address && have_address)
18974 {
18975 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (loc));
18976
18977 if (size > DWARF2_ADDR_SIZE || size == -1)
18978 {
18979 expansion_failed (loc, NULL_RTX,
18980 "DWARF address size mismatch");
18981 return 0;
18982 }
18983 else if (size == DWARF2_ADDR_SIZE)
18984 op = DW_OP_deref;
18985 else
18986 op = DW_OP_deref_size;
18987
18988 if (ret)
18989 add_loc_descr (&ret, new_loc_descr (op, size, 0));
18990 else
18991 add_loc_descr_to_each (list_ret, new_loc_descr (op, size, 0));
18992 }
18993 if (ret)
18994 list_ret = new_loc_list (ret, NULL, 0, NULL, 0, NULL);
18995
18996 return list_ret;
18997 }
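
/* Editorial sketch (hedged): for the INTEGER_CST 5 with WANT_ADDRESS == 0
   the result is typically the single expression "DW_OP_lit5".  More
   generally, when only a value could be computed but WANT_ADDRESS is 2,
   the common tail above appends DW_OP_stack_value under DWARF 4 (or
   non-strict DWARF) and otherwise reports "Want address and only have
   value".  */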
18998
18999 /* Likewise, but strip useless DW_OP_nop operations in the resulting
19000 expressions. */
19001
19002 static dw_loc_list_ref
19003 loc_list_from_tree (tree loc, int want_address,
19004 struct loc_descr_context *context)
19005 {
19006 dw_loc_list_ref result = loc_list_from_tree_1 (loc, want_address, context);
19007
19008 for (dw_loc_list_ref loc_cur = result;
19009 loc_cur != NULL; loc_cur = loc_cur->dw_loc_next)
19010 loc_descr_without_nops (loc_cur->expr);
19011 return result;
19012 }
19013
19014 /* Same as above but return only single location expression. */
19015 static dw_loc_descr_ref
19016 loc_descriptor_from_tree (tree loc, int want_address,
19017 struct loc_descr_context *context)
19018 {
19019 dw_loc_list_ref ret = loc_list_from_tree (loc, want_address, context);
19020 if (!ret)
19021 return NULL;
19022 if (ret->dw_loc_next)
19023 {
19024 expansion_failed (loc, NULL_RTX,
19025 "Location list where only loc descriptor needed");
19026 return NULL;
19027 }
19028 return ret->expr;
19029 }
19030
19031 /* Given a pointer to what is assumed to be a FIELD_DECL node, return a
19032 pointer to the declared type for the relevant field variable, or return
19033 `integer_type_node' if the given node turns out to be an
19034 ERROR_MARK node. */
19035
19036 static inline tree
19037 field_type (const_tree decl)
19038 {
19039 tree type;
19040
19041 if (TREE_CODE (decl) == ERROR_MARK)
19042 return integer_type_node;
19043
19044 type = DECL_BIT_FIELD_TYPE (decl);
19045 if (type == NULL_TREE)
19046 type = TREE_TYPE (decl);
19047
19048 return type;
19049 }
19050
19051 /* Given a pointer to a tree node, return the alignment in bits for
19052 it, or else return BITS_PER_WORD if the node actually turns out to
19053 be an ERROR_MARK node. */
19054
19055 static inline unsigned
19056 simple_type_align_in_bits (const_tree type)
19057 {
19058 return (TREE_CODE (type) != ERROR_MARK) ? TYPE_ALIGN (type) : BITS_PER_WORD;
19059 }
19060
19061 static inline unsigned
19062 simple_decl_align_in_bits (const_tree decl)
19063 {
19064 return (TREE_CODE (decl) != ERROR_MARK) ? DECL_ALIGN (decl) : BITS_PER_WORD;
19065 }
19066
19067 /* Return the result of rounding T up to ALIGN. */
19068
19069 static inline offset_int
19070 round_up_to_align (const offset_int &t, unsigned int align)
19071 {
19072 return wi::udiv_trunc (t + align - 1, align) * align;
19073 }
19074
19075 /* Compute the size of TYPE in bytes. If possible, return NULL and store the
19076 size as an integer constant in CST_SIZE. Otherwise, if possible, return a
19077 DWARF expression that computes the size. Return NULL and set CST_SIZE to -1
19078 if we fail to return the size in one of these two forms. */
19079
19080 static dw_loc_descr_ref
19081 type_byte_size (const_tree type, HOST_WIDE_INT *cst_size)
19082 {
19083 tree tree_size;
19084 struct loc_descr_context ctx;
19085
19086 /* Prefer to return a constant integer, if possible. */
19087 *cst_size = int_size_in_bytes (type);
19088 if (*cst_size != -1)
19089 return NULL;
19090
19091 ctx.context_type = const_cast<tree> (type);
19092 ctx.base_decl = NULL_TREE;
19093 ctx.dpi = NULL;
19094 ctx.placeholder_arg = false;
19095 ctx.placeholder_seen = false;
19096
19097 type = TYPE_MAIN_VARIANT (type);
19098 tree_size = TYPE_SIZE_UNIT (type);
19099 return ((tree_size != NULL_TREE)
19100 ? loc_descriptor_from_tree (tree_size, 0, &ctx)
19101 : NULL);
19102 }
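
/* Editorial sketch (hedged): for a C99 variable-length array type such as
   "char buf[n]", int_size_in_bytes returns -1, so the function instead
   builds a DWARF expression from TYPE_SIZE_UNIT (an expression involving
   "n"); for fixed-size types the size comes back through CST_SIZE and NULL
   is returned.  */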
19103
19104 /* Helper structure for RECORD_TYPE processing. */
19105 struct vlr_context
19106 {
19107 /* Root RECORD_TYPE. It is needed to generate data member location
19108 descriptions in variable-length records (VLR), but also to cope with
19109 variants, which are composed of nested structures multiplexed with
19110 QUAL_UNION_TYPE nodes. Each time such a structure is passed to a
19111 function processing a FIELD_DECL, it is required to be non null. */
19112 tree struct_type;
19113 /* When generating a variant part in a RECORD_TYPE (i.e. a nested
19114 QUAL_UNION_TYPE), this holds an expression that computes the offset for
19115 this variant part as part of the root record (in storage units). For
19116 regular records, it must be NULL_TREE. */
19117 tree variant_part_offset;
19118 };
19119
19120 /* Given a pointer to a FIELD_DECL, compute the byte offset of the lowest
19121 addressed byte of the "containing object" for the given FIELD_DECL. If
19122 possible, return a native constant through CST_OFFSET (in which case NULL is
19123 returned); otherwise return a DWARF expression that computes the offset.
19124
19125 Set *CST_OFFSET to 0 and return NULL if we are unable to determine what
19126 that offset is, either because the argument turns out to be a pointer to an
19127 ERROR_MARK node, or because the offset expression is too complex for us.
19128
19129 CTX is required: see the comment for VLR_CONTEXT. */
19130
19131 static dw_loc_descr_ref
19132 field_byte_offset (const_tree decl, struct vlr_context *ctx,
19133 HOST_WIDE_INT *cst_offset)
19134 {
19135 tree tree_result;
19136 dw_loc_list_ref loc_result;
19137
19138 *cst_offset = 0;
19139
19140 if (TREE_CODE (decl) == ERROR_MARK)
19141 return NULL;
19142 else
19143 gcc_assert (TREE_CODE (decl) == FIELD_DECL);
19144
19145 /* We cannot handle variable bit offsets at the moment, so abort if that's the
19146 case. */
19147 if (TREE_CODE (DECL_FIELD_BIT_OFFSET (decl)) != INTEGER_CST)
19148 return NULL;
19149
19150 /* We used to handle only constant offsets in all cases. Now, we properly
19151 handle dynamic byte offsets only when PCC bitfield type doesn't
19152 matter. */
19153 if (PCC_BITFIELD_TYPE_MATTERS
19154 && TREE_CODE (DECL_FIELD_OFFSET (decl)) == INTEGER_CST)
19155 {
19156 offset_int object_offset_in_bits;
19157 offset_int object_offset_in_bytes;
19158 offset_int bitpos_int;
19159 tree type;
19160 tree field_size_tree;
19161 offset_int deepest_bitpos;
19162 offset_int field_size_in_bits;
19163 unsigned int type_align_in_bits;
19164 unsigned int decl_align_in_bits;
19165 offset_int type_size_in_bits;
19166
19167 bitpos_int = wi::to_offset (bit_position (decl));
19168 type = field_type (decl);
19169 type_size_in_bits = offset_int_type_size_in_bits (type);
19170 type_align_in_bits = simple_type_align_in_bits (type);
19171
19172 field_size_tree = DECL_SIZE (decl);
19173
19174 /* The size could be unspecified if there was an error, or for
19175 a flexible array member. */
19176 if (!field_size_tree)
19177 field_size_tree = bitsize_zero_node;
19178
19179 /* If the size of the field is not constant, use the type size. */
19180 if (TREE_CODE (field_size_tree) == INTEGER_CST)
19181 field_size_in_bits = wi::to_offset (field_size_tree);
19182 else
19183 field_size_in_bits = type_size_in_bits;
19184
19185 decl_align_in_bits = simple_decl_align_in_bits (decl);
19186
19187 /* The GCC front-end doesn't make any attempt to keep track of the
19188 starting bit offset (relative to the start of the containing
19189 structure type) of the hypothetical "containing object" for a
19190 bit-field. Thus, when computing the byte offset value for the
19191 start of the "containing object" of a bit-field, we must deduce
19192 this information on our own. This can be rather tricky to do in
19193 some cases. For example, handling the following structure type
19194 definition when compiling for an i386/i486 target (which only
19195 aligns long long's to 32-bit boundaries) can be very tricky:
19196
19197 struct S { int field1; long long field2:31; };
19198
19199 Fortunately, there is a simple rule-of-thumb which can be used
19200 in such cases. When compiling for an i386/i486, GCC will
19201 allocate 8 bytes for the structure shown above. It decides to
19202 do this based upon one simple rule for bit-field allocation.
19203 GCC allocates each "containing object" for each bit-field at
19204 the first (i.e. lowest addressed) legitimate alignment boundary
19205 (based upon the required minimum alignment for the declared
19206 type of the field) which it can possibly use, subject to the
19207 condition that there is still enough available space remaining
19208 in the containing object (when allocated at the selected point)
19209 to fully accommodate all of the bits of the bit-field itself.
19210
19211 This simple rule makes it obvious why GCC allocates 8 bytes for
19212 each object of the structure type shown above. When looking
19213 for a place to allocate the "containing object" for `field2',
19214 the compiler simply tries to allocate a 64-bit "containing
19215 object" at each successive 32-bit boundary (starting at zero)
19216 until it finds a place to allocate that 64-bit field such that
19217 at least 31 contiguous (and previously unallocated) bits remain
19218 within that selected 64 bit field. (As it turns out, for the
19219 example above, the compiler finds it is OK to allocate the
19220 "containing object" 64-bit field at bit-offset zero within the
19221 structure type.)
19222
19223 Here we attempt to work backwards from the limited set of facts
19224 we're given, and we try to deduce from those facts, where GCC
19225 must have believed that the containing object started (within
19226 the structure type). The value we deduce is then used (by the
19227 callers of this routine) to generate DW_AT_location and
19228 DW_AT_bit_offset attributes for fields (both bit-fields and, in
19229 the case of DW_AT_location, regular fields as well). */
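
/* Editorial worked example (hedged, under the i386 rules stated above):
   for `field2', bitpos_int is 32, field_size_in_bits is 31 and
   type_size_in_bits is 64, so deepest_bitpos is 63 and
   object_offset_in_bits starts at 63 - 64 = -1.  Rounding that up to the
   32-bit type alignment gives 0, which is not greater than bitpos_int, so
   the containing object is deduced to start at byte offset 0 of the
   structure, matching the narrative above.  */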
19230
19231 /* Figure out the bit-distance from the start of the structure to
19232 the "deepest" bit of the bit-field. */
19233 deepest_bitpos = bitpos_int + field_size_in_bits;
19234
19235 /* This is the tricky part. Use some fancy footwork to deduce
19236 where the lowest addressed bit of the containing object must
19237 be. */
19238 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19239
19240 /* Round up to type_align by default. This works best for
19241 bitfields. */
19242 object_offset_in_bits
19243 = round_up_to_align (object_offset_in_bits, type_align_in_bits);
19244
19245 if (wi::gtu_p (object_offset_in_bits, bitpos_int))
19246 {
19247 object_offset_in_bits = deepest_bitpos - type_size_in_bits;
19248
19249 /* Round up to decl_align instead. */
19250 object_offset_in_bits
19251 = round_up_to_align (object_offset_in_bits, decl_align_in_bits);
19252 }
19253
19254 object_offset_in_bytes
19255 = wi::lrshift (object_offset_in_bits, LOG2_BITS_PER_UNIT);
19256 if (ctx->variant_part_offset == NULL_TREE)
19257 {
19258 *cst_offset = object_offset_in_bytes.to_shwi ();
19259 return NULL;
19260 }
19261 tree_result = wide_int_to_tree (sizetype, object_offset_in_bytes);
19262 }
19263 else
19264 tree_result = byte_position (decl);
19265
19266 if (ctx->variant_part_offset != NULL_TREE)
19267 tree_result = fold_build2 (PLUS_EXPR, TREE_TYPE (tree_result),
19268 ctx->variant_part_offset, tree_result);
19269
19270      /* If the byte offset is a constant, it's simpler to handle a native
19271 constant rather than a DWARF expression. */
19272 if (TREE_CODE (tree_result) == INTEGER_CST)
19273 {
19274 *cst_offset = wi::to_offset (tree_result).to_shwi ();
19275 return NULL;
19276 }
19277 struct loc_descr_context loc_ctx = {
19278 ctx->struct_type, /* context_type */
19279 NULL_TREE, /* base_decl */
19280 NULL, /* dpi */
19281 false, /* placeholder_arg */
19282 false /* placeholder_seen */
19283 };
19284 loc_result = loc_list_from_tree (tree_result, 0, &loc_ctx);
19285
19286      /* We want a DWARF expression: give up (returning NULL) if we only have a
19287         location list with multiple elements.  */
19288 if (!loc_result || !single_element_loc_list_p (loc_result))
19289 return NULL;
19290 else
19291 return loc_result->expr;
19292 }
19293 \f
19294 /* The following routines define various Dwarf attributes and any data
19295 associated with them. */
19296
19297 /* Add a location description attribute value to a DIE.
19298
19299 This emits location attributes suitable for whole variables and
19300 whole parameters. Note that the location attributes for struct fields are
19301 generated by the routine `data_member_location_attribute' below. */
19302
19303 static inline void
19304 add_AT_location_description (dw_die_ref die, enum dwarf_attribute attr_kind,
19305 dw_loc_list_ref descr)
19306 {
19307 bool check_no_locviews = true;
19308 if (descr == 0)
19309 return;
19310 if (single_element_loc_list_p (descr))
19311 add_AT_loc (die, attr_kind, descr->expr);
19312 else
19313 {
19314 add_AT_loc_list (die, attr_kind, descr);
19315 gcc_assert (descr->ll_symbol);
19316 if (attr_kind == DW_AT_location && descr->vl_symbol
19317 && dwarf2out_locviews_in_attribute ())
19318 {
19319 add_AT_view_list (die, DW_AT_GNU_locviews);
19320 check_no_locviews = false;
19321 }
19322 }
19323
19324 if (check_no_locviews)
19325 gcc_assert (!get_AT (die, DW_AT_GNU_locviews));
19326 }
19327
19328 /* Add DW_AT_accessibility attribute to DIE if needed. */
19329
19330 static void
19331 add_accessibility_attribute (dw_die_ref die, tree decl)
19332 {
19333 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
19334 children, otherwise the default is DW_ACCESS_public. In DWARF2
19335 the default has always been DW_ACCESS_public. */
19336 if (TREE_PROTECTED (decl))
19337 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
19338 else if (TREE_PRIVATE (decl))
19339 {
19340 if (dwarf_version == 2
19341 || die->die_parent == NULL
19342 || die->die_parent->die_tag != DW_TAG_class_type)
19343 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
19344 }
19345 else if (dwarf_version > 2
19346 && die->die_parent
19347 && die->die_parent->die_tag == DW_TAG_class_type)
19348 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
19349 }
19350
19351 /* Attach the specialized form of location attribute used for data members of
19352 struct and union types. In the special case of a FIELD_DECL node which
19353 represents a bit-field, the "offset" part of this special location
19354 descriptor must indicate the distance in bytes from the lowest-addressed
19355 byte of the containing struct or union type to the lowest-addressed byte of
19356 the "containing object" for the bit-field. (See the `field_byte_offset'
19357 function above).
19358
19359 For any given bit-field, the "containing object" is a hypothetical object
19360 (of some integral or enum type) within which the given bit-field lives. The
19361 type of this hypothetical "containing object" is always the same as the
19362 declared type of the individual bit-field itself (for GCC anyway... the
19363 DWARF spec doesn't actually mandate this). Note that it is the size (in
19364 bytes) of the hypothetical "containing object" which will be given in the
19365 DW_AT_byte_size attribute for this bit-field. (See the
19366 `byte_size_attribute' function below.) It is also used when calculating the
19367 value of the DW_AT_bit_offset attribute. (See the `bit_offset_attribute'
19368 function below.)
19369
19370 CTX is required: see the comment for VLR_CONTEXT. */
19371
19372 static void
19373 add_data_member_location_attribute (dw_die_ref die,
19374 tree decl,
19375 struct vlr_context *ctx)
19376 {
19377 HOST_WIDE_INT offset;
19378 dw_loc_descr_ref loc_descr = 0;
19379
19380 if (TREE_CODE (decl) == TREE_BINFO)
19381 {
19382 /* We're working on the TAG_inheritance for a base class. */
19383 if (BINFO_VIRTUAL_P (decl) && is_cxx ())
19384 {
19385 /* For C++ virtual bases we can't just use BINFO_OFFSET, as they
19386 aren't at a fixed offset from all (sub)objects of the same
19387 type. We need to extract the appropriate offset from our
19388 vtable. The following dwarf expression means
19389
19390 BaseAddr = ObAddr + *((*ObAddr) - Offset)
19391
19392 This is specific to the V3 ABI, of course. */
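
          /* Concretely (a sketch of the sequence built just below), with
             the object address already on the DWARF stack the opcodes are
             roughly:

               DW_OP_dup; DW_OP_deref; <push Offset>; DW_OP_minus;
               DW_OP_deref; DW_OP_plus

             where <push Offset> is whatever constant form
             int_loc_descriptor chooses for -offset, leaving
             ObAddr + *((*ObAddr) - Offset) on the stack.  */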
19393
19394 dw_loc_descr_ref tmp;
19395
19396 /* Make a copy of the object address. */
19397 tmp = new_loc_descr (DW_OP_dup, 0, 0);
19398 add_loc_descr (&loc_descr, tmp);
19399
19400 /* Extract the vtable address. */
19401 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19402 add_loc_descr (&loc_descr, tmp);
19403
19404 /* Calculate the address of the offset. */
19405 offset = tree_to_shwi (BINFO_VPTR_FIELD (decl));
19406 gcc_assert (offset < 0);
19407
19408 tmp = int_loc_descriptor (-offset);
19409 add_loc_descr (&loc_descr, tmp);
19410 tmp = new_loc_descr (DW_OP_minus, 0, 0);
19411 add_loc_descr (&loc_descr, tmp);
19412
19413 /* Extract the offset. */
19414 tmp = new_loc_descr (DW_OP_deref, 0, 0);
19415 add_loc_descr (&loc_descr, tmp);
19416
19417 /* Add it to the object address. */
19418 tmp = new_loc_descr (DW_OP_plus, 0, 0);
19419 add_loc_descr (&loc_descr, tmp);
19420 }
19421 else
19422 offset = tree_to_shwi (BINFO_OFFSET (decl));
19423 }
19424 else
19425 {
19426 loc_descr = field_byte_offset (decl, ctx, &offset);
19427
19428 /* If loc_descr is available then we know the field offset is dynamic.
19429 However, GDB does not handle dynamic field offsets very well at the
19430 moment. */
19431 if (loc_descr != NULL && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL)
19432 {
19433 loc_descr = NULL;
19434 offset = 0;
19435 }
19436
19437      /* Data member location evaluation starts with the base address on the
19438 stack. Compute the field offset and add it to this base address. */
19439 else if (loc_descr != NULL)
19440 add_loc_descr (&loc_descr, new_loc_descr (DW_OP_plus, 0, 0));
19441 }
19442
19443 if (! loc_descr)
19444 {
19445      /* While DW_AT_data_bit_offset was already added in DWARF4, consumers
19446         were slow to support it; GDB, for example, only gained support in
19447         November 2016.  For DWARF5 we need newer debug info consumers anyway.
19448         We might change this to dwarf_version >= 4 once most consumers have
             caught up.  */
19449 if (dwarf_version >= 5
19450 && TREE_CODE (decl) == FIELD_DECL
19451 && DECL_BIT_FIELD_TYPE (decl))
19452 {
19453 tree off = bit_position (decl);
19454 if (tree_fits_uhwi_p (off) && get_AT (die, DW_AT_bit_size))
19455 {
19456 remove_AT (die, DW_AT_byte_size);
19457 remove_AT (die, DW_AT_bit_offset);
19458 add_AT_unsigned (die, DW_AT_data_bit_offset, tree_to_uhwi (off));
19459 return;
19460 }
19461 }
19462 if (dwarf_version > 2)
19463 {
19464 /* Don't need to output a location expression, just the constant. */
19465 if (offset < 0)
19466 add_AT_int (die, DW_AT_data_member_location, offset);
19467 else
19468 add_AT_unsigned (die, DW_AT_data_member_location, offset);
19469 return;
19470 }
19471 else
19472 {
19473 enum dwarf_location_atom op;
19474
19475 /* The DWARF2 standard says that we should assume that the structure
19476 address is already on the stack, so we can specify a structure
19477 field address by using DW_OP_plus_uconst. */
19478 op = DW_OP_plus_uconst;
19479 loc_descr = new_loc_descr (op, offset, 0);
19480 }
19481 }
19482
19483 add_AT_loc (die, DW_AT_data_member_location, loc_descr);
19484 }
19485
19486 /* Writes integer values to dw_vec_const array. */
19487
19488 static void
19489 insert_int (HOST_WIDE_INT val, unsigned int size, unsigned char *dest)
19490 {
19491 while (size != 0)
19492 {
19493 *dest++ = val & 0xff;
19494 val >>= 8;
19495 --size;
19496 }
19497 }
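
/* For example, insert_int (0x1234, 2, dest) stores dest[0] = 0x34 and
   dest[1] = 0x12: bytes are always written least-significant first here,
   and extract_int below recovers the original value from that layout.  */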
19498
19499 /* Reads integers from dw_vec_const array. Inverse of insert_int. */
19500
19501 static HOST_WIDE_INT
19502 extract_int (const unsigned char *src, unsigned int size)
19503 {
19504 HOST_WIDE_INT val = 0;
19505
19506 src += size;
19507 while (size != 0)
19508 {
19509 val <<= 8;
19510 val |= *--src & 0xff;
19511 --size;
19512 }
19513 return val;
19514 }
19515
19516 /* Writes wide_int values to dw_vec_const array. */
19517
19518 static void
19519 insert_wide_int (const wide_int &val, unsigned char *dest, int elt_size)
19520 {
19521 int i;
19522
19523 if (elt_size <= HOST_BITS_PER_WIDE_INT/BITS_PER_UNIT)
19524 {
19525 insert_int ((HOST_WIDE_INT) val.elt (0), elt_size, dest);
19526 return;
19527 }
19528
19529 /* We'd have to extend this code to support odd sizes. */
19530 gcc_assert (elt_size % (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT) == 0);
19531
19532 int n = elt_size / (HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT);
19533
19534 if (WORDS_BIG_ENDIAN)
19535 for (i = n - 1; i >= 0; i--)
19536 {
19537 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19538 dest += sizeof (HOST_WIDE_INT);
19539 }
19540 else
19541 for (i = 0; i < n; i++)
19542 {
19543 insert_int ((HOST_WIDE_INT) val.elt (i), sizeof (HOST_WIDE_INT), dest);
19544 dest += sizeof (HOST_WIDE_INT);
19545 }
19546 }
19547
19548 /* Writes floating point values to dw_vec_const array. */
19549
19550 static void
19551 insert_float (const_rtx rtl, unsigned char *array)
19552 {
19553 long val[4];
19554 int i;
19555 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19556
19557 real_to_target (val, CONST_DOUBLE_REAL_VALUE (rtl), mode);
19558
19559 /* real_to_target puts 32-bit pieces in each long. Pack them. */
19560 for (i = 0; i < GET_MODE_SIZE (mode) / 4; i++)
19561 {
19562 insert_int (val[i], 4, array);
19563 array += 4;
19564 }
19565 }
19566
19567 /* Attach a DW_AT_const_value attribute for a variable or a parameter which
19568 does not have a "location" either in memory or in a register. These
19569 things can arise in GNU C when a constant is passed as an actual parameter
19570 to an inlined function. They can also arise in C++ where declared
19571 constants do not necessarily get memory "homes". */
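
/* A hedged illustration: in C++ a "static const int" used only in constant
   expressions may never be given a memory home, and a call such as f (42)
   to a function that gets inlined can leave the inlined parameter with no
   runtime location at all; in both cases DW_AT_const_value still lets the
   debugger print the value.  */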
19572
19573 static bool
19574 add_const_value_attribute (dw_die_ref die, rtx rtl)
19575 {
19576 switch (GET_CODE (rtl))
19577 {
19578 case CONST_INT:
19579 {
19580 HOST_WIDE_INT val = INTVAL (rtl);
19581
19582 if (val < 0)
19583 add_AT_int (die, DW_AT_const_value, val);
19584 else
19585 add_AT_unsigned (die, DW_AT_const_value, (unsigned HOST_WIDE_INT) val);
19586 }
19587 return true;
19588
19589 case CONST_WIDE_INT:
19590 {
19591 wide_int w1 = rtx_mode_t (rtl, MAX_MODE_INT);
19592 unsigned int prec = MIN (wi::min_precision (w1, UNSIGNED),
19593                                (unsigned int) CONST_WIDE_INT_NUNITS (rtl) * HOST_BITS_PER_WIDE_INT);
19594 wide_int w = wi::zext (w1, prec);
19595 add_AT_wide (die, DW_AT_const_value, w);
19596 }
19597 return true;
19598
19599 case CONST_DOUBLE:
19600 /* Note that a CONST_DOUBLE rtx could represent either an integer or a
19601 floating-point constant. A CONST_DOUBLE is used whenever the
19602 constant requires more than one word in order to be adequately
19603 represented. */
19604 if (TARGET_SUPPORTS_WIDE_INT == 0
19605 && !SCALAR_FLOAT_MODE_P (GET_MODE (rtl)))
19606 add_AT_double (die, DW_AT_const_value,
19607 CONST_DOUBLE_HIGH (rtl), CONST_DOUBLE_LOW (rtl));
19608 else
19609 {
19610 scalar_float_mode mode = as_a <scalar_float_mode> (GET_MODE (rtl));
19611 unsigned int length = GET_MODE_SIZE (mode);
19612 unsigned char *array = ggc_vec_alloc<unsigned char> (length);
19613
19614 insert_float (rtl, array);
19615 add_AT_vec (die, DW_AT_const_value, length / 4, 4, array);
19616 }
19617 return true;
19618
19619 case CONST_VECTOR:
19620 {
19621 unsigned int length;
19622 if (!CONST_VECTOR_NUNITS (rtl).is_constant (&length))
19623 return false;
19624
19625 machine_mode mode = GET_MODE (rtl);
19626 unsigned int elt_size = GET_MODE_UNIT_SIZE (mode);
19627 unsigned char *array
19628 = ggc_vec_alloc<unsigned char> (length * elt_size);
19629 unsigned int i;
19630 unsigned char *p;
19631 machine_mode imode = GET_MODE_INNER (mode);
19632
19633 switch (GET_MODE_CLASS (mode))
19634 {
19635 case MODE_VECTOR_INT:
19636 for (i = 0, p = array; i < length; i++, p += elt_size)
19637 {
19638 rtx elt = CONST_VECTOR_ELT (rtl, i);
19639 insert_wide_int (rtx_mode_t (elt, imode), p, elt_size);
19640 }
19641 break;
19642
19643 case MODE_VECTOR_FLOAT:
19644 for (i = 0, p = array; i < length; i++, p += elt_size)
19645 {
19646 rtx elt = CONST_VECTOR_ELT (rtl, i);
19647 insert_float (elt, p);
19648 }
19649 break;
19650
19651 default:
19652 gcc_unreachable ();
19653 }
19654
19655 add_AT_vec (die, DW_AT_const_value, length, elt_size, array);
19656 }
19657 return true;
19658
19659 case CONST_STRING:
19660 if (dwarf_version >= 4 || !dwarf_strict)
19661 {
19662 dw_loc_descr_ref loc_result;
19663 resolve_one_addr (&rtl);
19664 rtl_addr:
19665 loc_result = new_addr_loc_descr (rtl, dtprel_false);
19666 add_loc_descr (&loc_result, new_loc_descr (DW_OP_stack_value, 0, 0));
19667 add_AT_loc (die, DW_AT_location, loc_result);
19668 vec_safe_push (used_rtx_array, rtl);
19669 return true;
19670 }
19671 return false;
19672
19673 case CONST:
19674 if (CONSTANT_P (XEXP (rtl, 0)))
19675 return add_const_value_attribute (die, XEXP (rtl, 0));
19676 /* FALLTHROUGH */
19677 case SYMBOL_REF:
19678 if (!const_ok_for_output (rtl))
19679 return false;
19680 /* FALLTHROUGH */
19681 case LABEL_REF:
19682 if (dwarf_version >= 4 || !dwarf_strict)
19683 goto rtl_addr;
19684 return false;
19685
19686 case PLUS:
19687 /* In cases where an inlined instance of an inline function is passed
19688 the address of an `auto' variable (which is local to the caller) we
19689 can get a situation where the DECL_RTL of the artificial local
19690 variable (for the inlining) which acts as a stand-in for the
19691 corresponding formal parameter (of the inline function) will look
19692 like (plus:SI (reg:SI FRAME_PTR) (const_int ...)). This is not
19693 exactly a compile-time constant expression, but it isn't the address
19694 of the (artificial) local variable either. Rather, it represents the
19695 *value* which the artificial local variable always has during its
19696 lifetime. We currently have no way to represent such quasi-constant
19697 values in Dwarf, so for now we just punt and generate nothing. */
19698 return false;
19699
19700 case HIGH:
19701 case CONST_FIXED:
19702 case MINUS:
19703 case SIGN_EXTEND:
19704 case ZERO_EXTEND:
19705 case CONST_POLY_INT:
19706 return false;
19707
19708 case MEM:
19709 if (GET_CODE (XEXP (rtl, 0)) == CONST_STRING
19710 && MEM_READONLY_P (rtl)
19711 && GET_MODE (rtl) == BLKmode)
19712 {
19713 add_AT_string (die, DW_AT_const_value, XSTR (XEXP (rtl, 0), 0));
19714 return true;
19715 }
19716 return false;
19717
19718 default:
19719 /* No other kinds of rtx should be possible here. */
19720 gcc_unreachable ();
19721 }
19722 return false;
19723 }
19724
19725 /* Determine whether the evaluation of EXPR references any variables
19726 or functions which aren't otherwise used (and therefore may not be
19727 output). */
19728 static tree
19729 reference_to_unused (tree * tp, int * walk_subtrees,
19730 void * data ATTRIBUTE_UNUSED)
19731 {
19732 if (! EXPR_P (*tp) && ! CONSTANT_CLASS_P (*tp))
19733 *walk_subtrees = 0;
19734
19735 if (DECL_P (*tp) && ! TREE_PUBLIC (*tp) && ! TREE_USED (*tp)
19736 && ! TREE_ASM_WRITTEN (*tp))
19737 return *tp;
19738 /* ??? The C++ FE emits debug information for using decls, so
19739 putting gcc_unreachable here falls over. See PR31899. For now
19740 be conservative. */
19741 else if (!symtab->global_info_ready && VAR_OR_FUNCTION_DECL_P (*tp))
19742 return *tp;
19743 else if (VAR_P (*tp))
19744 {
19745 varpool_node *node = varpool_node::get (*tp);
19746 if (!node || !node->definition)
19747 return *tp;
19748 }
19749 else if (TREE_CODE (*tp) == FUNCTION_DECL
19750 && (!DECL_EXTERNAL (*tp) || DECL_DECLARED_INLINE_P (*tp)))
19751 {
19752 /* The call graph machinery must have finished analyzing,
19753 optimizing and gimplifying the CU by now.
19754 So if *TP has no call graph node associated
19755 to it, it means *TP will not be emitted. */
19756 if (!cgraph_node::get (*tp))
19757 return *tp;
19758 }
19759 else if (TREE_CODE (*tp) == STRING_CST && !TREE_ASM_WRITTEN (*tp))
19760 return *tp;
19761
19762 return NULL_TREE;
19763 }
19764
19765 /* Generate an RTL constant from a decl initializer INIT with decl type TYPE,
19766 for use in a later add_const_value_attribute call. */
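
/* For instance (illustrative only), an initializer such as "abc" for a
   4-element char array passes the string-constant checks below and yields
   a read-only BLKmode MEM wrapping a CONST_STRING, whereas aggregate and
   complex initializers simply yield NULL_RTX here.  */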
19767
19768 static rtx
19769 rtl_for_decl_init (tree init, tree type)
19770 {
19771 rtx rtl = NULL_RTX;
19772
19773 STRIP_NOPS (init);
19774
19775 /* If a variable is initialized with a string constant without embedded
19776 zeros, build CONST_STRING. */
19777 if (TREE_CODE (init) == STRING_CST && TREE_CODE (type) == ARRAY_TYPE)
19778 {
19779 tree enttype = TREE_TYPE (type);
19780 tree domain = TYPE_DOMAIN (type);
19781 scalar_int_mode mode;
19782
19783 if (is_int_mode (TYPE_MODE (enttype), &mode)
19784 && GET_MODE_SIZE (mode) == 1
19785 && domain
19786 && TYPE_MAX_VALUE (domain)
19787 && TREE_CODE (TYPE_MAX_VALUE (domain)) == INTEGER_CST
19788 && integer_zerop (TYPE_MIN_VALUE (domain))
19789 && compare_tree_int (TYPE_MAX_VALUE (domain),
19790 TREE_STRING_LENGTH (init) - 1) == 0
19791 && ((size_t) TREE_STRING_LENGTH (init)
19792 == strlen (TREE_STRING_POINTER (init)) + 1))
19793 {
19794 rtl = gen_rtx_CONST_STRING (VOIDmode,
19795 ggc_strdup (TREE_STRING_POINTER (init)));
19796 rtl = gen_rtx_MEM (BLKmode, rtl);
19797 MEM_READONLY_P (rtl) = 1;
19798 }
19799 }
19800 /* Other aggregates, and complex values, could be represented using
19801 CONCAT: FIXME! */
19802 else if (AGGREGATE_TYPE_P (type)
19803 || (TREE_CODE (init) == VIEW_CONVERT_EXPR
19804 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (init, 0))))
19805 || TREE_CODE (type) == COMPLEX_TYPE)
19806 ;
19807 /* Vectors only work if their mode is supported by the target.
19808 FIXME: generic vectors ought to work too. */
19809 else if (TREE_CODE (type) == VECTOR_TYPE
19810 && !VECTOR_MODE_P (TYPE_MODE (type)))
19811 ;
19812 /* If the initializer is something that we know will expand into an
19813 immediate RTL constant, expand it now. We must be careful not to
19814 reference variables which won't be output. */
19815 else if (initializer_constant_valid_p (init, type)
19816 && ! walk_tree (&init, reference_to_unused, NULL, NULL))
19817 {
19818 /* Convert vector CONSTRUCTOR initializers to VECTOR_CST if
19819 possible. */
19820 if (TREE_CODE (type) == VECTOR_TYPE)
19821 switch (TREE_CODE (init))
19822 {
19823 case VECTOR_CST:
19824 break;
19825 case CONSTRUCTOR:
19826 if (TREE_CONSTANT (init))
19827 {
19828 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (init);
19829 bool constant_p = true;
19830 tree value;
19831 unsigned HOST_WIDE_INT ix;
19832
19833 /* Even when ctor is constant, it might contain non-*_CST
19834 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
19835 belong into VECTOR_CST nodes. */
19836 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
19837 if (!CONSTANT_CLASS_P (value))
19838 {
19839 constant_p = false;
19840 break;
19841 }
19842
19843 if (constant_p)
19844 {
19845 init = build_vector_from_ctor (type, elts);
19846 break;
19847 }
19848 }
19849 /* FALLTHRU */
19850
19851 default:
19852 return NULL;
19853 }
19854
19855 rtl = expand_expr (init, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);
19856
19857 /* If expand_expr returns a MEM, it wasn't immediate. */
19858 gcc_assert (!rtl || !MEM_P (rtl));
19859 }
19860
19861 return rtl;
19862 }
19863
19864 /* Generate RTL for the variable DECL to represent its location. */
19865
19866 static rtx
19867 rtl_for_decl_location (tree decl)
19868 {
19869 rtx rtl;
19870
19871 /* Here we have to decide where we are going to say the parameter "lives"
19872 (as far as the debugger is concerned). We only have a couple of
19873 choices. GCC provides us with DECL_RTL and with DECL_INCOMING_RTL.
19874
19875 DECL_RTL normally indicates where the parameter lives during most of the
19876 activation of the function. If optimization is enabled however, this
19877 could be either NULL or else a pseudo-reg. Both of those cases indicate
19878 that the parameter doesn't really live anywhere (as far as the code
19879 generation parts of GCC are concerned) during most of the function's
19880 activation. That will happen (for example) if the parameter is never
19881 referenced within the function.
19882
19883 We could just generate a location descriptor here for all non-NULL
19884 non-pseudo values of DECL_RTL and ignore all of the rest, but we can be
19885 a little nicer than that if we also consider DECL_INCOMING_RTL in cases
19886 where DECL_RTL is NULL or is a pseudo-reg.
19887
19888 Note however that we can only get away with using DECL_INCOMING_RTL as
19889 a backup substitute for DECL_RTL in certain limited cases. In cases
19890 where DECL_ARG_TYPE (decl) indicates the same type as TREE_TYPE (decl),
19891 we can be sure that the parameter was passed using the same type as it is
19892 declared to have within the function, and that its DECL_INCOMING_RTL
19893 points us to a place where a value of that type is passed.
19894
19895 In cases where DECL_ARG_TYPE (decl) and TREE_TYPE (decl) are different,
19896 we cannot (in general) use DECL_INCOMING_RTL as a substitute for DECL_RTL
19897 because in these cases DECL_INCOMING_RTL points us to a value of some
19898 type which is *different* from the type of the parameter itself. Thus,
19899 if we tried to use DECL_INCOMING_RTL to generate a location attribute in
19900 such cases, the debugger would end up (for example) trying to fetch a
19901 `float' from a place which actually contains the first part of a
19902 `double'. That would lead to really incorrect and confusing
19903 output at debug-time.
19904
19905 So, in general, we *do not* use DECL_INCOMING_RTL as a backup for DECL_RTL
19906 in cases where DECL_ARG_TYPE (decl) != TREE_TYPE (decl). There
19907 are a couple of exceptions however. On little-endian machines we can
19908 get away with using DECL_INCOMING_RTL even when DECL_ARG_TYPE (decl) is
19909 not the same as TREE_TYPE (decl), but only when DECL_ARG_TYPE (decl) is
19910 an integral type that is smaller than TREE_TYPE (decl). These cases arise
19911 when (on a little-endian machine) a non-prototyped function has a
19912 parameter declared to be of type `short' or `char'. In such cases,
19913 TREE_TYPE (decl) will be `short' or `char', DECL_ARG_TYPE (decl) will
19914 be `int', and DECL_INCOMING_RTL will point to the lowest-order byte of the
19915 passed `int' value. If the debugger then uses that address to fetch
19916 a `short' or a `char' (on a little-endian machine) the result will be
19917 the correct data, so we allow for such exceptional cases below.
19918
19919 Note that our goal here is to describe the place where the given formal
19920 parameter lives during most of the function's activation (i.e. between the
19921 end of the prologue and the start of the epilogue). We'll do that as best
19922 as we can. Note however that if the given formal parameter is modified
19923 sometime during the execution of the function, then a stack backtrace (at
19924 debug-time) will show the function as having been called with the *new*
19925 value rather than the value which was originally passed in. This happens
19926 rarely enough that it is not a major problem, but it *is* a problem, and
19927 I'd like to fix it.
19928
19929 A future version of dwarf2out.c may generate two additional attributes for
19930 any given DW_TAG_formal_parameter DIE which will describe the "passed
19931 type" and the "passed location" for the given formal parameter in addition
19932 to the attributes we now generate to indicate the "declared type" and the
19933 "active location" for each parameter. This additional set of attributes
19934 could be used by debuggers for stack backtraces. Separately, note that
19935 sometimes DECL_RTL can be NULL and DECL_INCOMING_RTL can be NULL also.
19936 This happens (for example) for inlined-instances of inline function formal
19937 parameters which are never referenced. This really shouldn't be
19938 happening. All PARM_DECL nodes should get valid non-NULL
19939 DECL_INCOMING_RTL values. FIXME. */
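
  /* As an illustration of the little-endian exception above, for an
     old-style definition such as

         int f (c) char c; { return c; }

     TREE_TYPE (c) is `char' while DECL_ARG_TYPE (c) is the promoted
     `int', and DECL_INCOMING_RTL points at that `int'.  On a
     little-endian target the lowest-addressed byte of that `int' holds
     the `char' value, so DECL_INCOMING_RTL is still an acceptable
     substitute there.  */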
19940
19941 /* Use DECL_RTL as the "location" unless we find something better. */
19942 rtl = DECL_RTL_IF_SET (decl);
19943
19944 /* When generating abstract instances, ignore everything except
19945 constants, symbols living in memory, and symbols living in
19946 fixed registers. */
19947 if (! reload_completed)
19948 {
19949 if (rtl
19950 && (CONSTANT_P (rtl)
19951 || (MEM_P (rtl)
19952 && CONSTANT_P (XEXP (rtl, 0)))
19953 || (REG_P (rtl)
19954 && VAR_P (decl)
19955 && TREE_STATIC (decl))))
19956 {
19957 rtl = targetm.delegitimize_address (rtl);
19958 return rtl;
19959 }
19960 rtl = NULL_RTX;
19961 }
19962 else if (TREE_CODE (decl) == PARM_DECL)
19963 {
19964 if (rtl == NULL_RTX
19965 || is_pseudo_reg (rtl)
19966 || (MEM_P (rtl)
19967 && is_pseudo_reg (XEXP (rtl, 0))
19968 && DECL_INCOMING_RTL (decl)
19969 && MEM_P (DECL_INCOMING_RTL (decl))
19970 && GET_MODE (rtl) == GET_MODE (DECL_INCOMING_RTL (decl))))
19971 {
19972 tree declared_type = TREE_TYPE (decl);
19973 tree passed_type = DECL_ARG_TYPE (decl);
19974 machine_mode dmode = TYPE_MODE (declared_type);
19975 machine_mode pmode = TYPE_MODE (passed_type);
19976
19977 /* This decl represents a formal parameter which was optimized out.
19978 Note that DECL_INCOMING_RTL may be NULL in here, but we handle
19979 all cases where (rtl == NULL_RTX) just below. */
19980 if (dmode == pmode)
19981 rtl = DECL_INCOMING_RTL (decl);
19982 else if ((rtl == NULL_RTX || is_pseudo_reg (rtl))
19983 && SCALAR_INT_MODE_P (dmode)
19984 && known_le (GET_MODE_SIZE (dmode), GET_MODE_SIZE (pmode))
19985 && DECL_INCOMING_RTL (decl))
19986 {
19987 rtx inc = DECL_INCOMING_RTL (decl);
19988 if (REG_P (inc))
19989 rtl = inc;
19990 else if (MEM_P (inc))
19991 {
19992 if (BYTES_BIG_ENDIAN)
19993 rtl = adjust_address_nv (inc, dmode,
19994 GET_MODE_SIZE (pmode)
19995 - GET_MODE_SIZE (dmode));
19996 else
19997 rtl = inc;
19998 }
19999 }
20000 }
20001
20002 /* If the parm was passed in registers, but lives on the stack, then
20003 make a big endian correction if the mode of the type of the
20004 parameter is not the same as the mode of the rtl. */
20005 /* ??? This is the same series of checks that are made in dbxout.c before
20006 we reach the big endian correction code there. It isn't clear if all
20007 of these checks are necessary here, but keeping them all is the safe
20008 thing to do. */
20009 else if (MEM_P (rtl)
20010 && XEXP (rtl, 0) != const0_rtx
20011 && ! CONSTANT_P (XEXP (rtl, 0))
20012 /* Not passed in memory. */
20013 && !MEM_P (DECL_INCOMING_RTL (decl))
20014 /* Not passed by invisible reference. */
20015 && (!REG_P (XEXP (rtl, 0))
20016 || REGNO (XEXP (rtl, 0)) == HARD_FRAME_POINTER_REGNUM
20017 || REGNO (XEXP (rtl, 0)) == STACK_POINTER_REGNUM
20018 #if !HARD_FRAME_POINTER_IS_ARG_POINTER
20019 || REGNO (XEXP (rtl, 0)) == ARG_POINTER_REGNUM
20020 #endif
20021 )
20022 /* Big endian correction check. */
20023 && BYTES_BIG_ENDIAN
20024 && TYPE_MODE (TREE_TYPE (decl)) != GET_MODE (rtl)
20025 && known_lt (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))),
20026 UNITS_PER_WORD))
20027 {
20028 machine_mode addr_mode = get_address_mode (rtl);
20029 poly_int64 offset = (UNITS_PER_WORD
20030 - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (decl))));
20031
20032 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
20033 plus_constant (addr_mode, XEXP (rtl, 0), offset));
20034 }
20035 }
20036 else if (VAR_P (decl)
20037 && rtl
20038 && MEM_P (rtl)
20039 && GET_MODE (rtl) != TYPE_MODE (TREE_TYPE (decl)))
20040 {
20041 machine_mode addr_mode = get_address_mode (rtl);
20042 poly_int64 offset = byte_lowpart_offset (TYPE_MODE (TREE_TYPE (decl)),
20043 GET_MODE (rtl));
20044
20045 /* If a variable is declared "register" yet is smaller than
20046 a register, then if we store the variable to memory, it
20047 looks like we're storing a register-sized value, when in
20048 fact we are not. We need to adjust the offset of the
20049 storage location to reflect the actual value's bytes,
20050 else gdb will not be able to display it. */
20051 if (maybe_ne (offset, 0))
20052 rtl = gen_rtx_MEM (TYPE_MODE (TREE_TYPE (decl)),
20053 plus_constant (addr_mode, XEXP (rtl, 0), offset));
20054 }
20055
20056 /* A variable with no DECL_RTL but a DECL_INITIAL is a compile-time constant,
20057 and will have been substituted directly into all expressions that use it.
20058 C does not have such a concept, but C++ and other languages do. */
20059 if (!rtl && VAR_P (decl) && DECL_INITIAL (decl))
20060 rtl = rtl_for_decl_init (DECL_INITIAL (decl), TREE_TYPE (decl));
20061
20062 if (rtl)
20063 rtl = targetm.delegitimize_address (rtl);
20064
20065 /* If we don't look past the constant pool, we risk emitting a
20066 reference to a constant pool entry that isn't referenced from
20067 code, and thus is not emitted. */
20068 if (rtl)
20069 rtl = avoid_constant_pool_reference (rtl);
20070
20071 /* Try harder to get a rtl. If this symbol ends up not being emitted
20072 in the current CU, resolve_addr will remove the expression referencing
20073 it. */
20074 if (rtl == NULL_RTX
20075 && !(early_dwarf && (flag_generate_lto || flag_generate_offload))
20076 && VAR_P (decl)
20077 && !DECL_EXTERNAL (decl)
20078 && TREE_STATIC (decl)
20079 && DECL_NAME (decl)
20080 && !DECL_HARD_REGISTER (decl)
20081 && DECL_MODE (decl) != VOIDmode)
20082 {
20083 rtl = make_decl_rtl_for_debug (decl);
20084 if (!MEM_P (rtl)
20085 || GET_CODE (XEXP (rtl, 0)) != SYMBOL_REF
20086 || SYMBOL_REF_DECL (XEXP (rtl, 0)) != decl)
20087 rtl = NULL_RTX;
20088 }
20089
20090 return rtl;
20091 }
20092
20093 /* Check whether DECL is a Fortran COMMON symbol.  If not, NULL_TREE is
20094    returned.  If so, the decl for the COMMON block is returned, and *VALUE
20095    is set to the offset of the symbol within the common block.  */
20096
20097 static tree
20098 fortran_common (tree decl, HOST_WIDE_INT *value)
20099 {
20100 tree val_expr, cvar;
20101 machine_mode mode;
20102 poly_int64 bitsize, bitpos;
20103 tree offset;
20104 HOST_WIDE_INT cbitpos;
20105 int unsignedp, reversep, volatilep = 0;
20106
20107 /* If the decl isn't a VAR_DECL, or if it isn't static, or if
20108 it does not have a value (the offset into the common area), or if it
20109 is thread local (as opposed to global) then it isn't common, and shouldn't
20110 be handled as such. */
20111 if (!VAR_P (decl)
20112 || !TREE_STATIC (decl)
20113 || !DECL_HAS_VALUE_EXPR_P (decl)
20114 || !is_fortran ())
20115 return NULL_TREE;
20116
20117 val_expr = DECL_VALUE_EXPR (decl);
20118 if (TREE_CODE (val_expr) != COMPONENT_REF)
20119 return NULL_TREE;
20120
20121 cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset, &mode,
20122 &unsignedp, &reversep, &volatilep);
20123
20124 if (cvar == NULL_TREE
20125 || !VAR_P (cvar)
20126 || DECL_ARTIFICIAL (cvar)
20127 || !TREE_PUBLIC (cvar)
20128 /* We don't expect to have to cope with variable offsets,
20129 since at present all static data must have a constant size. */
20130 || !bitpos.is_constant (&cbitpos))
20131 return NULL_TREE;
20132
20133 *value = 0;
20134 if (offset != NULL)
20135 {
20136 if (!tree_fits_shwi_p (offset))
20137 return NULL_TREE;
20138 *value = tree_to_shwi (offset);
20139 }
20140 if (cbitpos != 0)
20141 *value += cbitpos / BITS_PER_UNIT;
20142
20143 return cvar;
20144 }
20145
20146 /* Generate *either* a DW_AT_location attribute or else a DW_AT_const_value
20147 data attribute for a variable or a parameter. We generate the
20148 DW_AT_const_value attribute only in those cases where the given variable
20149 or parameter does not have a true "location" either in memory or in a
20150 register. This can happen (for example) when a constant is passed as an
20151 actual argument in a call to an inline function. (It's possible that
20152 these things can crop up in other ways also.) Note that one type of
20153 constant value which can be passed into an inlined function is a constant
20154 pointer. This can happen for example if an actual argument in an inlined
20155 function call evaluates to a compile-time constant address.
20156
20157 CACHE_P is true if it is worth caching the location list for DECL,
20158 so that future calls can reuse it rather than regenerate it from scratch.
20159 This is true for BLOCK_NONLOCALIZED_VARS in inlined subroutines,
20160 since we will need to refer to them each time the function is inlined. */
20161
20162 static bool
20163 add_location_or_const_value_attribute (dw_die_ref die, tree decl, bool cache_p)
20164 {
20165 rtx rtl;
20166 dw_loc_list_ref list;
20167 var_loc_list *loc_list;
20168 cached_dw_loc_list *cache;
20169
20170 if (early_dwarf)
20171 return false;
20172
20173 if (TREE_CODE (decl) == ERROR_MARK)
20174 return false;
20175
20176 if (get_AT (die, DW_AT_location)
20177 || get_AT (die, DW_AT_const_value))
20178 return true;
20179
20180 gcc_assert (VAR_P (decl) || TREE_CODE (decl) == PARM_DECL
20181 || TREE_CODE (decl) == RESULT_DECL);
20182
20183 /* Try to get some constant RTL for this decl, and use that as the value of
20184 the location. */
20185
20186 rtl = rtl_for_decl_location (decl);
20187 if (rtl && (CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20188 && add_const_value_attribute (die, rtl))
20189 return true;
20190
20191   /* See if we have a single element location list that is equivalent to
20192      a constant value.  In that case it is better to use add_const_value_attribute
20193      rather than expanding the constant value equivalent.  */
20194 loc_list = lookup_decl_loc (decl);
20195 if (loc_list
20196 && loc_list->first
20197 && loc_list->first->next == NULL
20198 && NOTE_P (loc_list->first->loc)
20199 && NOTE_VAR_LOCATION (loc_list->first->loc)
20200 && NOTE_VAR_LOCATION_LOC (loc_list->first->loc))
20201 {
20202 struct var_loc_node *node;
20203
20204 node = loc_list->first;
20205 rtl = NOTE_VAR_LOCATION_LOC (node->loc);
20206 if (GET_CODE (rtl) == EXPR_LIST)
20207 rtl = XEXP (rtl, 0);
20208 if ((CONSTANT_P (rtl) || GET_CODE (rtl) == CONST_STRING)
20209 && add_const_value_attribute (die, rtl))
20210 return true;
20211 }
20212 /* If this decl is from BLOCK_NONLOCALIZED_VARS, we might need its
20213 list several times. See if we've already cached the contents. */
20214 list = NULL;
20215 if (loc_list == NULL || cached_dw_loc_list_table == NULL)
20216 cache_p = false;
20217 if (cache_p)
20218 {
20219 cache = cached_dw_loc_list_table->find_with_hash (decl, DECL_UID (decl));
20220 if (cache)
20221 list = cache->loc_list;
20222 }
20223 if (list == NULL)
20224 {
20225 list = loc_list_from_tree (decl, decl_by_reference_p (decl) ? 0 : 2,
20226 NULL);
20227 /* It is usually worth caching this result if the decl is from
20228 BLOCK_NONLOCALIZED_VARS and if the list has at least two elements. */
20229 if (cache_p && list && list->dw_loc_next)
20230 {
20231 cached_dw_loc_list **slot
20232 = cached_dw_loc_list_table->find_slot_with_hash (decl,
20233 DECL_UID (decl),
20234 INSERT);
20235 cache = ggc_cleared_alloc<cached_dw_loc_list> ();
20236 cache->decl_id = DECL_UID (decl);
20237 cache->loc_list = list;
20238 *slot = cache;
20239 }
20240 }
20241 if (list)
20242 {
20243 add_AT_location_description (die, DW_AT_location, list);
20244 return true;
20245 }
20246 /* None of that worked, so it must not really have a location;
20247 try adding a constant value attribute from the DECL_INITIAL. */
20248 return tree_add_const_value_attribute_for_decl (die, decl);
20249 }
20250
20251 /* Helper function for tree_add_const_value_attribute. Natively encode
20252 initializer INIT into an array. Return true if successful. */
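
/* For example (illustrative), encoding the 8-byte initializer "hi" of a
   char[8] object through the STRING_CST case below copies 'h', 'i' and the
   terminating NUL into ARRAY and zero-fills the remaining five bytes.  */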
20253
20254 static bool
20255 native_encode_initializer (tree init, unsigned char *array, int size)
20256 {
20257 tree type;
20258
20259 if (init == NULL_TREE)
20260 return false;
20261
20262 STRIP_NOPS (init);
20263 switch (TREE_CODE (init))
20264 {
20265 case STRING_CST:
20266 type = TREE_TYPE (init);
20267 if (TREE_CODE (type) == ARRAY_TYPE)
20268 {
20269 tree enttype = TREE_TYPE (type);
20270 scalar_int_mode mode;
20271
20272 if (!is_int_mode (TYPE_MODE (enttype), &mode)
20273 || GET_MODE_SIZE (mode) != 1)
20274 return false;
20275 if (int_size_in_bytes (type) != size)
20276 return false;
20277 if (size > TREE_STRING_LENGTH (init))
20278 {
20279 memcpy (array, TREE_STRING_POINTER (init),
20280 TREE_STRING_LENGTH (init));
20281 memset (array + TREE_STRING_LENGTH (init),
20282 '\0', size - TREE_STRING_LENGTH (init));
20283 }
20284 else
20285 memcpy (array, TREE_STRING_POINTER (init), size);
20286 return true;
20287 }
20288 return false;
20289 case CONSTRUCTOR:
20290 type = TREE_TYPE (init);
20291 if (int_size_in_bytes (type) != size)
20292 return false;
20293 if (TREE_CODE (type) == ARRAY_TYPE)
20294 {
20295 HOST_WIDE_INT min_index;
20296 unsigned HOST_WIDE_INT cnt;
20297 int curpos = 0, fieldsize;
20298 constructor_elt *ce;
20299
20300 if (TYPE_DOMAIN (type) == NULL_TREE
20301 || !tree_fits_shwi_p (TYPE_MIN_VALUE (TYPE_DOMAIN (type))))
20302 return false;
20303
20304 fieldsize = int_size_in_bytes (TREE_TYPE (type));
20305 if (fieldsize <= 0)
20306 return false;
20307
20308 min_index = tree_to_shwi (TYPE_MIN_VALUE (TYPE_DOMAIN (type)));
20309 memset (array, '\0', size);
20310 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20311 {
20312 tree val = ce->value;
20313 tree index = ce->index;
20314 int pos = curpos;
20315 if (index && TREE_CODE (index) == RANGE_EXPR)
20316 pos = (tree_to_shwi (TREE_OPERAND (index, 0)) - min_index)
20317 * fieldsize;
20318 else if (index)
20319 pos = (tree_to_shwi (index) - min_index) * fieldsize;
20320
20321 if (val)
20322 {
20323 STRIP_NOPS (val);
20324 if (!native_encode_initializer (val, array + pos, fieldsize))
20325 return false;
20326 }
20327 curpos = pos + fieldsize;
20328 if (index && TREE_CODE (index) == RANGE_EXPR)
20329 {
20330 int count = tree_to_shwi (TREE_OPERAND (index, 1))
20331 - tree_to_shwi (TREE_OPERAND (index, 0));
20332 while (count-- > 0)
20333 {
20334 if (val)
20335 memcpy (array + curpos, array + pos, fieldsize);
20336 curpos += fieldsize;
20337 }
20338 }
20339 gcc_assert (curpos <= size);
20340 }
20341 return true;
20342 }
20343 else if (TREE_CODE (type) == RECORD_TYPE
20344 || TREE_CODE (type) == UNION_TYPE)
20345 {
20346 tree field = NULL_TREE;
20347 unsigned HOST_WIDE_INT cnt;
20348 constructor_elt *ce;
20349
20350 if (int_size_in_bytes (type) != size)
20351 return false;
20352
20353 if (TREE_CODE (type) == RECORD_TYPE)
20354 field = TYPE_FIELDS (type);
20355
20356 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (init), cnt, ce)
20357 {
20358 tree val = ce->value;
20359 int pos, fieldsize;
20360
20361 if (ce->index != 0)
20362 field = ce->index;
20363
20364 if (val)
20365 STRIP_NOPS (val);
20366
20367 if (field == NULL_TREE || DECL_BIT_FIELD (field))
20368 return false;
20369
20370 if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
20371 && TYPE_DOMAIN (TREE_TYPE (field))
20372 && ! TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (field))))
20373 return false;
20374 else if (DECL_SIZE_UNIT (field) == NULL_TREE
20375 || !tree_fits_shwi_p (DECL_SIZE_UNIT (field)))
20376 return false;
20377 fieldsize = tree_to_shwi (DECL_SIZE_UNIT (field));
20378 pos = int_byte_position (field);
20379 gcc_assert (pos + fieldsize <= size);
20380 if (val && fieldsize != 0
20381 && !native_encode_initializer (val, array + pos, fieldsize))
20382 return false;
20383 }
20384 return true;
20385 }
20386 return false;
20387 case VIEW_CONVERT_EXPR:
20388 case NON_LVALUE_EXPR:
20389 return native_encode_initializer (TREE_OPERAND (init, 0), array, size);
20390 default:
20391 return native_encode_expr (init, array, size) == size;
20392 }
20393 }
20394
20395 /* Attach a DW_AT_const_value attribute to DIE. The value of the
20396 attribute is the const value T. */
20397
20398 static bool
20399 tree_add_const_value_attribute (dw_die_ref die, tree t)
20400 {
20401 tree init;
20402 tree type = TREE_TYPE (t);
20403 rtx rtl;
20404
20405 if (!t || !TREE_TYPE (t) || TREE_TYPE (t) == error_mark_node)
20406 return false;
20407
20408 init = t;
20409 gcc_assert (!DECL_P (init));
20410
20411 if (TREE_CODE (init) == INTEGER_CST)
20412 {
20413 if (tree_fits_uhwi_p (init))
20414 {
20415 add_AT_unsigned (die, DW_AT_const_value, tree_to_uhwi (init));
20416 return true;
20417 }
20418 if (tree_fits_shwi_p (init))
20419 {
20420 add_AT_int (die, DW_AT_const_value, tree_to_shwi (init));
20421 return true;
20422 }
20423 }
20424 if (! early_dwarf)
20425 {
20426 rtl = rtl_for_decl_init (init, type);
20427 if (rtl)
20428 return add_const_value_attribute (die, rtl);
20429 }
20430 /* If the host and target are sane, try harder. */
20431 if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
20432 && initializer_constant_valid_p (init, type))
20433 {
20434 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (init));
20435 if (size > 0 && (int) size == size)
20436 {
20437 unsigned char *array = ggc_cleared_vec_alloc<unsigned char> (size);
20438
20439 if (native_encode_initializer (init, array, size))
20440 {
20441 add_AT_vec (die, DW_AT_const_value, size, 1, array);
20442 return true;
20443 }
20444 ggc_free (array);
20445 }
20446 }
20447 return false;
20448 }
20449
20450 /* Attach a DW_AT_const_value attribute to VAR_DIE. The value of the
20451 attribute is the const value of T, where T is an integral constant
20452 variable with static storage duration
20453 (so it can't be a PARM_DECL or a RESULT_DECL). */
20454
20455 static bool
20456 tree_add_const_value_attribute_for_decl (dw_die_ref var_die, tree decl)
20457 {
20458
20459 if (!decl
20460 || (!VAR_P (decl) && TREE_CODE (decl) != CONST_DECL)
20461 || (VAR_P (decl) && !TREE_STATIC (decl)))
20462 return false;
20463
20464 if (TREE_READONLY (decl)
20465 && ! TREE_THIS_VOLATILE (decl)
20466 && DECL_INITIAL (decl))
20467 /* OK */;
20468 else
20469 return false;
20470
20471 /* Don't add DW_AT_const_value if abstract origin already has one. */
20472 if (get_AT (var_die, DW_AT_const_value))
20473 return false;
20474
20475 return tree_add_const_value_attribute (var_die, DECL_INITIAL (decl));
20476 }
20477
20478 /* Convert the CFI instructions for the current function into a
20479    location list.  This is used for DW_AT_frame_base when we are targeting
20480 a dwarf2 consumer that does not support the dwarf3
20481 DW_OP_call_frame_cfa. OFFSET is a constant to be added to all CFA
20482 expressions. */
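
/* Sketch of the result (under the constraints above): each element of the
   returned list pairs one address range of the function with an expression,
   built by build_cfa_loc, that recomputes the CFA (plus OFFSET) as it stood
   over that range; a new element is emitted only where the CFA rule
   actually changes.  */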
20483
20484 static dw_loc_list_ref
20485 convert_cfa_to_fb_loc_list (HOST_WIDE_INT offset)
20486 {
20487 int ix;
20488 dw_fde_ref fde;
20489 dw_loc_list_ref list, *list_tail;
20490 dw_cfi_ref cfi;
20491 dw_cfa_location last_cfa, next_cfa;
20492 const char *start_label, *last_label, *section;
20493 dw_cfa_location remember;
20494
20495 fde = cfun->fde;
20496 gcc_assert (fde != NULL);
20497
20498 section = secname_for_decl (current_function_decl);
20499 list_tail = &list;
20500 list = NULL;
20501
20502 memset (&next_cfa, 0, sizeof (next_cfa));
20503 next_cfa.reg = INVALID_REGNUM;
20504 remember = next_cfa;
20505
20506 start_label = fde->dw_fde_begin;
20507
20508 /* ??? Bald assumption that the CIE opcode list does not contain
20509 advance opcodes. */
20510 FOR_EACH_VEC_ELT (*cie_cfi_vec, ix, cfi)
20511 lookup_cfa_1 (cfi, &next_cfa, &remember);
20512
20513 last_cfa = next_cfa;
20514 last_label = start_label;
20515
20516 if (fde->dw_fde_second_begin && fde->dw_fde_switch_cfi_index == 0)
20517 {
20518 /* If the first partition contained no CFI adjustments, the
20519 CIE opcodes apply to the whole first partition. */
20520 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20521 fde->dw_fde_begin, 0, fde->dw_fde_end, 0, section);
20522       list_tail = &(*list_tail)->dw_loc_next;
20523 start_label = last_label = fde->dw_fde_second_begin;
20524 }
20525
20526 FOR_EACH_VEC_SAFE_ELT (fde->dw_fde_cfi, ix, cfi)
20527 {
20528 switch (cfi->dw_cfi_opc)
20529 {
20530 case DW_CFA_set_loc:
20531 case DW_CFA_advance_loc1:
20532 case DW_CFA_advance_loc2:
20533 case DW_CFA_advance_loc4:
20534 if (!cfa_equal_p (&last_cfa, &next_cfa))
20535 {
20536 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20537 start_label, 0, last_label, 0, section);
20538
20539 list_tail = &(*list_tail)->dw_loc_next;
20540 last_cfa = next_cfa;
20541 start_label = last_label;
20542 }
20543 last_label = cfi->dw_cfi_oprnd1.dw_cfi_addr;
20544 break;
20545
20546 case DW_CFA_advance_loc:
20547 /* The encoding is complex enough that we should never emit this. */
20548 gcc_unreachable ();
20549
20550 default:
20551 lookup_cfa_1 (cfi, &next_cfa, &remember);
20552 break;
20553 }
20554 if (ix + 1 == fde->dw_fde_switch_cfi_index)
20555 {
20556 if (!cfa_equal_p (&last_cfa, &next_cfa))
20557 {
20558 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20559 start_label, 0, last_label, 0, section);
20560
20561 list_tail = &(*list_tail)->dw_loc_next;
20562 last_cfa = next_cfa;
20563 start_label = last_label;
20564 }
20565 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20566 start_label, 0, fde->dw_fde_end, 0, section);
20567 list_tail = &(*list_tail)->dw_loc_next;
20568 start_label = last_label = fde->dw_fde_second_begin;
20569 }
20570 }
20571
20572 if (!cfa_equal_p (&last_cfa, &next_cfa))
20573 {
20574 *list_tail = new_loc_list (build_cfa_loc (&last_cfa, offset),
20575 start_label, 0, last_label, 0, section);
20576 list_tail = &(*list_tail)->dw_loc_next;
20577 start_label = last_label;
20578 }
20579
20580 *list_tail = new_loc_list (build_cfa_loc (&next_cfa, offset),
20581 start_label, 0,
20582 fde->dw_fde_second_begin
20583 ? fde->dw_fde_second_end : fde->dw_fde_end, 0,
20584 section);
20585
20586 maybe_gen_llsym (list);
20587
20588 return list;
20589 }
20590
20591 /* Compute a displacement from the "steady-state frame pointer" to the
20592 frame base (often the same as the CFA), and store it in
20593 frame_pointer_fb_offset. OFFSET is added to the displacement
20594 before the latter is negated. */
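
/* A small worked example (hypothetical elimination result): if OFFSET has
   reached 0 by the time ELIM below comes back as (plus (reg sp) 16), then
   strip_offset_and_add leaves OFFSET at 16, frame_pointer_fb_offset becomes
   -16, and it is marked valid because the eliminated base is the stack
   pointer.  */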
20595
20596 static void
20597 compute_frame_pointer_to_fb_displacement (poly_int64 offset)
20598 {
20599 rtx reg, elim;
20600
20601 #ifdef FRAME_POINTER_CFA_OFFSET
20602 reg = frame_pointer_rtx;
20603 offset += FRAME_POINTER_CFA_OFFSET (current_function_decl);
20604 #else
20605 reg = arg_pointer_rtx;
20606 offset += ARG_POINTER_CFA_OFFSET (current_function_decl);
20607 #endif
20608
20609 elim = (ira_use_lra_p
20610 ? lra_eliminate_regs (reg, VOIDmode, NULL_RTX)
20611 : eliminate_regs (reg, VOIDmode, NULL_RTX));
20612 elim = strip_offset_and_add (elim, &offset);
20613
20614 frame_pointer_fb_offset = -offset;
20615
20616 /* ??? AVR doesn't set up valid eliminations when there is no stack frame
20617      in which to eliminate.  This is because its stack pointer isn't
20618 directly accessible as a register within the ISA. To work around
20619 this, assume that while we cannot provide a proper value for
20620 frame_pointer_fb_offset, we won't need one either. We can use
20621 hard frame pointer in debug info even if frame pointer isn't used
20622 since hard frame pointer in debug info is encoded with DW_OP_fbreg
20623 which uses the DW_AT_frame_base attribute, not hard frame pointer
20624 directly. */
20625 frame_pointer_fb_offset_valid
20626 = (elim == hard_frame_pointer_rtx || elim == stack_pointer_rtx);
20627 }
20628
20629 /* Generate a DW_AT_name attribute given some string value to be included as
20630 the value of the attribute. */
20631
20632 static void
20633 add_name_attribute (dw_die_ref die, const char *name_string)
20634 {
20635 if (name_string != NULL && *name_string != 0)
20636 {
20637 if (demangle_name_func)
20638 name_string = (*demangle_name_func) (name_string);
20639
20640 add_AT_string (die, DW_AT_name, name_string);
20641 }
20642 }
20643
20644 /* Generate a DW_AT_description attribute given some string value to be included
20645 as the value of the attribute. */
20646
20647 static void
20648 add_desc_attribute (dw_die_ref die, const char *name_string)
20649 {
20650 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20651 return;
20652
20653 if (name_string == NULL || *name_string == 0)
20654 return;
20655
20656 if (demangle_name_func)
20657 name_string = (*demangle_name_func) (name_string);
20658
20659 add_AT_string (die, DW_AT_description, name_string);
20660 }
20661
20662 /* Generate a DW_AT_description attribute given some decl to be included
20663 as the value of the attribute. */
20664
20665 static void
20666 add_desc_attribute (dw_die_ref die, tree decl)
20667 {
20668 tree decl_name;
20669
20670 if (!flag_describe_dies || (dwarf_version < 3 && dwarf_strict))
20671 return;
20672
20673 if (decl == NULL_TREE || !DECL_P (decl))
20674 return;
20675 decl_name = DECL_NAME (decl);
20676
20677 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
20678 {
20679 const char *name = dwarf2_name (decl, 0);
20680 add_desc_attribute (die, name ? name : IDENTIFIER_POINTER (decl_name));
20681 }
20682 else
20683 {
20684 char *desc = print_generic_expr_to_str (decl);
20685 add_desc_attribute (die, desc);
20686 free (desc);
20687 }
20688 }
20689
20690 /* Retrieve the descriptive type of TYPE, if any, make sure it has a
20691 DIE and attach a DW_AT_GNAT_descriptive_type attribute to the DIE
20692 of TYPE accordingly.
20693
20694 ??? This is a temporary measure until after we're able to generate
20695 regular DWARF for the complex Ada type system. */
20696
20697 static void
20698 add_gnat_descriptive_type_attribute (dw_die_ref die, tree type,
20699 dw_die_ref context_die)
20700 {
20701 tree dtype;
20702 dw_die_ref dtype_die;
20703
20704 if (!lang_hooks.types.descriptive_type)
20705 return;
20706
20707 dtype = lang_hooks.types.descriptive_type (type);
20708 if (!dtype)
20709 return;
20710
20711 dtype_die = lookup_type_die (dtype);
20712 if (!dtype_die)
20713 {
20714 gen_type_die (dtype, context_die);
20715 dtype_die = lookup_type_die (dtype);
20716 gcc_assert (dtype_die);
20717 }
20718
20719 add_AT_die_ref (die, DW_AT_GNAT_descriptive_type, dtype_die);
20720 }
20721
20722 /* Retrieve the comp_dir string suitable for use with DW_AT_comp_dir. */
20723
20724 static const char *
20725 comp_dir_string (void)
20726 {
20727 const char *wd;
20728 char *wd_plus_sep = NULL;
20729 static const char *cached_wd = NULL;
20730
20731 if (cached_wd != NULL)
20732 return cached_wd;
20733
20734 wd = get_src_pwd ();
20735 if (wd == NULL)
20736 return NULL;
20737
20738 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR)
20739 {
20740 size_t wdlen = strlen (wd);
20741 wd_plus_sep = XNEWVEC (char, wdlen + 2);
20742 strcpy (wd_plus_sep, wd);
20743 wd_plus_sep [wdlen] = DIR_SEPARATOR;
20744 wd_plus_sep [wdlen + 1] = 0;
20745 wd = wd_plus_sep;
20746 }
20747
20748 cached_wd = remap_debug_filename (wd);
20749
20750 /* remap_debug_filename can just pass through wd or return a new gc string.
20751      These two kinds of string can't both be stored in a GTY(())-tagged string,
20752      but since the cached value lives forever, just copy it if needed.  */
20753 if (cached_wd != wd)
20754 {
20755 cached_wd = xstrdup (cached_wd);
20756 if (DWARF2_DIR_SHOULD_END_WITH_SEPARATOR && wd_plus_sep != NULL)
20757 free (wd_plus_sep);
20758 }
20759
20760 return cached_wd;
20761 }
20762
20763 /* Generate a DW_AT_comp_dir attribute for DIE. */
20764
20765 static void
20766 add_comp_dir_attribute (dw_die_ref die)
20767 {
20768 const char * wd = comp_dir_string ();
20769 if (wd != NULL)
20770 add_AT_string (die, DW_AT_comp_dir, wd);
20771 }
20772
20773 /* Given a tree node VALUE describing a scalar attribute ATTR (i.e. a bound, a
20774 pointer computation, ...), output a representation for that bound according
20775 to the accepted FORMS (see enum dw_scalar_form) and add it to DIE. See
20776 loc_list_from_tree for the meaning of CONTEXT. */
20777
20778 static void
20779 add_scalar_info (dw_die_ref die, enum dwarf_attribute attr, tree value,
20780 int forms, struct loc_descr_context *context)
20781 {
20782 dw_die_ref context_die, decl_die = NULL;
20783 dw_loc_list_ref list;
20784 bool strip_conversions = true;
20785 bool placeholder_seen = false;
20786
20787 while (strip_conversions)
20788 switch (TREE_CODE (value))
20789 {
20790 case ERROR_MARK:
20791 case SAVE_EXPR:
20792 return;
20793
20794 CASE_CONVERT:
20795 case VIEW_CONVERT_EXPR:
20796 value = TREE_OPERAND (value, 0);
20797 break;
20798
20799 default:
20800 strip_conversions = false;
20801 break;
20802 }
20803
20804 /* If possible and permitted, output the attribute as a constant. */
20805 if ((forms & dw_scalar_form_constant) != 0
20806 && TREE_CODE (value) == INTEGER_CST)
20807 {
20808 unsigned int prec = simple_type_size_in_bits (TREE_TYPE (value));
20809
20810 /* If HOST_WIDE_INT is big enough then represent the bound as
20811 a constant value. We need to choose a form based on
20812 whether the type is signed or unsigned. We cannot just
20813 call add_AT_unsigned if the value itself is positive
20814 (add_AT_unsigned might add the unsigned value encoded as
20815          DW_FORM_data[1248]).  Some DWARF consumers will look up the
20816 bounds type and then sign extend any unsigned values found
20817 for signed types. This is needed only for
20818 DW_AT_{lower,upper}_bound, since for most other attributes,
20819 consumers will treat DW_FORM_data[1248] as unsigned values,
20820 regardless of the underlying type. */
20821 if (prec <= HOST_BITS_PER_WIDE_INT
20822 || tree_fits_uhwi_p (value))
20823 {
20824 if (TYPE_UNSIGNED (TREE_TYPE (value)))
20825 add_AT_unsigned (die, attr, TREE_INT_CST_LOW (value));
20826 else
20827 add_AT_int (die, attr, TREE_INT_CST_LOW (value));
20828 }
20829 else
20830 /* Otherwise represent the bound as an unsigned value with
20831 the precision of its type. The precision and signedness
20832 of the type will be necessary to re-interpret it
20833 unambiguously. */
20834 add_AT_wide (die, attr, wi::to_wide (value));
20835 return;
20836 }
20837
20838 /* Otherwise, if it's possible and permitted too, output a reference to
20839 another DIE. */
20840 if ((forms & dw_scalar_form_reference) != 0)
20841 {
20842 tree decl = NULL_TREE;
20843
20844 /* Some type attributes reference an outer type. For instance, the upper
20845 bound of an array may reference an embedding record (this happens in
20846 Ada). */
20847 if (TREE_CODE (value) == COMPONENT_REF
20848 && TREE_CODE (TREE_OPERAND (value, 0)) == PLACEHOLDER_EXPR
20849 && TREE_CODE (TREE_OPERAND (value, 1)) == FIELD_DECL)
20850 decl = TREE_OPERAND (value, 1);
20851
20852 else if (VAR_P (value)
20853 || TREE_CODE (value) == PARM_DECL
20854 || TREE_CODE (value) == RESULT_DECL)
20855 decl = value;
20856
20857 if (decl != NULL_TREE)
20858 {
20859 decl_die = lookup_decl_die (decl);
20860
20861 /* ??? Can this happen, or should the variable have been bound
20862 first? Probably it can, since I imagine that we try to create
20863 the types of parameters in the order in which they exist in
20864 the list, and won't have created a forward reference to a
20865 later parameter. */
20866 if (decl_die != NULL)
20867 {
20868 if (get_AT (decl_die, DW_AT_location)
20869 || get_AT (decl_die, DW_AT_data_member_location)
20870 || get_AT (decl_die, DW_AT_const_value))
20871 {
20872 add_AT_die_ref (die, attr, decl_die);
20873 return;
20874 }
20875 }
20876 }
20877 }
20878
20879 /* Last chance: try to create a stack operation procedure to evaluate the
20880 value. Do nothing if even that is not possible or permitted. */
20881 if ((forms & dw_scalar_form_exprloc) == 0)
20882 return;
20883
20884 list = loc_list_from_tree (value, 2, context);
20885 if (context && context->placeholder_arg)
20886 {
20887 placeholder_seen = context->placeholder_seen;
20888 context->placeholder_seen = false;
20889 }
20890 if (list == NULL || single_element_loc_list_p (list))
20891 {
20892 /* If this attribute is neither a reference nor a constant, it is
20893 a DWARF expression rather than a location description. For that,
20894 loc_list_from_tree (value, 0, &context) is needed. */
20895 dw_loc_list_ref list2 = loc_list_from_tree (value, 0, context);
20896 if (list2 && single_element_loc_list_p (list2))
20897 {
20898 if (placeholder_seen)
20899 {
20900 struct dwarf_procedure_info dpi;
20901 dpi.fndecl = NULL_TREE;
20902 dpi.args_count = 1;
20903 if (!resolve_args_picking (list2->expr, 1, &dpi))
20904 return;
20905 }
20906 add_AT_loc (die, attr, list2->expr);
20907 return;
20908 }
20909 }
20910
20911 /* If that failed to yield a single-element location list, fall back to
20912 outputting this as a reference, provided that is permitted. */
20913 if (list == NULL
20914 || (forms & dw_scalar_form_reference) == 0
20915 || placeholder_seen)
20916 return;
20917
20918 if (!decl_die)
20919 {
20920 if (current_function_decl == 0)
20921 context_die = comp_unit_die ();
20922 else
20923 context_die = lookup_decl_die (current_function_decl);
20924
20925 decl_die = new_die (DW_TAG_variable, context_die, value);
20926 add_AT_flag (decl_die, DW_AT_artificial, 1);
20927 add_type_attribute (decl_die, TREE_TYPE (value), TYPE_QUAL_CONST, false,
20928 context_die);
20929 }
20930
20931 add_AT_location_description (decl_die, DW_AT_location, list);
20932 add_AT_die_ref (die, attr, decl_die);
20933 }
20934
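/* A rough sketch of how the three forms above play out for a GNU C
   variable-length array such as

       void f (int n) { int a[n]; }

   The array's upper bound is not an INTEGER_CST, so the constant form does
   not apply.  If the bound lives in an (artificial) local variable whose
   DIE already carries a DW_AT_location, the reference form lets
   DW_AT_upper_bound point at that DIE; otherwise the exprloc form emits a
   DWARF expression built by loc_list_from_tree, possibly wrapped in the
   artificial DW_TAG_variable created at the end of this function.  The
   exact choice depends on the language and on when the bound was
   gimplified, so treat this only as an illustration.  */
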
20935 /* Return the default for DW_AT_lower_bound, or -1 if there is no
20936 default. */
20937
20938 static int
20939 lower_bound_default (void)
20940 {
20941 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
20942 {
20943 case DW_LANG_C:
20944 case DW_LANG_C89:
20945 case DW_LANG_C99:
20946 case DW_LANG_C11:
20947 case DW_LANG_C_plus_plus:
20948 case DW_LANG_C_plus_plus_11:
20949 case DW_LANG_C_plus_plus_14:
20950 case DW_LANG_ObjC:
20951 case DW_LANG_ObjC_plus_plus:
20952 return 0;
20953 case DW_LANG_Fortran77:
20954 case DW_LANG_Fortran90:
20955 case DW_LANG_Fortran95:
20956 case DW_LANG_Fortran03:
20957 case DW_LANG_Fortran08:
20958 return 1;
20959 case DW_LANG_UPC:
20960 case DW_LANG_D:
20961 case DW_LANG_Python:
20962 return dwarf_version >= 4 ? 0 : -1;
20963 case DW_LANG_Ada95:
20964 case DW_LANG_Ada83:
20965 case DW_LANG_Cobol74:
20966 case DW_LANG_Cobol85:
20967 case DW_LANG_Modula2:
20968 case DW_LANG_PLI:
20969 return dwarf_version >= 4 ? 1 : -1;
20970 default:
20971 return -1;
20972 }
20973 }
20974
20975 /* Given a tree node describing an array bound (either lower or upper) output
20976 a representation for that bound. */
20977
20978 static void
20979 add_bound_info (dw_die_ref subrange_die, enum dwarf_attribute bound_attr,
20980 tree bound, struct loc_descr_context *context)
20981 {
20982 int dflt;
20983
20984 while (1)
20985 switch (TREE_CODE (bound))
20986 {
20987 /* Strip all conversions. */
20988 CASE_CONVERT:
20989 case VIEW_CONVERT_EXPR:
20990 bound = TREE_OPERAND (bound, 0);
20991 break;
20992
20993 /* All fixed bounds are represented by INTEGER_CST nodes. Lower bounds
20994 are even omitted when they are the default. */
20995 case INTEGER_CST:
20996 /* If the value for this bound is the default one, we can even omit the
20997 attribute. */
20998 if (bound_attr == DW_AT_lower_bound
20999 && tree_fits_shwi_p (bound)
21000 && (dflt = lower_bound_default ()) != -1
21001 && tree_to_shwi (bound) == dflt)
21002 return;
21003
21004 /* FALLTHRU */
21005
21006 default:
21007 /* Because of the complex interactions there can be with other GNAT
21008 encodings, GDB isn't ready yet to handle a proper DWARF description
21009 for self-referential subrange bounds: let the GNAT encodings do the
21010 magic in such a case. */
21011 if (is_ada ()
21012 && gnat_encodings != DWARF_GNAT_ENCODINGS_MINIMAL
21013 && contains_placeholder_p (bound))
21014 return;
21015
21016 add_scalar_info (subrange_die, bound_attr, bound,
21017 dw_scalar_form_constant
21018 | dw_scalar_form_exprloc
21019 | dw_scalar_form_reference,
21020 context);
21021 return;
21022 }
21023 }
21024
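/* A worked example of the bound handling above (illustrative only; the
   exact output depends on the language and DWARF version):  for the C
   declaration

       int a[10];

   the lower bound is the INTEGER_CST 0, which matches the C default
   returned by lower_bound_default, so DW_AT_lower_bound is omitted
   entirely; the upper bound 9 goes through add_scalar_info as a constant,
   giving a DW_TAG_subrange_type with just DW_AT_upper_bound 9.  */
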
21025 /* Add subscript info to TYPE_DIE, describing an array TYPE, collapsing
21026 possibly nested array subscripts in a flat sequence if COLLAPSE_P is true.
21027 Note that the block of subscript information for an array type also
21028 includes information about the element type of the given array type.
21029
21030 This function reuses previously set type and bound information if
21031 available. */
21032
21033 static void
21034 add_subscript_info (dw_die_ref type_die, tree type, bool collapse_p)
21035 {
21036 unsigned dimension_number;
21037 tree lower, upper;
21038 dw_die_ref child = type_die->die_child;
21039
21040 for (dimension_number = 0;
21041 TREE_CODE (type) == ARRAY_TYPE && (dimension_number == 0 || collapse_p);
21042 type = TREE_TYPE (type), dimension_number++)
21043 {
21044 tree domain = TYPE_DOMAIN (type);
21045
21046 if (TYPE_STRING_FLAG (type) && is_fortran () && dimension_number > 0)
21047 break;
21048
21049 /* Arrays come in three flavors: Unspecified bounds, fixed bounds,
21050 and (in GNU C only) variable bounds. Handle all three forms
21051 here. */
21052
21053 /* Find and reuse a previously generated DW_TAG_subrange_type if
21054 available.
21055
21056 For multi-dimensional arrays, as we iterate through the
21057 various dimensions in the enclosing for loop above, we also
21058 iterate through the DIE children and pick up each
21059 DW_TAG_subrange_type previously generated (if available).
21060 Each child DW_TAG_subrange_type DIE describes the range of
21061 the current dimension. At this point we should have as many
21062 DW_TAG_subrange_type's as we have dimensions in the
21063 array. */
21064 dw_die_ref subrange_die = NULL;
21065 if (child)
21066 while (1)
21067 {
21068 child = child->die_sib;
21069 if (child->die_tag == DW_TAG_subrange_type)
21070 subrange_die = child;
21071 if (child == type_die->die_child)
21072 {
21073 /* If we wrapped around, stop looking next time. */
21074 child = NULL;
21075 break;
21076 }
21077 if (child->die_tag == DW_TAG_subrange_type)
21078 break;
21079 }
21080 if (!subrange_die)
21081 subrange_die = new_die (DW_TAG_subrange_type, type_die, NULL);
21082
21083 if (domain)
21084 {
21085 /* We have an array type with specified bounds. */
21086 lower = TYPE_MIN_VALUE (domain);
21087 upper = TYPE_MAX_VALUE (domain);
21088
21089 /* Define the index type. */
21090 if (TREE_TYPE (domain)
21091 && !get_AT (subrange_die, DW_AT_type))
21092 {
21093 /* ??? This is probably an Ada unnamed subrange type. Ignore the
21094 TREE_TYPE field. We can't emit debug info for this
21095 because it is an unnamed integral type. */
21096 if (TREE_CODE (domain) == INTEGER_TYPE
21097 && TYPE_NAME (domain) == NULL_TREE
21098 && TREE_CODE (TREE_TYPE (domain)) == INTEGER_TYPE
21099 && TYPE_NAME (TREE_TYPE (domain)) == NULL_TREE)
21100 ;
21101 else
21102 add_type_attribute (subrange_die, TREE_TYPE (domain),
21103 TYPE_UNQUALIFIED, false, type_die);
21104 }
21105
21106 /* ??? If upper is NULL, the array has unspecified length,
21107 but it does have a lower bound. This happens with Fortran
21108 dimension arr(N:*).
21109 Since the debugger is definitely going to need to know N
21110 to produce useful results, go ahead and output the lower
21111 bound solo, and hope the debugger can cope. */
21112
21113 if (!get_AT (subrange_die, DW_AT_lower_bound))
21114 add_bound_info (subrange_die, DW_AT_lower_bound, lower, NULL);
21115 if (!get_AT (subrange_die, DW_AT_upper_bound)
21116 && !get_AT (subrange_die, DW_AT_count))
21117 {
21118 if (upper)
21119 add_bound_info (subrange_die, DW_AT_upper_bound, upper, NULL);
21120 else if ((is_c () || is_cxx ()) && COMPLETE_TYPE_P (type))
21121 /* Zero-length array. */
21122 add_bound_info (subrange_die, DW_AT_count,
21123 build_int_cst (TREE_TYPE (lower), 0), NULL);
21124 }
21125 }
21126
21127 /* Otherwise we have an array type with an unspecified length. The
21128 DWARF-2 spec does not say how to handle this; let's just leave out the
21129 bounds. */
21130 }
21131 }
21132
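/* Sketch of the collapsing behaviour above, assuming a C translation unit
   (COLLAPSE_P is true for everything except Ada):  for

       int m[3][5];

   the tree representation is an array of arrays, but the loop above walks
   both ARRAY_TYPE levels and attaches two DW_TAG_subrange_type children
   (upper bounds 2 and 4, lower bounds omitted as the C default) to the
   single DW_TAG_array_type built by gen_array_type_die, whose element
   type is int.  With COLLAPSE_P false only the outermost dimension would
   be described here, and the element type would itself be an array
   type.  */
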
21133 /* Add a DW_AT_byte_size attribute to DIE with TREE_NODE's size. */
21134
21135 static void
21136 add_byte_size_attribute (dw_die_ref die, tree tree_node)
21137 {
21138 dw_die_ref decl_die;
21139 HOST_WIDE_INT size;
21140 dw_loc_descr_ref size_expr = NULL;
21141
21142 switch (TREE_CODE (tree_node))
21143 {
21144 case ERROR_MARK:
21145 size = 0;
21146 break;
21147 case ENUMERAL_TYPE:
21148 case RECORD_TYPE:
21149 case UNION_TYPE:
21150 case QUAL_UNION_TYPE:
21151 if (TREE_CODE (TYPE_SIZE_UNIT (tree_node)) == VAR_DECL
21152 && (decl_die = lookup_decl_die (TYPE_SIZE_UNIT (tree_node))))
21153 {
21154 add_AT_die_ref (die, DW_AT_byte_size, decl_die);
21155 return;
21156 }
21157 size_expr = type_byte_size (tree_node, &size);
21158 break;
21159 case FIELD_DECL:
21160 /* For a data member of a struct or union, the DW_AT_byte_size is
21161 generally given as the number of bytes normally allocated for an
21162 object of the *declared* type of the member itself. This is true
21163 even for bit-fields. */
21164 size = int_size_in_bytes (field_type (tree_node));
21165 break;
21166 default:
21167 gcc_unreachable ();
21168 }
21169
21170 /* Support for dynamically-sized objects was introduced by DWARFv3.
21171 At the moment, GDB does not handle variable byte sizes very well,
21172 though. */
21173 if ((dwarf_version >= 3 || !dwarf_strict)
21174 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL
21175 && size_expr != NULL)
21176 add_AT_loc (die, DW_AT_byte_size, size_expr);
21177
21178 /* Note that `size' might be -1 when we get to this point. If it is, that
21179 indicates that the byte size of the entity in question is variable and
21180 that we could not generate a DWARF expression that computes it. */
21181 if (size >= 0)
21182 add_AT_unsigned (die, DW_AT_byte_size, size);
21183 }
21184
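/* Illustration of the FIELD_DECL case above (a sketch; sizes are
   target-dependent):  for

       struct s { unsigned int b : 5; };

   field_type returns the declared type unsigned int, so int_size_in_bytes
   yields 4 on a typical 32-bit-int target and the member DIE gets
   DW_AT_byte_size 4, not the single byte the 5 bits happen to occupy.
   This matches the "containing object" model used by
   add_bit_offset_attribute below.  */
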
21185 /* Add a DW_AT_alignment attribute to DIE with TREE_NODE's non-default
21186 alignment. */
21187
21188 static void
21189 add_alignment_attribute (dw_die_ref die, tree tree_node)
21190 {
21191 if (dwarf_version < 5 && dwarf_strict)
21192 return;
21193
21194 unsigned align;
21195
21196 if (DECL_P (tree_node))
21197 {
21198 if (!DECL_USER_ALIGN (tree_node))
21199 return;
21200
21201 align = DECL_ALIGN_UNIT (tree_node);
21202 }
21203 else if (TYPE_P (tree_node))
21204 {
21205 if (!TYPE_USER_ALIGN (tree_node))
21206 return;
21207
21208 align = TYPE_ALIGN_UNIT (tree_node);
21209 }
21210 else
21211 gcc_unreachable ();
21212
21213 add_AT_unsigned (die, DW_AT_alignment, align);
21214 }
21215
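/* Quick illustration (a sketch; the attribute is only emitted for DWARF 5,
   or for earlier versions when not in strict mode):  for

       _Alignas (16) int x;

   DECL_USER_ALIGN is set, so the variable's DIE gets DW_AT_alignment 16,
   whereas a declaration that merely inherits the default ABI alignment
   produces no attribute at all.  The TYPE_P branch does the same for
   types whose alignment was raised explicitly, e.g. with
   __attribute__ ((aligned (N))).  */
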
21216 /* For a FIELD_DECL node which represents a bit-field, output an attribute
21217 which specifies the distance in bits from the highest order bit of the
21218 "containing object" for the bit-field to the highest order bit of the
21219 bit-field itself.
21220
21221 For any given bit-field, the "containing object" is a hypothetical object
21222 (of some integral or enum type) within which the given bit-field lives. The
21223 type of this hypothetical "containing object" is always the same as the
21224 declared type of the individual bit-field itself. The determination of the
21225 exact location of the "containing object" for a bit-field is rather
21226 complicated. It's handled by the `field_byte_offset' function (above).
21227
21228 CTX is required: see the comment for VLR_CONTEXT.
21229
21230 Note that it is the size (in bytes) of the hypothetical "containing object"
21231 which will be given in the DW_AT_byte_size attribute for this bit-field.
21232 (See `byte_size_attribute' above). */
21233
21234 static inline void
21235 add_bit_offset_attribute (dw_die_ref die, tree decl, struct vlr_context *ctx)
21236 {
21237 HOST_WIDE_INT object_offset_in_bytes;
21238 tree original_type = DECL_BIT_FIELD_TYPE (decl);
21239 HOST_WIDE_INT bitpos_int;
21240 HOST_WIDE_INT highest_order_object_bit_offset;
21241 HOST_WIDE_INT highest_order_field_bit_offset;
21242 HOST_WIDE_INT bit_offset;
21243
21244 field_byte_offset (decl, ctx, &object_offset_in_bytes);
21245
21246 /* Must be a field and a bit field. */
21247 gcc_assert (original_type && TREE_CODE (decl) == FIELD_DECL);
21248
21249 /* We can't yet handle bit-fields whose offsets are variable, so if we
21250 encounter such things, just return without generating any attribute
21251 whatsoever. Likewise for variable or too large size. */
21252 if (! tree_fits_shwi_p (bit_position (decl))
21253 || ! tree_fits_uhwi_p (DECL_SIZE (decl)))
21254 return;
21255
21256 bitpos_int = int_bit_position (decl);
21257
21258 /* Note that the bit offset is always the distance (in bits) from the
21259 highest-order bit of the "containing object" to the highest-order bit of
21260 the bit-field itself. Since the "high-order end" of any object or field
21261 is different on big-endian and little-endian machines, the computation
21262 below must take account of these differences. */
21263 highest_order_object_bit_offset = object_offset_in_bytes * BITS_PER_UNIT;
21264 highest_order_field_bit_offset = bitpos_int;
21265
21266 if (! BYTES_BIG_ENDIAN)
21267 {
21268 highest_order_field_bit_offset += tree_to_shwi (DECL_SIZE (decl));
21269 highest_order_object_bit_offset +=
21270 simple_type_size_in_bits (original_type);
21271 }
21272
21273 bit_offset
21274 = (! BYTES_BIG_ENDIAN
21275 ? highest_order_object_bit_offset - highest_order_field_bit_offset
21276 : highest_order_field_bit_offset - highest_order_object_bit_offset);
21277
21278 if (bit_offset < 0)
21279 add_AT_int (die, DW_AT_bit_offset, bit_offset);
21280 else
21281 add_AT_unsigned (die, DW_AT_bit_offset, (unsigned HOST_WIDE_INT) bit_offset);
21282 }
21283
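/* Worked example of the computation above, assuming a little-endian
   target with 32-bit int (the numbers differ on other targets):  for

       struct s { unsigned int a : 3; unsigned int b : 5; };

   the containing object of B starts at byte 0, so
   highest_order_object_bit_offset = 0 * 8 + 32 = 32; B sits at bit
   position 3 and is 5 bits wide, so highest_order_field_bit_offset
   = 3 + 5 = 8; the emitted DW_AT_bit_offset is therefore 32 - 8 = 24,
   i.e. B's most significant bit lies 24 bits below the most significant
   bit of the 4-byte containing object.  The same computation gives
   32 - 3 = 29 for A.  */
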
21284 /* For a FIELD_DECL node which represents a bit field, output an attribute
21285 which specifies the length in bits of the given field. */
21286
21287 static inline void
21288 add_bit_size_attribute (dw_die_ref die, tree decl)
21289 {
21290 /* Must be a field and a bit field. */
21291 gcc_assert (TREE_CODE (decl) == FIELD_DECL
21292 && DECL_BIT_FIELD_TYPE (decl));
21293
21294 if (tree_fits_uhwi_p (DECL_SIZE (decl)))
21295 add_AT_unsigned (die, DW_AT_bit_size, tree_to_uhwi (DECL_SIZE (decl)));
21296 }
21297
21298 /* If the compiled language is ANSI C, then add a 'prototyped'
21299 attribute if argument types are given for the parameters of a function. */
21300
21301 static inline void
21302 add_prototyped_attribute (dw_die_ref die, tree func_type)
21303 {
21304 switch (get_AT_unsigned (comp_unit_die (), DW_AT_language))
21305 {
21306 case DW_LANG_C:
21307 case DW_LANG_C89:
21308 case DW_LANG_C99:
21309 case DW_LANG_C11:
21310 case DW_LANG_ObjC:
21311 if (prototype_p (func_type))
21312 add_AT_flag (die, DW_AT_prototyped, 1);
21313 break;
21314 default:
21315 break;
21316 }
21317 }
21318
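/* Illustration of the prototype check above (a sketch, assuming the
   unit's DW_AT_language is one of the C/ObjC values listed):

       int f (void);   - prototype_p is true, DW_AT_prototyped 1 is added
       int g ();       - old-style declaration, no attribute is emitted

   For C++ and other languages the switch falls through to the default
   case and nothing is added by this function.  */
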
21319 /* Add an 'abstract_origin' attribute below a given DIE. The origin DIE is
21320 found by looking in the type declaration, the object declaration equate
21321 table or the block mapping. */
21322
21323 static inline void
21324 add_abstract_origin_attribute (dw_die_ref die, tree origin)
21325 {
21326 dw_die_ref origin_die = NULL;
21327
21328 /* For late LTO debug output we want to refer directly to the abstract
21329 DIE in the early debug rather than to the possibly existing concrete
21330 instance and avoid creating that just for this purpose. */
21331 sym_off_pair *desc;
21332 if (in_lto_p
21333 && external_die_map
21334 && (desc = external_die_map->get (origin)))
21335 {
21336 add_AT_external_die_ref (die, DW_AT_abstract_origin,
21337 desc->sym, desc->off);
21338 return;
21339 }
21340
21341 if (DECL_P (origin))
21342 origin_die = lookup_decl_die (origin);
21343 else if (TYPE_P (origin))
21344 origin_die = lookup_type_die (origin);
21345 else if (TREE_CODE (origin) == BLOCK)
21346 origin_die = lookup_block_die (origin);
21347
21348 /* XXX: Functions that are never lowered don't always have correct block
21349 trees (in some languages, such as Java, they simply have no block tree
21350 at all). For these functions, there is nothing we can really do to
21351 output correct debug info for inlined functions in all cases. Rather
21352 than die, we'll just produce deficient debug info now, in that we will
21353 have variables without a proper abstract origin. In the future, when all
21354 functions are lowered, we should re-add a gcc_assert (origin_die)
21355 here. */
21356
21357 if (origin_die)
21358 add_AT_die_ref (die, DW_AT_abstract_origin, origin_die);
21359 }
21360
21361 /* We do not currently support the pure_virtual attribute. */
21362
21363 static inline void
21364 add_pure_or_virtual_attribute (dw_die_ref die, tree func_decl)
21365 {
21366 if (DECL_VINDEX (func_decl))
21367 {
21368 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
21369
21370 if (tree_fits_shwi_p (DECL_VINDEX (func_decl)))
21371 add_AT_loc (die, DW_AT_vtable_elem_location,
21372 new_loc_descr (DW_OP_constu,
21373 tree_to_shwi (DECL_VINDEX (func_decl)),
21374 0));
21375
21376 /* GNU extension: Record what type this method came from originally. */
21377 if (debug_info_level > DINFO_LEVEL_TERSE
21378 && DECL_CONTEXT (func_decl))
21379 add_AT_die_ref (die, DW_AT_containing_type,
21380 lookup_type_die (DECL_CONTEXT (func_decl)));
21381 }
21382 }
21383 \f
21384 /* Add a DW_AT_linkage_name or DW_AT_MIPS_linkage_name attribute for the
21385 given decl. This was a vendor extension until DWARF 4
21386 standardized it. */
21387
21388 static void
21389 add_linkage_attr (dw_die_ref die, tree decl)
21390 {
21391 const char *name = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (decl));
21392
21393 /* Mimic what assemble_name_raw does with a leading '*'. */
21394 if (name[0] == '*')
21395 name = &name[1];
21396
21397 if (dwarf_version >= 4)
21398 add_AT_string (die, DW_AT_linkage_name, name);
21399 else
21400 add_AT_string (die, DW_AT_MIPS_linkage_name, name);
21401 }
21402
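/* Rough example of the linkage-name output (the mangled spelling shown is
   the usual Itanium-ABI one, not something this code guarantees):  for
   the C++ declaration

       namespace N { int v; }

   DECL_ASSEMBLER_NAME is "_ZN1N1vE", which differs from DECL_NAME "v", so
   add_linkage_name_raw below ends up calling add_linkage_attr and the DIE
   gets DW_AT_linkage_name "_ZN1N1vE" for DWARF 4 and later, or the older
   DW_AT_MIPS_linkage_name vendor attribute otherwise.  */
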
21403 /* Add source coordinate attributes for the given decl. */
21404
21405 static void
21406 add_src_coords_attributes (dw_die_ref die, tree decl)
21407 {
21408 expanded_location s;
21409
21410 if (LOCATION_LOCUS (DECL_SOURCE_LOCATION (decl)) == UNKNOWN_LOCATION)
21411 return;
21412 s = expand_location (DECL_SOURCE_LOCATION (decl));
21413 add_AT_file (die, DW_AT_decl_file, lookup_filename (s.file));
21414 add_AT_unsigned (die, DW_AT_decl_line, s.line);
21415 if (debug_column_info && s.column)
21416 add_AT_unsigned (die, DW_AT_decl_column, s.column);
21417 }
21418
21419 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl. */
21420
21421 static void
21422 add_linkage_name_raw (dw_die_ref die, tree decl)
21423 {
21424 /* Defer until we have an assembler name set. */
21425 if (!DECL_ASSEMBLER_NAME_SET_P (decl))
21426 {
21427 limbo_die_node *asm_name;
21428
21429 asm_name = ggc_cleared_alloc<limbo_die_node> ();
21430 asm_name->die = die;
21431 asm_name->created_for = decl;
21432 asm_name->next = deferred_asm_name;
21433 deferred_asm_name = asm_name;
21434 }
21435 else if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl))
21436 add_linkage_attr (die, decl);
21437 }
21438
21439 /* Add DW_AT_{,MIPS_}linkage_name attribute for the given decl if desired. */
21440
21441 static void
21442 add_linkage_name (dw_die_ref die, tree decl)
21443 {
21444 if (debug_info_level > DINFO_LEVEL_NONE
21445 && VAR_OR_FUNCTION_DECL_P (decl)
21446 && TREE_PUBLIC (decl)
21447 && !(VAR_P (decl) && DECL_REGISTER (decl))
21448 && die->die_tag != DW_TAG_member)
21449 add_linkage_name_raw (die, decl);
21450 }
21451
21452 /* Add a DW_AT_name attribute and source coordinate attribute for the
21453 given decl, but only if it actually has a name. */
21454
21455 static void
21456 add_name_and_src_coords_attributes (dw_die_ref die, tree decl,
21457 bool no_linkage_name)
21458 {
21459 tree decl_name;
21460
21461 decl_name = DECL_NAME (decl);
21462 if (decl_name != NULL && IDENTIFIER_POINTER (decl_name) != NULL)
21463 {
21464 const char *name = dwarf2_name (decl, 0);
21465 if (name)
21466 add_name_attribute (die, name);
21467 else
21468 add_desc_attribute (die, decl);
21469
21470 if (! DECL_ARTIFICIAL (decl))
21471 add_src_coords_attributes (die, decl);
21472
21473 if (!no_linkage_name)
21474 add_linkage_name (die, decl);
21475 }
21476 else
21477 add_desc_attribute (die, decl);
21478
21479 #ifdef VMS_DEBUGGING_INFO
21480 /* Get the function's name, as described by its RTL. This may be different
21481 from the DECL_NAME name used in the source file. */
21482 if (TREE_CODE (decl) == FUNCTION_DECL && TREE_ASM_WRITTEN (decl))
21483 {
21484 add_AT_addr (die, DW_AT_VMS_rtnbeg_pd_address,
21485 XEXP (DECL_RTL (decl), 0), false);
21486 vec_safe_push (used_rtx_array, XEXP (DECL_RTL (decl), 0));
21487 }
21488 #endif /* VMS_DEBUGGING_INFO */
21489 }
21490
21491 /* Add VALUE as a DW_AT_discr_value attribute to DIE. */
21492
21493 static void
21494 add_discr_value (dw_die_ref die, dw_discr_value *value)
21495 {
21496 dw_attr_node attr;
21497
21498 attr.dw_attr = DW_AT_discr_value;
21499 attr.dw_attr_val.val_class = dw_val_class_discr_value;
21500 attr.dw_attr_val.val_entry = NULL;
21501 attr.dw_attr_val.v.val_discr_value.pos = value->pos;
21502 if (value->pos)
21503 attr.dw_attr_val.v.val_discr_value.v.uval = value->v.uval;
21504 else
21505 attr.dw_attr_val.v.val_discr_value.v.sval = value->v.sval;
21506 add_dwarf_attr (die, &attr);
21507 }
21508
21509 /* Add DISCR_LIST as a DW_AT_discr_list to DIE. */
21510
21511 static void
21512 add_discr_list (dw_die_ref die, dw_discr_list_ref discr_list)
21513 {
21514 dw_attr_node attr;
21515
21516 attr.dw_attr = DW_AT_discr_list;
21517 attr.dw_attr_val.val_class = dw_val_class_discr_list;
21518 attr.dw_attr_val.val_entry = NULL;
21519 attr.dw_attr_val.v.val_discr_list = discr_list;
21520 add_dwarf_attr (die, &attr);
21521 }
21522
21523 static inline dw_discr_list_ref
21524 AT_discr_list (dw_attr_node *attr)
21525 {
21526 return attr->dw_attr_val.v.val_discr_list;
21527 }
21528
21529 #ifdef VMS_DEBUGGING_INFO
21530 /* Output the debug main pointer die for VMS */
21531
21532 void
21533 dwarf2out_vms_debug_main_pointer (void)
21534 {
21535 char label[MAX_ARTIFICIAL_LABEL_BYTES];
21536 dw_die_ref die;
21537
21538 /* Allocate the VMS debug main subprogram die. */
21539 die = new_die_raw (DW_TAG_subprogram);
21540 add_name_attribute (die, VMS_DEBUG_MAIN_POINTER);
21541 ASM_GENERATE_INTERNAL_LABEL (label, PROLOGUE_END_LABEL,
21542 current_function_funcdef_no);
21543 add_AT_lbl_id (die, DW_AT_entry_pc, label);
21544
21545 /* Make it the first child of comp_unit_die (). */
21546 die->die_parent = comp_unit_die ();
21547 if (comp_unit_die ()->die_child)
21548 {
21549 die->die_sib = comp_unit_die ()->die_child->die_sib;
21550 comp_unit_die ()->die_child->die_sib = die;
21551 }
21552 else
21553 {
21554 die->die_sib = die;
21555 comp_unit_die ()->die_child = die;
21556 }
21557 }
21558 #endif /* VMS_DEBUGGING_INFO */
21559
21560 /* walk_tree helper function for uses_local_type, below. */
21561
21562 static tree
21563 uses_local_type_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
21564 {
21565 if (!TYPE_P (*tp))
21566 *walk_subtrees = 0;
21567 else
21568 {
21569 tree name = TYPE_NAME (*tp);
21570 if (name && DECL_P (name) && decl_function_context (name))
21571 return *tp;
21572 }
21573 return NULL_TREE;
21574 }
21575
21576 /* If TYPE involves a function-local type (including a local typedef to a
21577 non-local type), returns that type; otherwise returns NULL_TREE. */
21578
21579 static tree
21580 uses_local_type (tree type)
21581 {
21582 tree used = walk_tree_without_duplicates (&type, uses_local_type_r, NULL);
21583 return used;
21584 }
21585
21586 /* Return the DIE for the scope that immediately contains this type.
21587 Non-named types that do not involve a function-local type get global
21588 scope. Named types nested in namespaces or other types get their
21589 containing scope. All other types (i.e. function-local named types) get
21590 the current active scope. */
21591
21592 static dw_die_ref
21593 scope_die_for (tree t, dw_die_ref context_die)
21594 {
21595 dw_die_ref scope_die = NULL;
21596 tree containing_scope;
21597
21598 /* Non-types always go in the current scope. */
21599 gcc_assert (TYPE_P (t));
21600
21601 /* Use the scope of the typedef, rather than the scope of the type
21602 it refers to. */
21603 if (TYPE_NAME (t) && DECL_P (TYPE_NAME (t)))
21604 containing_scope = DECL_CONTEXT (TYPE_NAME (t));
21605 else
21606 containing_scope = TYPE_CONTEXT (t);
21607
21608 /* Use the containing namespace if there is one. */
21609 if (containing_scope && TREE_CODE (containing_scope) == NAMESPACE_DECL)
21610 {
21611 if (context_die == lookup_decl_die (containing_scope))
21612 /* OK */;
21613 else if (debug_info_level > DINFO_LEVEL_TERSE)
21614 context_die = get_context_die (containing_scope);
21615 else
21616 containing_scope = NULL_TREE;
21617 }
21618
21619 /* Ignore function type "scopes" from the C frontend. They mean that
21620 a tagged type is local to a parmlist of a function declarator, but
21621 that isn't useful to DWARF. */
21622 if (containing_scope && TREE_CODE (containing_scope) == FUNCTION_TYPE)
21623 containing_scope = NULL_TREE;
21624
21625 if (SCOPE_FILE_SCOPE_P (containing_scope))
21626 {
21627 /* If T uses a local type keep it local as well, to avoid references
21628 to function-local DIEs from outside the function. */
21629 if (current_function_decl && uses_local_type (t))
21630 scope_die = context_die;
21631 else
21632 scope_die = comp_unit_die ();
21633 }
21634 else if (TYPE_P (containing_scope))
21635 {
21636 /* For types, we can just look up the appropriate DIE. */
21637 if (debug_info_level > DINFO_LEVEL_TERSE)
21638 scope_die = get_context_die (containing_scope);
21639 else
21640 {
21641 scope_die = lookup_type_die_strip_naming_typedef (containing_scope);
21642 if (scope_die == NULL)
21643 scope_die = comp_unit_die ();
21644 }
21645 }
21646 else
21647 scope_die = context_die;
21648
21649 return scope_die;
21650 }
21651
21652 /* Returns nonzero if CONTEXT_DIE is internal to a function. */
21653
21654 static inline int
21655 local_scope_p (dw_die_ref context_die)
21656 {
21657 for (; context_die; context_die = context_die->die_parent)
21658 if (context_die->die_tag == DW_TAG_inlined_subroutine
21659 || context_die->die_tag == DW_TAG_subprogram)
21660 return 1;
21661
21662 return 0;
21663 }
21664
21665 /* Returns nonzero if CONTEXT_DIE is a class. */
21666
21667 static inline int
21668 class_scope_p (dw_die_ref context_die)
21669 {
21670 return (context_die
21671 && (context_die->die_tag == DW_TAG_structure_type
21672 || context_die->die_tag == DW_TAG_class_type
21673 || context_die->die_tag == DW_TAG_interface_type
21674 || context_die->die_tag == DW_TAG_union_type));
21675 }
21676
21677 /* Returns nonzero if CONTEXT_DIE is a class or namespace, for deciding
21678 whether or not to treat a DIE in this context as a declaration. */
21679
21680 static inline int
21681 class_or_namespace_scope_p (dw_die_ref context_die)
21682 {
21683 return (class_scope_p (context_die)
21684 || (context_die && context_die->die_tag == DW_TAG_namespace));
21685 }
21686
21687 /* Many forms of DIEs require a "type description" attribute. This
21688 routine locates the proper "type descriptor" die for the type given
21689 by 'type' plus any additional qualifiers given by 'cv_quals', and
21690 adds a DW_AT_type attribute below the given die. */
21691
21692 static void
21693 add_type_attribute (dw_die_ref object_die, tree type, int cv_quals,
21694 bool reverse, dw_die_ref context_die)
21695 {
21696 enum tree_code code = TREE_CODE (type);
21697 dw_die_ref type_die = NULL;
21698
21699 /* ??? If this type is an unnamed subrange type of an integral, floating-point
21700 or fixed-point type, use the inner type. This is because we have no
21701 support for unnamed types in base_type_die. This can happen if this is
21702 an Ada subrange type. The correct solution is to emit a subrange type DIE. */
21703 if ((code == INTEGER_TYPE || code == REAL_TYPE || code == FIXED_POINT_TYPE)
21704 && TREE_TYPE (type) != 0 && TYPE_NAME (type) == 0)
21705 type = TREE_TYPE (type), code = TREE_CODE (type);
21706
21707 if (code == ERROR_MARK
21708 /* Handle a special case. For functions whose return type is void, we
21709 generate *no* type attribute. (Note that no object may have type
21710 `void', so this only applies to function return types). */
21711 || code == VOID_TYPE)
21712 return;
21713
21714 type_die = modified_type_die (type,
21715 cv_quals | TYPE_QUALS (type),
21716 reverse,
21717 context_die);
21718
21719 if (type_die != NULL)
21720 add_AT_die_ref (object_die, DW_AT_type, type_die);
21721 }
21722
21723 /* Given an object die, add the calling convention attribute for the
21724 function call type. */
21725 static void
21726 add_calling_convention_attribute (dw_die_ref subr_die, tree decl)
21727 {
21728 enum dwarf_calling_convention value = DW_CC_normal;
21729
21730 value = ((enum dwarf_calling_convention)
21731 targetm.dwarf_calling_convention (TREE_TYPE (decl)));
21732
21733 if (is_fortran ()
21734 && id_equal (DECL_ASSEMBLER_NAME (decl), "MAIN__"))
21735 {
21736 /* DWARF 2 doesn't provide a way to identify a program's source-level
21737 entry point. DW_AT_calling_convention attributes are only meant
21738 to describe functions' calling conventions. However, lacking a
21739 better way to signal the Fortran main program, we used this for
21740 a long time, following existing custom. Now, DWARF 4 has
21741 DW_AT_main_subprogram, which we add below, but some tools still
21742 rely on the old way, which we thus keep. */
21743 value = DW_CC_program;
21744
21745 if (dwarf_version >= 4 || !dwarf_strict)
21746 add_AT_flag (subr_die, DW_AT_main_subprogram, 1);
21747 }
21748
21749 /* Only add the attribute if the backend requests it and the value
21750 is not DW_CC_normal. */
21751 if (value && (value != DW_CC_normal))
21752 add_AT_unsigned (subr_die, DW_AT_calling_convention, value);
21753 }
21754
21755 /* Given a tree pointer to a struct, class, union, or enum type node, return
21756 a pointer to the (string) tag name for the given type, or zero if the type
21757 was declared without a tag. */
21758
21759 static const char *
21760 type_tag (const_tree type)
21761 {
21762 const char *name = 0;
21763
21764 if (TYPE_NAME (type) != 0)
21765 {
21766 tree t = 0;
21767
21768 /* Find the IDENTIFIER_NODE for the type name. */
21769 if (TREE_CODE (TYPE_NAME (type)) == IDENTIFIER_NODE
21770 && !TYPE_NAMELESS (type))
21771 t = TYPE_NAME (type);
21772
21773 /* The g++ front end makes the TYPE_NAME of *each* tagged type point to
21774 a TYPE_DECL node, regardless of whether or not a `typedef' was
21775 involved. */
21776 else if (TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
21777 && ! DECL_IGNORED_P (TYPE_NAME (type)))
21778 {
21779 /* We want to be extra verbose. Don't call dwarf_name if
21780 DECL_NAME isn't set. The default hook for decl_printable_name
21781 doesn't like that, and in this context it's correct to return
21782 0, instead of "<anonymous>" or the like. */
21783 if (DECL_NAME (TYPE_NAME (type))
21784 && !DECL_NAMELESS (TYPE_NAME (type)))
21785 name = lang_hooks.dwarf_name (TYPE_NAME (type), 2);
21786 }
21787
21788 /* Now get the name as a string, or invent one. */
21789 if (!name && t != 0)
21790 name = IDENTIFIER_POINTER (t);
21791 }
21792
21793 return (name == 0 || *name == '\0') ? 0 : name;
21794 }
21795
21796 /* Return the type associated with a data member, making a special check
21797 for bit-field types. */
21798
21799 static inline tree
21800 member_declared_type (const_tree member)
21801 {
21802 return (DECL_BIT_FIELD_TYPE (member)
21803 ? DECL_BIT_FIELD_TYPE (member) : TREE_TYPE (member));
21804 }
21805
21806 /* Get the decl's label, as described by its RTL. This may be different
21807 from the DECL_NAME name used in the source file. */
21808
21809 #if 0
21810 static const char *
21811 decl_start_label (tree decl)
21812 {
21813 rtx x;
21814 const char *fnname;
21815
21816 x = DECL_RTL (decl);
21817 gcc_assert (MEM_P (x));
21818
21819 x = XEXP (x, 0);
21820 gcc_assert (GET_CODE (x) == SYMBOL_REF);
21821
21822 fnname = XSTR (x, 0);
21823 return fnname;
21824 }
21825 #endif
21826 \f
21827 /* For variable-length arrays that have been previously generated, but
21828 may be incomplete due to missing subscript info, fill the subscript
21829 info. Return TRUE if this is one of those cases. */
21830 static bool
21831 fill_variable_array_bounds (tree type)
21832 {
21833 if (TREE_ASM_WRITTEN (type)
21834 && TREE_CODE (type) == ARRAY_TYPE
21835 && variably_modified_type_p (type, NULL))
21836 {
21837 dw_die_ref array_die = lookup_type_die (type);
21838 if (!array_die)
21839 return false;
21840 add_subscript_info (array_die, type, !is_ada ());
21841 return true;
21842 }
21843 return false;
21844 }
21845
21846 /* These routines generate the internal representation of the DIE's for
21847 the compilation unit. Debugging information is collected by walking
21848 the declaration trees passed in from dwarf2out_decl(). */
21849
21850 static void
21851 gen_array_type_die (tree type, dw_die_ref context_die)
21852 {
21853 dw_die_ref array_die;
21854
21855 /* GNU compilers represent multidimensional array types as sequences of one
21856 dimensional array types whose element types are themselves array types.
21857 We sometimes squish that down to a single array_type DIE with multiple
21858 subscripts in the Dwarf debugging info. The draft Dwarf specification
21859 says that we are allowed to do this kind of compression in C, because
21860 there is no difference between an array of arrays and a multidimensional
21861 array. We don't do this for Ada to remain as close as possible to the
21862 actual representation, which is especially important given the language's
21863 flexibility with respect to arrays of variable size. */
21864
21865 bool collapse_nested_arrays = !is_ada ();
21866
21867 if (fill_variable_array_bounds (type))
21868 return;
21869
21870 dw_die_ref scope_die = scope_die_for (type, context_die);
21871 tree element_type;
21872
21873 /* Emit DW_TAG_string_type for Fortran character types (with kind 1 only, as
21874 DW_TAG_string_type doesn't have a DW_AT_type attribute). */
21875 if (TREE_CODE (type) == ARRAY_TYPE
21876 && TYPE_STRING_FLAG (type)
21877 && is_fortran ()
21878 && TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (char_type_node))
21879 {
21880 HOST_WIDE_INT size;
21881
21882 array_die = new_die (DW_TAG_string_type, scope_die, type);
21883 add_name_attribute (array_die, type_tag (type));
21884 equate_type_number_to_die (type, array_die);
21885 size = int_size_in_bytes (type);
21886 if (size >= 0)
21887 add_AT_unsigned (array_die, DW_AT_byte_size, size);
21888 /* ??? We can't annotate types late, but for LTO we may not
21889 generate a location early either (gfortran.dg/save_6.f90). */
21890 else if (! (early_dwarf && (flag_generate_lto || flag_generate_offload))
21891 && TYPE_DOMAIN (type) != NULL_TREE
21892 && TYPE_MAX_VALUE (TYPE_DOMAIN (type)) != NULL_TREE)
21893 {
21894 tree szdecl = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
21895 tree rszdecl = szdecl;
21896
21897 size = int_size_in_bytes (TREE_TYPE (szdecl));
21898 if (!DECL_P (szdecl))
21899 {
21900 if (TREE_CODE (szdecl) == INDIRECT_REF
21901 && DECL_P (TREE_OPERAND (szdecl, 0)))
21902 {
21903 rszdecl = TREE_OPERAND (szdecl, 0);
21904 if (int_size_in_bytes (TREE_TYPE (rszdecl))
21905 != DWARF2_ADDR_SIZE)
21906 size = 0;
21907 }
21908 else
21909 size = 0;
21910 }
21911 if (size > 0)
21912 {
21913 dw_loc_list_ref loc
21914 = loc_list_from_tree (rszdecl, szdecl == rszdecl ? 2 : 0,
21915 NULL);
21916 if (loc)
21917 {
21918 add_AT_location_description (array_die, DW_AT_string_length,
21919 loc);
21920 if (size != DWARF2_ADDR_SIZE)
21921 add_AT_unsigned (array_die, dwarf_version >= 5
21922 ? DW_AT_string_length_byte_size
21923 : DW_AT_byte_size, size);
21924 }
21925 }
21926 }
21927 return;
21928 }
21929
21930 array_die = new_die (DW_TAG_array_type, scope_die, type);
21931 add_name_attribute (array_die, type_tag (type));
21932 equate_type_number_to_die (type, array_die);
21933
21934 if (TREE_CODE (type) == VECTOR_TYPE)
21935 add_AT_flag (array_die, DW_AT_GNU_vector, 1);
21936
21937 /* For Fortran multidimensional arrays use DW_ORD_col_major ordering. */
21938 if (is_fortran ()
21939 && TREE_CODE (type) == ARRAY_TYPE
21940 && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE
21941 && !TYPE_STRING_FLAG (TREE_TYPE (type)))
21942 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
21943
21944 #if 0
21945 /* We default the array ordering. Debuggers will probably do the right
21946 things even if DW_AT_ordering is not present. It's not even an issue
21947 until we start to get into multidimensional arrays anyway. If a debugger
21948 is ever caught doing the Wrong Thing for multi-dimensional arrays,
21949 then we'll have to put the DW_AT_ordering attribute back in. (But if
21950 and when we find out that we need to put these in, we will only do so
21951 for multidimensional arrays.) */
21952 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
21953 #endif
21954
21955 if (TREE_CODE (type) == VECTOR_TYPE)
21956 {
21957 /* For VECTOR_TYPEs we use an array die with appropriate bounds. */
21958 dw_die_ref subrange_die = new_die (DW_TAG_subrange_type, array_die, NULL);
21959 add_bound_info (subrange_die, DW_AT_lower_bound, size_zero_node, NULL);
21960 add_bound_info (subrange_die, DW_AT_upper_bound,
21961 size_int (TYPE_VECTOR_SUBPARTS (type) - 1), NULL);
21962 }
21963 else
21964 add_subscript_info (array_die, type, collapse_nested_arrays);
21965
21966 /* Add representation of the type of the elements of this array type and
21967 emit the corresponding DIE if we haven't done it already. */
21968 element_type = TREE_TYPE (type);
21969 if (collapse_nested_arrays)
21970 while (TREE_CODE (element_type) == ARRAY_TYPE)
21971 {
21972 if (TYPE_STRING_FLAG (element_type) && is_fortran ())
21973 break;
21974 element_type = TREE_TYPE (element_type);
21975 }
21976
21977 add_type_attribute (array_die, element_type, TYPE_UNQUALIFIED,
21978 TREE_CODE (type) == ARRAY_TYPE
21979 && TYPE_REVERSE_STORAGE_ORDER (type),
21980 context_die);
21981
21982 add_gnat_descriptive_type_attribute (array_die, type, context_die);
21983 if (TYPE_ARTIFICIAL (type))
21984 add_AT_flag (array_die, DW_AT_artificial, 1);
21985
21986 if (get_AT (array_die, DW_AT_name))
21987 add_pubtype (type, array_die);
21988
21989 add_alignment_attribute (array_die, type);
21990 }
21991
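/* Sketch of the VECTOR_TYPE handling above (a GNU extension; the bounds
   depend on the vector size):  for

       typedef int v4si __attribute__ ((vector_size (16)));

   the type gets a DW_TAG_array_type carrying the DW_AT_GNU_vector flag,
   a single DW_TAG_subrange_type child describing indices 0 through 3
   (the default lower bound is omitted for C), and DW_AT_type pointing at
   int.  Ordinary C arrays instead go through add_subscript_info.  */
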
21992 /* This routine generates a DIE for an array with a hidden descriptor; the
21993 details are filled into *info by a langhook. */
21994
21995 static void
21996 gen_descr_array_type_die (tree type, struct array_descr_info *info,
21997 dw_die_ref context_die)
21998 {
21999 const dw_die_ref scope_die = scope_die_for (type, context_die);
22000 const dw_die_ref array_die = new_die (DW_TAG_array_type, scope_die, type);
22001 struct loc_descr_context context = { type, info->base_decl, NULL,
22002 false, false };
22003 enum dwarf_tag subrange_tag = DW_TAG_subrange_type;
22004 int dim;
22005
22006 add_name_attribute (array_die, type_tag (type));
22007 equate_type_number_to_die (type, array_die);
22008
22009 if (info->ndimensions > 1)
22010 switch (info->ordering)
22011 {
22012 case array_descr_ordering_row_major:
22013 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_row_major);
22014 break;
22015 case array_descr_ordering_column_major:
22016 add_AT_unsigned (array_die, DW_AT_ordering, DW_ORD_col_major);
22017 break;
22018 default:
22019 break;
22020 }
22021
22022 if (dwarf_version >= 3 || !dwarf_strict)
22023 {
22024 if (info->data_location)
22025 add_scalar_info (array_die, DW_AT_data_location, info->data_location,
22026 dw_scalar_form_exprloc, &context);
22027 if (info->associated)
22028 add_scalar_info (array_die, DW_AT_associated, info->associated,
22029 dw_scalar_form_constant
22030 | dw_scalar_form_exprloc
22031 | dw_scalar_form_reference, &context);
22032 if (info->allocated)
22033 add_scalar_info (array_die, DW_AT_allocated, info->allocated,
22034 dw_scalar_form_constant
22035 | dw_scalar_form_exprloc
22036 | dw_scalar_form_reference, &context);
22037 if (info->stride)
22038 {
22039 const enum dwarf_attribute attr
22040 = (info->stride_in_bits) ? DW_AT_bit_stride : DW_AT_byte_stride;
22041 const int forms
22042 = (info->stride_in_bits)
22043 ? dw_scalar_form_constant
22044 : (dw_scalar_form_constant
22045 | dw_scalar_form_exprloc
22046 | dw_scalar_form_reference);
22047
22048 add_scalar_info (array_die, attr, info->stride, forms, &context);
22049 }
22050 }
22051 if (dwarf_version >= 5)
22052 {
22053 if (info->rank)
22054 {
22055 add_scalar_info (array_die, DW_AT_rank, info->rank,
22056 dw_scalar_form_constant
22057 | dw_scalar_form_exprloc, &context);
22058 subrange_tag = DW_TAG_generic_subrange;
22059 context.placeholder_arg = true;
22060 }
22061 }
22062
22063 add_gnat_descriptive_type_attribute (array_die, type, context_die);
22064
22065 for (dim = 0; dim < info->ndimensions; dim++)
22066 {
22067 dw_die_ref subrange_die = new_die (subrange_tag, array_die, NULL);
22068
22069 if (info->dimen[dim].bounds_type)
22070 add_type_attribute (subrange_die,
22071 info->dimen[dim].bounds_type, TYPE_UNQUALIFIED,
22072 false, context_die);
22073 if (info->dimen[dim].lower_bound)
22074 add_bound_info (subrange_die, DW_AT_lower_bound,
22075 info->dimen[dim].lower_bound, &context);
22076 if (info->dimen[dim].upper_bound)
22077 add_bound_info (subrange_die, DW_AT_upper_bound,
22078 info->dimen[dim].upper_bound, &context);
22079 if ((dwarf_version >= 3 || !dwarf_strict) && info->dimen[dim].stride)
22080 add_scalar_info (subrange_die, DW_AT_byte_stride,
22081 info->dimen[dim].stride,
22082 dw_scalar_form_constant
22083 | dw_scalar_form_exprloc
22084 | dw_scalar_form_reference,
22085 &context);
22086 }
22087
22088 gen_type_die (info->element_type, context_die);
22089 add_type_attribute (array_die, info->element_type, TYPE_UNQUALIFIED,
22090 TREE_CODE (type) == ARRAY_TYPE
22091 && TYPE_REVERSE_STORAGE_ORDER (type),
22092 context_die);
22093
22094 if (get_AT (array_die, DW_AT_name))
22095 add_pubtype (type, array_die);
22096
22097 add_alignment_attribute (array_die, type);
22098 }
22099
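/* Hedged sketch of what the langhook-driven path above produces for a
   Fortran deferred-shape array (details vary with the front end and the
   requested DWARF level):  for

       real, allocatable :: a(:,:)

   info->ndimensions is 2, so DW_AT_ordering (DW_ORD_col_major) is added
   and two DW_TAG_subrange_type children are emitted (or
   DW_TAG_generic_subrange when a DWARF 5 rank expression is supplied),
   each with bounds and strides given as expressions that read the array
   descriptor; DW_AT_data_location and DW_AT_allocated are attached to the
   array DIE itself.  */
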
22100 #if 0
22101 static void
22102 gen_entry_point_die (tree decl, dw_die_ref context_die)
22103 {
22104 tree origin = decl_ultimate_origin (decl);
22105 dw_die_ref decl_die = new_die (DW_TAG_entry_point, context_die, decl);
22106
22107 if (origin != NULL)
22108 add_abstract_origin_attribute (decl_die, origin);
22109 else
22110 {
22111 add_name_and_src_coords_attributes (decl_die, decl);
22112 add_type_attribute (decl_die, TREE_TYPE (TREE_TYPE (decl)),
22113 TYPE_UNQUALIFIED, false, context_die);
22114 }
22115
22116 if (DECL_ABSTRACT_P (decl))
22117 equate_decl_number_to_die (decl, decl_die);
22118 else
22119 add_AT_lbl_id (decl_die, DW_AT_low_pc, decl_start_label (decl));
22120 }
22121 #endif
22122
22123 /* Walk through the list of incomplete types again, trying once more to
22124 emit full debugging info for them. */
22125
22126 static void
22127 retry_incomplete_types (void)
22128 {
22129 set_early_dwarf s;
22130 int i;
22131
22132 for (i = vec_safe_length (incomplete_types) - 1; i >= 0; i--)
22133 if (should_emit_struct_debug ((*incomplete_types)[i], DINFO_USAGE_DIR_USE))
22134 gen_type_die ((*incomplete_types)[i], comp_unit_die ());
22135 vec_safe_truncate (incomplete_types, 0);
22136 }
22137
22138 /* Determine what tag to use for a record type. */
22139
22140 static enum dwarf_tag
22141 record_type_tag (tree type)
22142 {
22143 if (! lang_hooks.types.classify_record)
22144 return DW_TAG_structure_type;
22145
22146 switch (lang_hooks.types.classify_record (type))
22147 {
22148 case RECORD_IS_STRUCT:
22149 return DW_TAG_structure_type;
22150
22151 case RECORD_IS_CLASS:
22152 return DW_TAG_class_type;
22153
22154 case RECORD_IS_INTERFACE:
22155 if (dwarf_version >= 3 || !dwarf_strict)
22156 return DW_TAG_interface_type;
22157 return DW_TAG_structure_type;
22158
22159 default:
22160 gcc_unreachable ();
22161 }
22162 }
22163
22164 /* Generate a DIE to represent an enumeration type. Note that these DIEs
22165 include all of the information about the enumeration values also. Each
22166 enumerated type name/value is listed as a child of the enumerated type
22167 DIE. */
22168
22169 static dw_die_ref
22170 gen_enumeration_type_die (tree type, dw_die_ref context_die)
22171 {
22172 dw_die_ref type_die = lookup_type_die (type);
22173 dw_die_ref orig_type_die = type_die;
22174
22175 if (type_die == NULL)
22176 {
22177 type_die = new_die (DW_TAG_enumeration_type,
22178 scope_die_for (type, context_die), type);
22179 equate_type_number_to_die (type, type_die);
22180 add_name_attribute (type_die, type_tag (type));
22181 if ((dwarf_version >= 4 || !dwarf_strict)
22182 && ENUM_IS_SCOPED (type))
22183 add_AT_flag (type_die, DW_AT_enum_class, 1);
22184 if (ENUM_IS_OPAQUE (type) && TYPE_SIZE (type))
22185 add_AT_flag (type_die, DW_AT_declaration, 1);
22186 if (!dwarf_strict)
22187 add_AT_unsigned (type_die, DW_AT_encoding,
22188 TYPE_UNSIGNED (type)
22189 ? DW_ATE_unsigned
22190 : DW_ATE_signed);
22191 }
22192 else if (! TYPE_SIZE (type) || ENUM_IS_OPAQUE (type))
22193 return type_die;
22194 else
22195 remove_AT (type_die, DW_AT_declaration);
22196
22197 /* Handle a GNU C/C++ extension, i.e. incomplete enum types. If the
22198 given enum type is incomplete, do not generate the DW_AT_byte_size
22199 attribute or the DW_AT_element_list attribute. */
22200 if (TYPE_SIZE (type))
22201 {
22202 tree link;
22203
22204 if (!ENUM_IS_OPAQUE (type))
22205 TREE_ASM_WRITTEN (type) = 1;
22206 if (!orig_type_die || !get_AT (type_die, DW_AT_byte_size))
22207 add_byte_size_attribute (type_die, type);
22208 if (!orig_type_die || !get_AT (type_die, DW_AT_alignment))
22209 add_alignment_attribute (type_die, type);
22210 if ((dwarf_version >= 3 || !dwarf_strict)
22211 && (!orig_type_die || !get_AT (type_die, DW_AT_type)))
22212 {
22213 tree underlying = lang_hooks.types.enum_underlying_base_type (type);
22214 add_type_attribute (type_die, underlying, TYPE_UNQUALIFIED, false,
22215 context_die);
22216 }
22217 if (TYPE_STUB_DECL (type) != NULL_TREE)
22218 {
22219 if (!orig_type_die || !get_AT (type_die, DW_AT_decl_file))
22220 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
22221 if (!orig_type_die || !get_AT (type_die, DW_AT_accessibility))
22222 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
22223 }
22224
22225 /* If the first reference to this type was as the return type of an
22226 inline function, then it may not have a parent. Fix this now. */
22227 if (type_die->die_parent == NULL)
22228 add_child_die (scope_die_for (type, context_die), type_die);
22229
22230 for (link = TYPE_VALUES (type);
22231 link != NULL; link = TREE_CHAIN (link))
22232 {
22233 dw_die_ref enum_die = new_die (DW_TAG_enumerator, type_die, link);
22234 tree value = TREE_VALUE (link);
22235
22236 gcc_assert (!ENUM_IS_OPAQUE (type));
22237 add_name_attribute (enum_die,
22238 IDENTIFIER_POINTER (TREE_PURPOSE (link)));
22239
22240 if (TREE_CODE (value) == CONST_DECL)
22241 value = DECL_INITIAL (value);
22242
22243 if (simple_type_size_in_bits (TREE_TYPE (value))
22244 <= HOST_BITS_PER_WIDE_INT || tree_fits_shwi_p (value))
22245 {
22246 /* For constant forms created by add_AT_unsigned, DWARF
22247 consumers (GDB, elfutils, etc.) always zero-extend
22248 the value. Only when the actual value is negative
22249 do we need to use add_AT_int to generate a constant
22250 form that can represent negative values. */
22251 HOST_WIDE_INT val = TREE_INT_CST_LOW (value);
22252 if (TYPE_UNSIGNED (TREE_TYPE (value)) || val >= 0)
22253 add_AT_unsigned (enum_die, DW_AT_const_value,
22254 (unsigned HOST_WIDE_INT) val);
22255 else
22256 add_AT_int (enum_die, DW_AT_const_value, val);
22257 }
22258 else
22259 /* Enumeration constants may be wider than HOST_WIDE_INT. Handle
22260 that here. TODO: This should be re-worked to use correct
22261 signed/unsigned double tags for all cases. */
22262 add_AT_wide (enum_die, DW_AT_const_value, wi::to_wide (value));
22263 }
22264
22265 add_gnat_descriptive_type_attribute (type_die, type, context_die);
22266 if (TYPE_ARTIFICIAL (type)
22267 && (!orig_type_die || !get_AT (type_die, DW_AT_artificial)))
22268 add_AT_flag (type_die, DW_AT_artificial, 1);
22269 }
22270 else
22271 add_AT_flag (type_die, DW_AT_declaration, 1);
22272
22273 add_pubtype (type, type_die);
22274
22275 return type_die;
22276 }
22277
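/* A small worked example of the enumeration handling above (the encoding
   and underlying-type attributes additionally depend on -gstrict-dwarf
   and the language hook):  for

       enum e { RED = 1, BLUE = 2 };

   this emits a DW_TAG_enumeration_type with DW_AT_byte_size and one
   DW_TAG_enumerator child per value, each carrying DW_AT_name ("RED",
   "BLUE") and DW_AT_const_value (1, 2), the latter via add_AT_unsigned
   because the values are non-negative.  */
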
22278 /* Generate a DIE to represent either a real live formal parameter decl or to
22279 represent just the type of some formal parameter position in some function
22280 type.
22281
22282 Note that this routine is a bit unusual because its argument may be a
22283 ..._DECL node (i.e. either a PARM_DECL or perhaps a VAR_DECL which
22284 represents an inlining of some PARM_DECL) or else some sort of a ..._TYPE
22285 node. If it's the former then this function is being called to output a
22286 DIE to represent a formal parameter object (or some inlining thereof). If
22287 it's the latter, then this function is only being called to output a
22288 DW_TAG_formal_parameter DIE to stand as a placeholder for some formal
22289 argument type of some subprogram type.
22290 If EMIT_NAME_P is true, name and source coordinate attributes
22291 are emitted. */
22292
22293 static dw_die_ref
22294 gen_formal_parameter_die (tree node, tree origin, bool emit_name_p,
22295 dw_die_ref context_die)
22296 {
22297 tree node_or_origin = node ? node : origin;
22298 tree ultimate_origin;
22299 dw_die_ref parm_die = NULL;
22300
22301 if (DECL_P (node_or_origin))
22302 {
22303 parm_die = lookup_decl_die (node);
22304
22305 /* If the contexts differ, we may not be talking about the same
22306 thing.
22307 ??? When in LTO the DIE parent is the "abstract" copy and the
22308 context_die is the specification "copy". But this whole block
22309 should eventually be no longer needed. */
22310 if (parm_die && parm_die->die_parent != context_die && !in_lto_p)
22311 {
22312 if (!DECL_ABSTRACT_P (node))
22313 {
22314 /* This can happen when creating an inlined instance, in
22315 which case we need to create a new DIE that will get
22316 annotated with DW_AT_abstract_origin. */
22317 parm_die = NULL;
22318 }
22319 else
22320 gcc_unreachable ();
22321 }
22322
22323 if (parm_die && parm_die->die_parent == NULL)
22324 {
22325 /* Check that parm_die already has the right attributes that
22326 we would have added below. If any attributes are
22327 missing, fall through to add them. */
22328 if (! DECL_ABSTRACT_P (node_or_origin)
22329 && !get_AT (parm_die, DW_AT_location)
22330 && !get_AT (parm_die, DW_AT_const_value))
22331 /* We are missing location info, and are about to add it. */
22332 ;
22333 else
22334 {
22335 add_child_die (context_die, parm_die);
22336 return parm_die;
22337 }
22338 }
22339 }
22340
22341 /* If we have a previously generated DIE, use it, unless this is a
22342 concrete instance (origin != NULL), in which case we need a new
22343 DIE with a corresponding DW_AT_abstract_origin. */
22344 bool reusing_die;
22345 if (parm_die && origin == NULL)
22346 reusing_die = true;
22347 else
22348 {
22349 parm_die = new_die (DW_TAG_formal_parameter, context_die, node);
22350 reusing_die = false;
22351 }
22352
22353 switch (TREE_CODE_CLASS (TREE_CODE (node_or_origin)))
22354 {
22355 case tcc_declaration:
22356 ultimate_origin = decl_ultimate_origin (node_or_origin);
22357 if (node || ultimate_origin)
22358 origin = ultimate_origin;
22359
22360 if (reusing_die)
22361 goto add_location;
22362
22363 if (origin != NULL)
22364 add_abstract_origin_attribute (parm_die, origin);
22365 else if (emit_name_p)
22366 add_name_and_src_coords_attributes (parm_die, node);
22367 if (origin == NULL
22368 || (! DECL_ABSTRACT_P (node_or_origin)
22369 && variably_modified_type_p (TREE_TYPE (node_or_origin),
22370 decl_function_context
22371 (node_or_origin))))
22372 {
22373 tree type = TREE_TYPE (node_or_origin);
22374 if (decl_by_reference_p (node_or_origin))
22375 add_type_attribute (parm_die, TREE_TYPE (type),
22376 TYPE_UNQUALIFIED,
22377 false, context_die);
22378 else
22379 add_type_attribute (parm_die, type,
22380 decl_quals (node_or_origin),
22381 false, context_die);
22382 }
22383 if (origin == NULL && DECL_ARTIFICIAL (node))
22384 add_AT_flag (parm_die, DW_AT_artificial, 1);
22385 add_location:
22386 if (node && node != origin)
22387 equate_decl_number_to_die (node, parm_die);
22388 if (! DECL_ABSTRACT_P (node_or_origin))
22389 add_location_or_const_value_attribute (parm_die, node_or_origin,
22390 node == NULL);
22391
22392 break;
22393
22394 case tcc_type:
22395 /* We were called with some kind of a ..._TYPE node. */
22396 add_type_attribute (parm_die, node_or_origin, TYPE_UNQUALIFIED, false,
22397 context_die);
22398 break;
22399
22400 default:
22401 gcc_unreachable ();
22402 }
22403
22404 return parm_die;
22405 }
22406
22407 /* Generate and return a DW_TAG_GNU_formal_parameter_pack. Also generate
22408 children DW_TAG_formal_parameter DIEs representing the arguments of the
22409 parameter pack.
22410
22411 PARM_PACK must be a function parameter pack.
22412 PACK_ARG is the first argument of the parameter pack. Its TREE_CHAIN
22413 must point to the subsequent arguments of the function PACK_ARG belongs to.
22414 SUBR_DIE is the DIE of the function PACK_ARG belongs to.
22415 If NEXT_ARG is non-NULL, *NEXT_ARG is set to the function argument
22416 following the last one for which a DIE was generated. */
22417
22418 static dw_die_ref
22419 gen_formal_parameter_pack_die (tree parm_pack,
22420 tree pack_arg,
22421 dw_die_ref subr_die,
22422 tree *next_arg)
22423 {
22424 tree arg;
22425 dw_die_ref parm_pack_die;
22426
22427 gcc_assert (parm_pack
22428 && lang_hooks.function_parameter_pack_p (parm_pack)
22429 && subr_die);
22430
22431 parm_pack_die = new_die (DW_TAG_GNU_formal_parameter_pack, subr_die, parm_pack);
22432 add_src_coords_attributes (parm_pack_die, parm_pack);
22433
22434 for (arg = pack_arg; arg; arg = DECL_CHAIN (arg))
22435 {
22436 if (! lang_hooks.decls.function_parm_expanded_from_pack_p (arg,
22437 parm_pack))
22438 break;
22439 gen_formal_parameter_die (arg, NULL,
22440 false /* Don't emit name attribute. */,
22441 parm_pack_die);
22442 }
22443 if (next_arg)
22444 *next_arg = arg;
22445 return parm_pack_die;
22446 }
22447
22448 /* Generate a special type of DIE used as a stand-in for a trailing ellipsis
22449 at the end of an (ANSI prototyped) formal parameters list. */
22450
22451 static void
22452 gen_unspecified_parameters_die (tree decl_or_type, dw_die_ref context_die)
22453 {
22454 new_die (DW_TAG_unspecified_parameters, context_die, decl_or_type);
22455 }
22456
22457 /* Generate a list of nameless DW_TAG_formal_parameter DIEs (and perhaps a
22458 DW_TAG_unspecified_parameters DIE) to represent the types of the formal
22459 parameters as specified in some function type specification (except for
22460 those which appear as part of a function *definition*). */
22461
22462 static void
22463 gen_formal_types_die (tree function_or_method_type, dw_die_ref context_die)
22464 {
22465 tree link;
22466 tree formal_type = NULL;
22467 tree first_parm_type;
22468 tree arg;
22469
22470 if (TREE_CODE (function_or_method_type) == FUNCTION_DECL)
22471 {
22472 arg = DECL_ARGUMENTS (function_or_method_type);
22473 function_or_method_type = TREE_TYPE (function_or_method_type);
22474 }
22475 else
22476 arg = NULL_TREE;
22477
22478 first_parm_type = TYPE_ARG_TYPES (function_or_method_type);
22479
22480 /* Make our first pass over the list of formal parameter types and output a
22481 DW_TAG_formal_parameter DIE for each one. */
22482 for (link = first_parm_type; link; )
22483 {
22484 dw_die_ref parm_die;
22485
22486 formal_type = TREE_VALUE (link);
22487 if (formal_type == void_type_node)
22488 break;
22489
22490 /* Output a (nameless) DIE to represent the formal parameter itself. */
22491 parm_die = gen_formal_parameter_die (formal_type, NULL,
22492 true /* Emit name attribute. */,
22493 context_die);
22494 if (TREE_CODE (function_or_method_type) == METHOD_TYPE
22495 && link == first_parm_type)
22496 {
22497 add_AT_flag (parm_die, DW_AT_artificial, 1);
22498 if (dwarf_version >= 3 || !dwarf_strict)
22499 add_AT_die_ref (context_die, DW_AT_object_pointer, parm_die);
22500 }
22501 else if (arg && DECL_ARTIFICIAL (arg))
22502 add_AT_flag (parm_die, DW_AT_artificial, 1);
22503
22504 link = TREE_CHAIN (link);
22505 if (arg)
22506 arg = DECL_CHAIN (arg);
22507 }
22508
22509 /* If this function type has an ellipsis, add a
22510 DW_TAG_unspecified_parameters DIE to the end of the parameter list. */
22511 if (formal_type != void_type_node)
22512 gen_unspecified_parameters_die (function_or_method_type, context_die);
22513
22514 /* Make our second (and final) pass over the list of formal parameter types
22515 and output DIEs to represent those types (as necessary). */
22516 for (link = TYPE_ARG_TYPES (function_or_method_type);
22517 link && TREE_VALUE (link);
22518 link = TREE_CHAIN (link))
22519 gen_type_die (TREE_VALUE (link), context_die);
22520 }
22521
22522 /* We want to generate the DIE for TYPE so that we can generate the
22523 die for MEMBER, which has been defined; we will need to refer back
22524 to the member declaration nested within TYPE. If we're trying to
22525 generate minimal debug info for TYPE, processing TYPE won't do the
22526 trick; we need to attach the member declaration by hand. */
22527
22528 static void
22529 gen_type_die_for_member (tree type, tree member, dw_die_ref context_die)
22530 {
22531 gen_type_die (type, context_die);
22532
22533 /* If we're trying to avoid duplicate debug info, we may not have
22534       emitted the decl for this member. Emit it now. */
22535 if (TYPE_STUB_DECL (type)
22536 && TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))
22537 && ! lookup_decl_die (member))
22538 {
22539 dw_die_ref type_die;
22540 gcc_assert (!decl_ultimate_origin (member));
22541
22542 type_die = lookup_type_die_strip_naming_typedef (type);
22543 if (TREE_CODE (member) == FUNCTION_DECL)
22544 gen_subprogram_die (member, type_die);
22545 else if (TREE_CODE (member) == FIELD_DECL)
22546 {
22547 /* Ignore the nameless fields that are used to skip bits but handle
22548 C++ anonymous unions and structs. */
22549 if (DECL_NAME (member) != NULL_TREE
22550 || TREE_CODE (TREE_TYPE (member)) == UNION_TYPE
22551 || TREE_CODE (TREE_TYPE (member)) == RECORD_TYPE)
22552 {
22553 struct vlr_context vlr_ctx = {
22554 DECL_CONTEXT (member), /* struct_type */
22555 NULL_TREE /* variant_part_offset */
22556 };
22557 gen_type_die (member_declared_type (member), type_die);
22558 gen_field_die (member, &vlr_ctx, type_die);
22559 }
22560 }
22561 else
22562 gen_variable_die (member, NULL_TREE, type_die);
22563 }
22564 }
22565 \f
22566 /* Forward declare these functions, because they are mutually recursive
22567 with their set_block_* pairing functions. */
22568 static void set_decl_origin_self (tree);
22569
22570 /* Given a pointer to some BLOCK node, if the BLOCK_ABSTRACT_ORIGIN for the
22571 given BLOCK node is NULL, set the BLOCK_ABSTRACT_ORIGIN for the node so
22572 that it points to the node itself, thus indicating that the node is its
22573 own (abstract) origin. Additionally, if the BLOCK_ABSTRACT_ORIGIN for
22574 the given node is NULL, recursively descend the decl/block tree which
22575 it is the root of, and for each other ..._DECL or BLOCK node contained
22576 therein whose DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also
22577 still NULL, set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN
22578 values to point to themselves. */
22579
22580 static void
22581 set_block_origin_self (tree stmt)
22582 {
22583 if (BLOCK_ABSTRACT_ORIGIN (stmt) == NULL_TREE)
22584 {
22585 BLOCK_ABSTRACT_ORIGIN (stmt) = stmt;
22586
22587 {
22588 tree local_decl;
22589
22590 for (local_decl = BLOCK_VARS (stmt);
22591 local_decl != NULL_TREE;
22592 local_decl = DECL_CHAIN (local_decl))
22593 /* Do not recurse on nested functions since the inlining status
22594 of parent and child can be different as per the DWARF spec. */
22595 if (TREE_CODE (local_decl) != FUNCTION_DECL
22596 && !DECL_EXTERNAL (local_decl))
22597 set_decl_origin_self (local_decl);
22598 }
22599
22600 {
22601 tree subblock;
22602
22603 for (subblock = BLOCK_SUBBLOCKS (stmt);
22604 subblock != NULL_TREE;
22605 subblock = BLOCK_CHAIN (subblock))
22606 set_block_origin_self (subblock); /* Recurse. */
22607 }
22608 }
22609 }
22610
22611 /* Given a pointer to some ..._DECL node, if the DECL_ABSTRACT_ORIGIN for
22612 the given ..._DECL node is NULL, set the DECL_ABSTRACT_ORIGIN for the
22613    node so that it points to the node itself, thus indicating that the
22614 node represents its own (abstract) origin. Additionally, if the
22615 DECL_ABSTRACT_ORIGIN for the given node is NULL, recursively descend
22616    the decl/block tree of which the given node is the root, and for
22617 each other ..._DECL or BLOCK node contained therein whose
22618 DECL_ABSTRACT_ORIGINs or BLOCK_ABSTRACT_ORIGINs are also still NULL,
22619 set *their* DECL_ABSTRACT_ORIGIN or BLOCK_ABSTRACT_ORIGIN values to
22620 point to themselves. */
22621
22622 static void
22623 set_decl_origin_self (tree decl)
22624 {
22625 if (DECL_ABSTRACT_ORIGIN (decl) == NULL_TREE)
22626 {
22627 DECL_ABSTRACT_ORIGIN (decl) = decl;
22628 if (TREE_CODE (decl) == FUNCTION_DECL)
22629 {
22630 tree arg;
22631
22632 for (arg = DECL_ARGUMENTS (decl); arg; arg = DECL_CHAIN (arg))
22633 DECL_ABSTRACT_ORIGIN (arg) = arg;
22634 if (DECL_INITIAL (decl) != NULL_TREE
22635 && DECL_INITIAL (decl) != error_mark_node)
22636 set_block_origin_self (DECL_INITIAL (decl));
22637 }
22638 }
22639 }
22640 \f
22641 /* Mark the early DIE for DECL as the abstract instance. */
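/* As a quick reference (derived directly from the code that follows), the
   DW_AT_inline value chosen below depends on the two predicates tested:

     DECL_DECLARED_INLINE_P  cgraph_function_possibly_inlined_p  value
     true                    true                DW_INL_declared_inlined
     true                    false               DW_INL_declared_not_inlined
     false                   true                DW_INL_inlined
     false                   false               DW_INL_not_inlined  */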
22642
22643 static void
22644 dwarf2out_abstract_function (tree decl)
22645 {
22646 dw_die_ref old_die;
22647
22648 /* Make sure we have the actual abstract inline, not a clone. */
22649 decl = DECL_ORIGIN (decl);
22650
22651 if (DECL_IGNORED_P (decl))
22652 return;
22653
22654 /* In LTO we're all set. We already created abstract instances
22655 early and we want to avoid creating a concrete instance of that
22656 if we don't output it. */
22657 if (in_lto_p)
22658 return;
22659
22660 old_die = lookup_decl_die (decl);
22661 gcc_assert (old_die != NULL);
22662 if (get_AT (old_die, DW_AT_inline))
22663 /* We've already generated the abstract instance. */
22664 return;
22665
22666 /* Go ahead and put DW_AT_inline on the DIE. */
22667 if (DECL_DECLARED_INLINE_P (decl))
22668 {
22669 if (cgraph_function_possibly_inlined_p (decl))
22670 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_inlined);
22671 else
22672 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_declared_not_inlined);
22673 }
22674 else
22675 {
22676 if (cgraph_function_possibly_inlined_p (decl))
22677 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_inlined);
22678 else
22679 add_AT_unsigned (old_die, DW_AT_inline, DW_INL_not_inlined);
22680 }
22681
22682 if (DECL_DECLARED_INLINE_P (decl)
22683 && lookup_attribute ("artificial", DECL_ATTRIBUTES (decl)))
22684 add_AT_flag (old_die, DW_AT_artificial, 1);
22685
22686 set_decl_origin_self (decl);
22687 }
22688
22689 /* Helper function of premark_used_types() which gets called through
22690 htab_traverse.
22691
22692 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22693 marked as unused by prune_unused_types. */
22694
22695 bool
22696 premark_used_types_helper (tree const &type, void *)
22697 {
22698 dw_die_ref die;
22699
22700 die = lookup_type_die (type);
22701 if (die != NULL)
22702 die->die_perennial_p = 1;
22703 return true;
22704 }
22705
22706 /* Helper function of premark_types_used_by_global_vars which gets called
22707 through htab_traverse.
22708
22709 Marks the DIE of a given type in *SLOT as perennial, so it never gets
22710 marked as unused by prune_unused_types. The DIE of the type is marked
22711 only if the global variable using the type will actually be emitted. */
22712
22713 int
22714 premark_types_used_by_global_vars_helper (types_used_by_vars_entry **slot,
22715 void *)
22716 {
22717 struct types_used_by_vars_entry *entry;
22718 dw_die_ref die;
22719
22720 entry = (struct types_used_by_vars_entry *) *slot;
22721 gcc_assert (entry->type != NULL
22722 && entry->var_decl != NULL);
22723 die = lookup_type_die (entry->type);
22724 if (die)
22725 {
22726 /* Ask cgraph if the global variable really is to be emitted.
22727 If yes, then we'll keep the DIE of ENTRY->TYPE. */
22728 varpool_node *node = varpool_node::get (entry->var_decl);
22729 if (node && node->definition)
22730 {
22731 die->die_perennial_p = 1;
22732 /* Keep the parent DIEs as well. */
22733 while ((die = die->die_parent) && die->die_perennial_p == 0)
22734 die->die_perennial_p = 1;
22735 }
22736 }
22737 return 1;
22738 }
22739
22740 /* Mark all members of used_types_hash as perennial. */
22741
22742 static void
22743 premark_used_types (struct function *fun)
22744 {
22745 if (fun && fun->used_types_hash)
22746 fun->used_types_hash->traverse<void *, premark_used_types_helper> (NULL);
22747 }
22748
22749 /* Mark all members of types_used_by_vars_entry as perennial. */
22750
22751 static void
22752 premark_types_used_by_global_vars (void)
22753 {
22754 if (types_used_by_vars_hash)
22755 types_used_by_vars_hash
22756 ->traverse<void *, premark_types_used_by_global_vars_helper> (NULL);
22757 }
22758
22759 /* Mark all variables used by the symtab as perennial. */
22760
22761 static void
22762 premark_used_variables (void)
22763 {
22764 /* Mark DIEs in the symtab as used. */
22765 varpool_node *var;
22766 FOR_EACH_VARIABLE (var)
22767 {
22768 dw_die_ref die = lookup_decl_die (var->decl);
22769 if (die)
22770 die->die_perennial_p = 1;
22771 }
22772 }
22773
22774 /* Generate a DW_TAG_call_site DIE in function DECL under SUBR_DIE
22775 for CA_LOC call arg loc node. */
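/* Sketch of the resulting DIE (illustrative only; the tag and attribute
   names go through dwarf_TAG/dwarf_AT, which map them to the older GNU
   call-site extension equivalents when not targeting DWARF 5):

     DW_TAG_call_site
       DW_AT_call_return_pc   <label recorded in CA_LOC>
       DW_AT_call_tail_call   <flag, only for tail calls>
       DW_AT_call_origin      <DIE of the callee, or its address>

   parented to the DIE of the innermost enclosing BLOCK that already has
   one, or to SUBR_DIE otherwise.  */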
22776
22777 static dw_die_ref
22778 gen_call_site_die (tree decl, dw_die_ref subr_die,
22779 struct call_arg_loc_node *ca_loc)
22780 {
22781 dw_die_ref stmt_die = NULL, die;
22782 tree block = ca_loc->block;
22783
22784 while (block
22785 && block != DECL_INITIAL (decl)
22786 && TREE_CODE (block) == BLOCK)
22787 {
22788 stmt_die = lookup_block_die (block);
22789 if (stmt_die)
22790 break;
22791 block = BLOCK_SUPERCONTEXT (block);
22792 }
22793 if (stmt_die == NULL)
22794 stmt_die = subr_die;
22795 die = new_die (dwarf_TAG (DW_TAG_call_site), stmt_die, NULL_TREE);
22796 add_AT_lbl_id (die, dwarf_AT (DW_AT_call_return_pc), ca_loc->label);
22797 if (ca_loc->tail_call_p)
22798 add_AT_flag (die, dwarf_AT (DW_AT_call_tail_call), 1);
22799 if (ca_loc->symbol_ref)
22800 {
22801 dw_die_ref tdie = lookup_decl_die (SYMBOL_REF_DECL (ca_loc->symbol_ref));
22802 if (tdie)
22803 add_AT_die_ref (die, dwarf_AT (DW_AT_call_origin), tdie);
22804 else
22805 add_AT_addr (die, dwarf_AT (DW_AT_call_origin), ca_loc->symbol_ref,
22806 false);
22807 }
22808 return die;
22809 }
22810
22811 /* Generate a DIE to represent a declared function (either file-scope or
22812 block-local). */
22813
22814 static void
22815 gen_subprogram_die (tree decl, dw_die_ref context_die)
22816 {
22817 tree origin = decl_ultimate_origin (decl);
22818 dw_die_ref subr_die;
22819 dw_die_ref old_die = lookup_decl_die (decl);
22820
22821 /* This function gets called multiple times for different stages of
22822 the debug process. For example, for func() in this code:
22823
22824 namespace S
22825 {
22826 void func() { ... }
22827 }
22828
22829 ...we get called 4 times. Twice in early debug and twice in
22830 late debug:
22831
22832 Early debug
22833 -----------
22834
22835 1. Once while generating func() within the namespace. This is
22836 the declaration. The declaration bit below is set, as the
22837 context is the namespace.
22838
22839 A new DIE will be generated with DW_AT_declaration set.
22840
22841 2. Once for func() itself. This is the specification. The
22842 declaration bit below is clear as the context is the CU.
22843
22844 We will use the cached DIE from (1) to create a new DIE with
22845 DW_AT_specification pointing to the declaration in (1).
22846
22847 Late debug via rest_of_handle_final()
22848 -------------------------------------
22849
22850      3. Once while generating func() within the namespace. This is also the
22851 declaration, as in (1), but this time we will early exit below
22852 as we have a cached DIE and a declaration needs no additional
22853 annotations (no locations), as the source declaration line
22854 info is enough.
22855
22856 4. Once for func() itself. As in (2), this is the specification,
22857 but this time we will re-use the cached DIE, and just annotate
22858 it with the location information that should now be available.
22859
22860 For something without namespaces, but with abstract instances, we
22861      are also called multiple times:
22862
22863 class Base
22864 {
22865 public:
22866 Base (); // constructor declaration (1)
22867 };
22868
22869 Base::Base () { } // constructor specification (2)
22870
22871 Early debug
22872 -----------
22873
22874 1. Once for the Base() constructor by virtue of it being a
22875 member of the Base class. This is done via
22876 rest_of_type_compilation.
22877
22878 This is a declaration, so a new DIE will be created with
22879 DW_AT_declaration.
22880
22881 2. Once for the Base() constructor definition, but this time
22882 while generating the abstract instance of the base
22883 constructor (__base_ctor) which is being generated via early
22884 debug of reachable functions.
22885
22886 Even though we have a cached version of the declaration (1),
22887 we will create a DW_AT_specification of the declaration DIE
22888 in (1).
22889
22890 3. Once for the __base_ctor itself, but this time, we generate
22891         a DW_AT_abstract_origin version of the DW_AT_specification in
22892 (2).
22893
22894 Late debug via rest_of_handle_final
22895 -----------------------------------
22896
22897 4. One final time for the __base_ctor (which will have a cached
22898         DIE with DW_AT_abstract_origin created in (3)). This time,
22899 we will just annotate the location information now
22900 available.
22901 */
22902 int declaration = (current_function_decl != decl
22903 || class_or_namespace_scope_p (context_die));
22904
22905 /* A declaration that has been previously dumped needs no
22906 additional information. */
22907 if (old_die && declaration)
22908 return;
22909
22910 /* Now that the C++ front end lazily declares artificial member fns, we
22911 might need to retrofit the declaration into its class. */
22912 if (!declaration && !origin && !old_die
22913 && DECL_CONTEXT (decl) && TYPE_P (DECL_CONTEXT (decl))
22914 && !class_or_namespace_scope_p (context_die)
22915 && debug_info_level > DINFO_LEVEL_TERSE)
22916 old_die = force_decl_die (decl);
22917
22918 /* A concrete instance, tag a new DIE with DW_AT_abstract_origin. */
22919 if (origin != NULL)
22920 {
22921 gcc_assert (!declaration || local_scope_p (context_die));
22922
22923 /* Fixup die_parent for the abstract instance of a nested
22924 inline function. */
22925 if (old_die && old_die->die_parent == NULL)
22926 add_child_die (context_die, old_die);
22927
22928 if (old_die && get_AT_ref (old_die, DW_AT_abstract_origin))
22929 {
22930 /* If we have a DW_AT_abstract_origin we have a working
22931 cached version. */
22932 subr_die = old_die;
22933 }
22934 else
22935 {
22936 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
22937 add_abstract_origin_attribute (subr_die, origin);
22938 /* This is where the actual code for a cloned function is.
22939             Let's emit the linkage name attribute for it. This helps
22940             debuggers to, e.g., set breakpoints on
22941 constructors/destructors when the user asks "break
22942 K::K". */
22943 add_linkage_name (subr_die, decl);
22944 }
22945 }
22946 /* A cached copy, possibly from early dwarf generation. Reuse as
22947 much as possible. */
22948 else if (old_die)
22949 {
22950 if (!get_AT_flag (old_die, DW_AT_declaration)
22951 /* We can have a normal definition following an inline one in the
22952 case of redefinition of GNU C extern inlines.
22953 It seems reasonable to use AT_specification in this case. */
22954 && !get_AT (old_die, DW_AT_inline))
22955 {
22956 /* Detect and ignore this case, where we are trying to output
22957 something we have already output. */
22958 if (get_AT (old_die, DW_AT_low_pc)
22959 || get_AT (old_die, DW_AT_ranges))
22960 return;
22961
22962 /* If we have no location information, this must be a
22963 partially generated DIE from early dwarf generation.
22964 Fall through and generate it. */
22965 }
22966
22967 /* If the definition comes from the same place as the declaration,
22968 maybe use the old DIE. We always want the DIE for this function
22969 that has the *_pc attributes to be under comp_unit_die so the
22970 debugger can find it. We also need to do this for abstract
22971 instances of inlines, since the spec requires the out-of-line copy
22972 to have the same parent. For local class methods, this doesn't
22973 apply; we just use the old DIE. */
22974 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
22975 struct dwarf_file_data * file_index = lookup_filename (s.file);
22976 if (((is_unit_die (old_die->die_parent)
22977 /* This condition fixes the inconsistency/ICE with the
22978 following Fortran test (or some derivative thereof) while
22979 building libgfortran:
22980
22981 module some_m
22982 contains
22983 logical function funky (FLAG)
22984 funky = .true.
22985 end function
22986 end module
22987 */
22988 || (old_die->die_parent
22989 && old_die->die_parent->die_tag == DW_TAG_module)
22990 || local_scope_p (old_die->die_parent)
22991 || context_die == NULL)
22992 && (DECL_ARTIFICIAL (decl)
22993 || (get_AT_file (old_die, DW_AT_decl_file) == file_index
22994 && (get_AT_unsigned (old_die, DW_AT_decl_line)
22995 == (unsigned) s.line)
22996 && (!debug_column_info
22997 || s.column == 0
22998 || (get_AT_unsigned (old_die, DW_AT_decl_column)
22999 == (unsigned) s.column)))))
23000 /* With LTO if there's an abstract instance for
23001 the old DIE, this is a concrete instance and
23002 thus re-use the DIE. */
23003 || get_AT (old_die, DW_AT_abstract_origin))
23004 {
23005 subr_die = old_die;
23006
23007 /* Clear out the declaration attribute, but leave the
23008 parameters so they can be augmented with location
23009 information later. Unless this was a declaration, in
23010 which case, wipe out the nameless parameters and recreate
23011 them further down. */
23012 if (remove_AT (subr_die, DW_AT_declaration))
23013 {
23014
23015 remove_AT (subr_die, DW_AT_object_pointer);
23016 remove_child_TAG (subr_die, DW_TAG_formal_parameter);
23017 }
23018 }
23019 /* Make a specification pointing to the previously built
23020 declaration. */
23021 else
23022 {
23023 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
23024 add_AT_specification (subr_die, old_die);
23025 add_pubname (decl, subr_die);
23026 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23027 add_AT_file (subr_die, DW_AT_decl_file, file_index);
23028 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23029 add_AT_unsigned (subr_die, DW_AT_decl_line, s.line);
23030 if (debug_column_info
23031 && s.column
23032 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23033 != (unsigned) s.column))
23034 add_AT_unsigned (subr_die, DW_AT_decl_column, s.column);
23035
23036 /* If the prototype had an 'auto' or 'decltype(auto)' return type,
23037 emit the real type on the definition die. */
23038 if (is_cxx () && debug_info_level > DINFO_LEVEL_TERSE)
23039 {
23040 dw_die_ref die = get_AT_ref (old_die, DW_AT_type);
23041 if (die == auto_die || die == decltype_auto_die)
23042 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
23043 TYPE_UNQUALIFIED, false, context_die);
23044 }
23045
23046 /* When we process the method declaration, we haven't seen
23047 the out-of-class defaulted definition yet, so we have to
23048 recheck now. */
23049 if ((dwarf_version >= 5 || ! dwarf_strict)
23050 && !get_AT (subr_die, DW_AT_defaulted))
23051 {
23052 int defaulted
23053 = lang_hooks.decls.decl_dwarf_attribute (decl,
23054 DW_AT_defaulted);
23055 if (defaulted != -1)
23056 {
23057 /* Other values must have been handled before. */
23058 gcc_assert (defaulted == DW_DEFAULTED_out_of_class);
23059 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
23060 }
23061 }
23062 }
23063 }
23064 /* Create a fresh DIE for anything else. */
23065 else
23066 {
23067 subr_die = new_die (DW_TAG_subprogram, context_die, decl);
23068
23069 if (TREE_PUBLIC (decl))
23070 add_AT_flag (subr_die, DW_AT_external, 1);
23071
23072 add_name_and_src_coords_attributes (subr_die, decl);
23073 add_pubname (decl, subr_die);
23074 if (debug_info_level > DINFO_LEVEL_TERSE)
23075 {
23076 add_prototyped_attribute (subr_die, TREE_TYPE (decl));
23077 add_type_attribute (subr_die, TREE_TYPE (TREE_TYPE (decl)),
23078 TYPE_UNQUALIFIED, false, context_die);
23079 }
23080
23081 add_pure_or_virtual_attribute (subr_die, decl);
23082 if (DECL_ARTIFICIAL (decl))
23083 add_AT_flag (subr_die, DW_AT_artificial, 1);
23084
23085 if (TREE_THIS_VOLATILE (decl) && (dwarf_version >= 5 || !dwarf_strict))
23086 add_AT_flag (subr_die, DW_AT_noreturn, 1);
23087
23088 add_alignment_attribute (subr_die, decl);
23089
23090 add_accessibility_attribute (subr_die, decl);
23091 }
23092
23093 /* Unless we have an existing non-declaration DIE, equate the new
23094 DIE. */
23095 if (!old_die || is_declaration_die (old_die))
23096 equate_decl_number_to_die (decl, subr_die);
23097
23098 if (declaration)
23099 {
23100 if (!old_die || !get_AT (old_die, DW_AT_inline))
23101 {
23102 add_AT_flag (subr_die, DW_AT_declaration, 1);
23103
23104 /* If this is an explicit function declaration then generate
23105 a DW_AT_explicit attribute. */
23106 if ((dwarf_version >= 3 || !dwarf_strict)
23107 && lang_hooks.decls.decl_dwarf_attribute (decl,
23108 DW_AT_explicit) == 1)
23109 add_AT_flag (subr_die, DW_AT_explicit, 1);
23110
23111 /* If this is a C++11 deleted special function member then generate
23112 a DW_AT_deleted attribute. */
23113 if ((dwarf_version >= 5 || !dwarf_strict)
23114 && lang_hooks.decls.decl_dwarf_attribute (decl,
23115 DW_AT_deleted) == 1)
23116 add_AT_flag (subr_die, DW_AT_deleted, 1);
23117
23118 /* If this is a C++11 defaulted special function member then
23119 generate a DW_AT_defaulted attribute. */
23120 if (dwarf_version >= 5 || !dwarf_strict)
23121 {
23122 int defaulted
23123 = lang_hooks.decls.decl_dwarf_attribute (decl,
23124 DW_AT_defaulted);
23125 if (defaulted != -1)
23126 add_AT_unsigned (subr_die, DW_AT_defaulted, defaulted);
23127 }
23128
23129 /* If this is a C++11 non-static member function with & ref-qualifier
23130 then generate a DW_AT_reference attribute. */
23131 if ((dwarf_version >= 5 || !dwarf_strict)
23132 && lang_hooks.decls.decl_dwarf_attribute (decl,
23133 DW_AT_reference) == 1)
23134 add_AT_flag (subr_die, DW_AT_reference, 1);
23135
23136 /* If this is a C++11 non-static member function with &&
23137            ref-qualifier then generate a DW_AT_rvalue_reference attribute. */
23138 if ((dwarf_version >= 5 || !dwarf_strict)
23139 && lang_hooks.decls.decl_dwarf_attribute (decl,
23140 DW_AT_rvalue_reference)
23141 == 1)
23142 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
23143 }
23144 }
23145 /* For non DECL_EXTERNALs, if range information is available, fill
23146 the DIE with it. */
23147 else if (!DECL_EXTERNAL (decl) && !early_dwarf)
23148 {
23149 HOST_WIDE_INT cfa_fb_offset;
23150
23151 struct function *fun = DECL_STRUCT_FUNCTION (decl);
23152
23153 if (!crtl->has_bb_partition)
23154 {
23155 dw_fde_ref fde = fun->fde;
23156 if (fde->dw_fde_begin)
23157 {
23158 /* We have already generated the labels. */
23159 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23160 fde->dw_fde_end, false);
23161 }
23162 else
23163 {
23164 /* Create start/end labels and add the range. */
23165 char label_id_low[MAX_ARTIFICIAL_LABEL_BYTES];
23166 char label_id_high[MAX_ARTIFICIAL_LABEL_BYTES];
23167 ASM_GENERATE_INTERNAL_LABEL (label_id_low, FUNC_BEGIN_LABEL,
23168 current_function_funcdef_no);
23169 ASM_GENERATE_INTERNAL_LABEL (label_id_high, FUNC_END_LABEL,
23170 current_function_funcdef_no);
23171 add_AT_low_high_pc (subr_die, label_id_low, label_id_high,
23172 false);
23173 }
23174
23175 #if VMS_DEBUGGING_INFO
23176 /* HP OpenVMS Industry Standard 64: DWARF Extensions
23177 Section 2.3 Prologue and Epilogue Attributes:
23178 When a breakpoint is set on entry to a function, it is generally
23179 desirable for execution to be suspended, not on the very first
23180 instruction of the function, but rather at a point after the
23181 function's frame has been set up, after any language defined local
23182 declaration processing has been completed, and before execution of
23183 the first statement of the function begins. Debuggers generally
23184 cannot properly determine where this point is. Similarly for a
23185 breakpoint set on exit from a function. The prologue and epilogue
23186 attributes allow a compiler to communicate the location(s) to use. */
23187
23188 {
23189 if (fde->dw_fde_vms_end_prologue)
23190 add_AT_vms_delta (subr_die, DW_AT_HP_prologue,
23191 fde->dw_fde_begin, fde->dw_fde_vms_end_prologue);
23192
23193 if (fde->dw_fde_vms_begin_epilogue)
23194 add_AT_vms_delta (subr_die, DW_AT_HP_epilogue,
23195 fde->dw_fde_begin, fde->dw_fde_vms_begin_epilogue);
23196 }
23197 #endif
23198
23199 }
23200 else
23201 {
23202 /* Generate pubnames entries for the split function code ranges. */
23203 dw_fde_ref fde = fun->fde;
23204
23205 if (fde->dw_fde_second_begin)
23206 {
23207 if (dwarf_version >= 3 || !dwarf_strict)
23208 {
23209 /* We should use ranges for non-contiguous code section
23210 addresses. Use the actual code range for the initial
23211 section, since the HOT/COLD labels might precede an
23212 alignment offset. */
23213 bool range_list_added = false;
23214 add_ranges_by_labels (subr_die, fde->dw_fde_begin,
23215 fde->dw_fde_end, &range_list_added,
23216 false);
23217 add_ranges_by_labels (subr_die, fde->dw_fde_second_begin,
23218 fde->dw_fde_second_end,
23219 &range_list_added, false);
23220 if (range_list_added)
23221 add_ranges (NULL);
23222 }
23223 else
23224 {
23225                 /* There is no real support in DW2 for this, so we make
23226 a work-around. First, emit the pub name for the segment
23227 containing the function label. Then make and emit a
23228 simplified subprogram DIE for the second segment with the
23229                    name prefixed by __second_sect_of_. We use the same
23230 linkage name for the second die so that gdb will find both
23231 sections when given "b foo". */
23232 const char *name = NULL;
23233 tree decl_name = DECL_NAME (decl);
23234 dw_die_ref seg_die;
23235
23236 /* Do the 'primary' section. */
23237 add_AT_low_high_pc (subr_die, fde->dw_fde_begin,
23238 fde->dw_fde_end, false);
23239
23240 /* Build a minimal DIE for the secondary section. */
23241 seg_die = new_die (DW_TAG_subprogram,
23242 subr_die->die_parent, decl);
23243
23244 if (TREE_PUBLIC (decl))
23245 add_AT_flag (seg_die, DW_AT_external, 1);
23246
23247 if (decl_name != NULL
23248 && IDENTIFIER_POINTER (decl_name) != NULL)
23249 {
23250 name = dwarf2_name (decl, 1);
23251 if (! DECL_ARTIFICIAL (decl))
23252 add_src_coords_attributes (seg_die, decl);
23253
23254 add_linkage_name (seg_die, decl);
23255 }
23256 gcc_assert (name != NULL);
23257 add_pure_or_virtual_attribute (seg_die, decl);
23258 if (DECL_ARTIFICIAL (decl))
23259 add_AT_flag (seg_die, DW_AT_artificial, 1);
23260
23261 name = concat ("__second_sect_of_", name, NULL);
23262 add_AT_low_high_pc (seg_die, fde->dw_fde_second_begin,
23263 fde->dw_fde_second_end, false);
23264 add_name_attribute (seg_die, name);
23265 if (want_pubnames ())
23266 add_pubname_string (name, seg_die);
23267 }
23268 }
23269 else
23270 add_AT_low_high_pc (subr_die, fde->dw_fde_begin, fde->dw_fde_end,
23271 false);
23272 }
23273
23274 cfa_fb_offset = CFA_FRAME_BASE_OFFSET (decl);
23275
23276 /* We define the "frame base" as the function's CFA. This is more
23277 convenient for several reasons: (1) It's stable across the prologue
23278 and epilogue, which makes it better than just a frame pointer,
23279 (2) With dwarf3, there exists a one-byte encoding that allows us
23280 to reference the .debug_frame data by proxy, but failing that,
23281 (3) We can at least reuse the code inspection and interpretation
23282 code that determines the CFA position at various points in the
23283 function. */
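      /* Concretely (a sketch of the two forms emitted just below):

           DWARF 3+, target using DWARF2 unwind info:
             DW_AT_frame_base: DW_OP_call_frame_cfa

           otherwise:
             DW_AT_frame_base: a location list (or a single location
             expression when it never changes) giving the register/offset
             pair that yields the CFA at each point in the function.  */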
23284 if (dwarf_version >= 3 && targetm.debug_unwind_info () == UI_DWARF2)
23285 {
23286 dw_loc_descr_ref op = new_loc_descr (DW_OP_call_frame_cfa, 0, 0);
23287 add_AT_loc (subr_die, DW_AT_frame_base, op);
23288 }
23289 else
23290 {
23291 dw_loc_list_ref list = convert_cfa_to_fb_loc_list (cfa_fb_offset);
23292 if (list->dw_loc_next)
23293 add_AT_loc_list (subr_die, DW_AT_frame_base, list);
23294 else
23295 add_AT_loc (subr_die, DW_AT_frame_base, list->expr);
23296 }
23297
23298 /* Compute a displacement from the "steady-state frame pointer" to
23299 the CFA. The former is what all stack slots and argument slots
23300 will reference in the rtl; the latter is what we've told the
23301 debugger about. We'll need to adjust all frame_base references
23302 by this displacement. */
23303 compute_frame_pointer_to_fb_displacement (cfa_fb_offset);
23304
23305 if (fun->static_chain_decl)
23306 {
23307 /* DWARF requires here a location expression that computes the
23308 address of the enclosing subprogram's frame base. The machinery
23309 in tree-nested.c is supposed to store this specific address in the
23310 last field of the FRAME record. */
23311 const tree frame_type
23312 = TREE_TYPE (TREE_TYPE (fun->static_chain_decl));
23313 const tree fb_decl = tree_last (TYPE_FIELDS (frame_type));
23314
23315 tree fb_expr
23316 = build1 (INDIRECT_REF, frame_type, fun->static_chain_decl);
23317 fb_expr = build3 (COMPONENT_REF, TREE_TYPE (fb_decl),
23318 fb_expr, fb_decl, NULL_TREE);
23319
23320 add_AT_location_description (subr_die, DW_AT_static_link,
23321 loc_list_from_tree (fb_expr, 0, NULL));
23322 }
23323
23324 resolve_variable_values ();
23325 }
23326
23327   /* Generate child DIEs for template parameters. */
23328 if (early_dwarf && debug_info_level > DINFO_LEVEL_TERSE)
23329 gen_generic_params_dies (decl);
23330
23331 /* Now output descriptions of the arguments for this function. This gets
23332      (unnecessarily?) complex because the DECL_ARGUMENTS list
23333 for a FUNCTION_DECL doesn't indicate cases where there was a trailing
23334 `...' at the end of the formal parameter list. In order to find out if
23335 there was a trailing ellipsis or not, we must instead look at the type
23336 associated with the FUNCTION_DECL. This will be a node of type
23337 FUNCTION_TYPE. If the chain of type nodes hanging off of this
23338 FUNCTION_TYPE node ends with a void_type_node then there should *not* be
23339 an ellipsis at the end. */
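  /* A concrete illustration (hypothetical declarations):

       void f (int a, ...);    TYPE_ARG_TYPES = { int }        -> ellipsis
       void g (int a);         TYPE_ARG_TYPES = { int, void }  -> no ellipsis
       void h ();              TYPE_ARG_TYPES = NULL (unprototyped, C)

     Only f, and h when it is a mere declaration, get the
     DW_TAG_unspecified_parameters child added further below during early
     debug generation.  */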
23340
23341 /* In the case where we are describing a mere function declaration, all we
23342 need to do here (and all we *can* do here) is to describe the *types* of
23343 its formal parameters. */
23344 if (debug_info_level <= DINFO_LEVEL_TERSE)
23345 ;
23346 else if (declaration)
23347 gen_formal_types_die (decl, subr_die);
23348 else
23349 {
23350 /* Generate DIEs to represent all known formal parameters. */
23351 tree parm = DECL_ARGUMENTS (decl);
23352 tree generic_decl = early_dwarf
23353 ? lang_hooks.decls.get_generic_function_decl (decl) : NULL;
23354 tree generic_decl_parm = generic_decl
23355 ? DECL_ARGUMENTS (generic_decl)
23356 : NULL;
23357
23358 /* Now we want to walk the list of parameters of the function and
23359 emit their relevant DIEs.
23360
23361 We consider the case of DECL being an instance of a generic function
23362 as well as it being a normal function.
23363
23364 If DECL is an instance of a generic function we walk the
23365 parameters of the generic function declaration _and_ the parameters of
23366 DECL itself. This is useful because we want to emit specific DIEs for
23367 function parameter packs and those are declared as part of the
23368 generic function declaration. In that particular case,
23369 the parameter pack yields a DW_TAG_GNU_formal_parameter_pack DIE.
23370          That DIE has child DIEs representing the set of arguments
23371 of the pack. Note that the set of pack arguments can be empty.
23372 In that case, the DW_TAG_GNU_formal_parameter_pack DIE will not have any
23373          child DIEs.
23374
23375 Otherwise, we just consider the parameters of DECL. */
23376 while (generic_decl_parm || parm)
23377 {
23378 if (generic_decl_parm
23379 && lang_hooks.function_parameter_pack_p (generic_decl_parm))
23380 gen_formal_parameter_pack_die (generic_decl_parm,
23381 parm, subr_die,
23382 &parm);
23383 else if (parm)
23384 {
23385 dw_die_ref parm_die = gen_decl_die (parm, NULL, NULL, subr_die);
23386
23387 if (early_dwarf
23388 && parm == DECL_ARGUMENTS (decl)
23389 && TREE_CODE (TREE_TYPE (decl)) == METHOD_TYPE
23390 && parm_die
23391 && (dwarf_version >= 3 || !dwarf_strict))
23392 add_AT_die_ref (subr_die, DW_AT_object_pointer, parm_die);
23393
23394 parm = DECL_CHAIN (parm);
23395 }
23396
23397 if (generic_decl_parm)
23398 generic_decl_parm = DECL_CHAIN (generic_decl_parm);
23399 }
23400
23401 /* Decide whether we need an unspecified_parameters DIE at the end.
23402          There are two cases to do this for: 1) the ANSI `...' declaration -
23403          this is detectable when the end of the arg list is not a
23404          void_type_node; 2) an unprototyped function declaration (not a
23405 definition). This just means that we have no info about the
23406 parameters at all. */
23407 if (early_dwarf)
23408 {
23409 if (prototype_p (TREE_TYPE (decl)))
23410 {
23411               /* This is the prototyped case, check for a trailing `...'. */
23412 if (stdarg_p (TREE_TYPE (decl)))
23413 gen_unspecified_parameters_die (decl, subr_die);
23414 }
23415 else if (DECL_INITIAL (decl) == NULL_TREE)
23416 gen_unspecified_parameters_die (decl, subr_die);
23417 }
23418 }
23419
23420 if (subr_die != old_die)
23421 /* Add the calling convention attribute if requested. */
23422 add_calling_convention_attribute (subr_die, decl);
23423
23424 /* Output Dwarf info for all of the stuff within the body of the function
23425 (if it has one - it may be just a declaration).
23426
23427 OUTER_SCOPE is a pointer to the outermost BLOCK node created to represent
23428 a function. This BLOCK actually represents the outermost binding contour
23429 for the function, i.e. the contour in which the function's formal
23430 parameters and labels get declared. Curiously, it appears that the front
23431 end doesn't actually put the PARM_DECL nodes for the current function onto
23432      the BLOCK_VARS list for this outer scope; they are strung off the
23433      DECL_ARGUMENTS list for the function instead.
23434
23435 The BLOCK_VARS list for the `outer_scope' does provide us with a list of
23436      the LABEL_DECL nodes for the function, however, and we output DWARF info
23437 for those in decls_for_scope. Just within the `outer_scope' there will be
23438 a BLOCK node representing the function's outermost pair of curly braces,
23439 and any blocks used for the base and member initializers of a C++
23440 constructor function. */
23441 tree outer_scope = DECL_INITIAL (decl);
23442 if (! declaration && outer_scope && TREE_CODE (outer_scope) != ERROR_MARK)
23443 {
23444 int call_site_note_count = 0;
23445 int tail_call_site_note_count = 0;
23446
23447 /* Emit a DW_TAG_variable DIE for a named return value. */
23448 if (DECL_NAME (DECL_RESULT (decl)))
23449 gen_decl_die (DECL_RESULT (decl), NULL, NULL, subr_die);
23450
23451 /* The first time through decls_for_scope we will generate the
23452 DIEs for the locals. The second time, we fill in the
23453 location info. */
23454 decls_for_scope (outer_scope, subr_die);
23455
23456 if (call_arg_locations && (!dwarf_strict || dwarf_version >= 5))
23457 {
23458 struct call_arg_loc_node *ca_loc;
23459 for (ca_loc = call_arg_locations; ca_loc; ca_loc = ca_loc->next)
23460 {
23461 dw_die_ref die = NULL;
23462 rtx tloc = NULL_RTX, tlocc = NULL_RTX;
23463 rtx arg, next_arg;
23464 tree arg_decl = NULL_TREE;
23465
23466 for (arg = (ca_loc->call_arg_loc_note != NULL_RTX
23467 ? XEXP (ca_loc->call_arg_loc_note, 0)
23468 : NULL_RTX);
23469 arg; arg = next_arg)
23470 {
23471 dw_loc_descr_ref reg, val;
23472 machine_mode mode = GET_MODE (XEXP (XEXP (arg, 0), 1));
23473 dw_die_ref cdie, tdie = NULL;
23474
23475 next_arg = XEXP (arg, 1);
23476 if (REG_P (XEXP (XEXP (arg, 0), 0))
23477 && next_arg
23478 && MEM_P (XEXP (XEXP (next_arg, 0), 0))
23479 && REG_P (XEXP (XEXP (XEXP (next_arg, 0), 0), 0))
23480 && REGNO (XEXP (XEXP (arg, 0), 0))
23481 == REGNO (XEXP (XEXP (XEXP (next_arg, 0), 0), 0)))
23482 next_arg = XEXP (next_arg, 1);
23483 if (mode == VOIDmode)
23484 {
23485 mode = GET_MODE (XEXP (XEXP (arg, 0), 0));
23486 if (mode == VOIDmode)
23487 mode = GET_MODE (XEXP (arg, 0));
23488 }
23489 if (mode == VOIDmode || mode == BLKmode)
23490 continue;
23491 /* Get dynamic information about call target only if we
23492 have no static information: we cannot generate both
23493 DW_AT_call_origin and DW_AT_call_target
23494 attributes. */
23495 if (ca_loc->symbol_ref == NULL_RTX)
23496 {
23497 if (XEXP (XEXP (arg, 0), 0) == pc_rtx)
23498 {
23499 tloc = XEXP (XEXP (arg, 0), 1);
23500 continue;
23501 }
23502 else if (GET_CODE (XEXP (XEXP (arg, 0), 0)) == CLOBBER
23503 && XEXP (XEXP (XEXP (arg, 0), 0), 0) == pc_rtx)
23504 {
23505 tlocc = XEXP (XEXP (arg, 0), 1);
23506 continue;
23507 }
23508 }
23509 reg = NULL;
23510 if (REG_P (XEXP (XEXP (arg, 0), 0)))
23511 reg = reg_loc_descriptor (XEXP (XEXP (arg, 0), 0),
23512 VAR_INIT_STATUS_INITIALIZED);
23513 else if (MEM_P (XEXP (XEXP (arg, 0), 0)))
23514 {
23515 rtx mem = XEXP (XEXP (arg, 0), 0);
23516 reg = mem_loc_descriptor (XEXP (mem, 0),
23517 get_address_mode (mem),
23518 GET_MODE (mem),
23519 VAR_INIT_STATUS_INITIALIZED);
23520 }
23521 else if (GET_CODE (XEXP (XEXP (arg, 0), 0))
23522 == DEBUG_PARAMETER_REF)
23523 {
23524 tree tdecl
23525 = DEBUG_PARAMETER_REF_DECL (XEXP (XEXP (arg, 0), 0));
23526 tdie = lookup_decl_die (tdecl);
23527 if (tdie == NULL)
23528 continue;
23529 arg_decl = tdecl;
23530 }
23531 else
23532 continue;
23533 if (reg == NULL
23534 && GET_CODE (XEXP (XEXP (arg, 0), 0))
23535 != DEBUG_PARAMETER_REF)
23536 continue;
23537 val = mem_loc_descriptor (XEXP (XEXP (arg, 0), 1), mode,
23538 VOIDmode,
23539 VAR_INIT_STATUS_INITIALIZED);
23540 if (val == NULL)
23541 continue;
23542 if (die == NULL)
23543 die = gen_call_site_die (decl, subr_die, ca_loc);
23544 cdie = new_die (dwarf_TAG (DW_TAG_call_site_parameter), die,
23545 NULL_TREE);
23546 add_desc_attribute (cdie, arg_decl);
23547 if (reg != NULL)
23548 add_AT_loc (cdie, DW_AT_location, reg);
23549 else if (tdie != NULL)
23550 add_AT_die_ref (cdie, dwarf_AT (DW_AT_call_parameter),
23551 tdie);
23552 add_AT_loc (cdie, dwarf_AT (DW_AT_call_value), val);
23553 if (next_arg != XEXP (arg, 1))
23554 {
23555 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 1));
23556 if (mode == VOIDmode)
23557 mode = GET_MODE (XEXP (XEXP (XEXP (arg, 1), 0), 0));
23558 val = mem_loc_descriptor (XEXP (XEXP (XEXP (arg, 1),
23559 0), 1),
23560 mode, VOIDmode,
23561 VAR_INIT_STATUS_INITIALIZED);
23562 if (val != NULL)
23563 add_AT_loc (cdie, dwarf_AT (DW_AT_call_data_value),
23564 val);
23565 }
23566 }
23567 if (die == NULL
23568 && (ca_loc->symbol_ref || tloc))
23569 die = gen_call_site_die (decl, subr_die, ca_loc);
23570 if (die != NULL && (tloc != NULL_RTX || tlocc != NULL_RTX))
23571 {
23572 dw_loc_descr_ref tval = NULL;
23573
23574 if (tloc != NULL_RTX)
23575 tval = mem_loc_descriptor (tloc,
23576 GET_MODE (tloc) == VOIDmode
23577 ? Pmode : GET_MODE (tloc),
23578 VOIDmode,
23579 VAR_INIT_STATUS_INITIALIZED);
23580 if (tval)
23581 add_AT_loc (die, dwarf_AT (DW_AT_call_target), tval);
23582 else if (tlocc != NULL_RTX)
23583 {
23584 tval = mem_loc_descriptor (tlocc,
23585 GET_MODE (tlocc) == VOIDmode
23586 ? Pmode : GET_MODE (tlocc),
23587 VOIDmode,
23588 VAR_INIT_STATUS_INITIALIZED);
23589 if (tval)
23590 add_AT_loc (die,
23591 dwarf_AT (DW_AT_call_target_clobbered),
23592 tval);
23593 }
23594 }
23595 if (die != NULL)
23596 {
23597 call_site_note_count++;
23598 if (ca_loc->tail_call_p)
23599 tail_call_site_note_count++;
23600 }
23601 }
23602 }
23603 call_arg_locations = NULL;
23604 call_arg_loc_last = NULL;
23605 if (tail_call_site_count >= 0
23606 && tail_call_site_count == tail_call_site_note_count
23607 && (!dwarf_strict || dwarf_version >= 5))
23608 {
23609 if (call_site_count >= 0
23610 && call_site_count == call_site_note_count)
23611 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_calls), 1);
23612 else
23613 add_AT_flag (subr_die, dwarf_AT (DW_AT_call_all_tail_calls), 1);
23614 }
23615 call_site_count = -1;
23616 tail_call_site_count = -1;
23617 }
23618
23619   /* Mark used types after we have created DIEs for the function's scopes. */
23620 premark_used_types (DECL_STRUCT_FUNCTION (decl));
23621 }
23622
23623 /* Returns a hash value for X (which really is a die_struct). */
23624
23625 hashval_t
23626 block_die_hasher::hash (die_struct *d)
23627 {
23628 return (hashval_t) d->decl_id ^ htab_hash_pointer (d->die_parent);
23629 }
23630
23631 /* Return nonzero if decl_id and die_parent of die_struct X are the same
23632 as decl_id and die_parent of die_struct Y. */
23633
23634 bool
23635 block_die_hasher::equal (die_struct *x, die_struct *y)
23636 {
23637 return x->decl_id == y->decl_id && x->die_parent == y->die_parent;
23638 }
23639
23640 /* Hold information about markers for inlined entry points. */
23641 struct GTY ((for_user)) inline_entry_data
23642 {
23643 /* The block that's the inlined_function_outer_scope for an inlined
23644 function. */
23645 tree block;
23646
23647 /* The label at the inlined entry point. */
23648 const char *label_pfx;
23649 unsigned int label_num;
23650
23651 /* The view number to be used as the inlined entry point. */
23652 var_loc_view view;
23653 };
23654
23655 struct inline_entry_data_hasher : ggc_ptr_hash <inline_entry_data>
23656 {
23657 typedef tree compare_type;
23658 static inline hashval_t hash (const inline_entry_data *);
23659 static inline bool equal (const inline_entry_data *, const_tree);
23660 };
23661
23662 /* Hash table routines for inline_entry_data. */
23663
23664 inline hashval_t
23665 inline_entry_data_hasher::hash (const inline_entry_data *data)
23666 {
23667 return htab_hash_pointer (data->block);
23668 }
23669
23670 inline bool
23671 inline_entry_data_hasher::equal (const inline_entry_data *data,
23672 const_tree block)
23673 {
23674 return data->block == block;
23675 }
23676
23677 /* Inlined entry points pending DIE creation in this compilation unit. */
23678
23679 static GTY(()) hash_table<inline_entry_data_hasher> *inline_entry_data_table;
23680
23681
23682 /* Return TRUE if DECL, which may have been previously generated as
23683 OLD_DIE, is a candidate for a DW_AT_specification. DECLARATION is
23684 true if decl (or its origin) is either an extern declaration or a
23685 class/namespace scoped declaration.
23686
23687 The declare_in_namespace support causes us to get two DIEs for one
23688 variable, both of which are declarations. We want to avoid
23689 considering one to be a specification, so we must test for
23690 DECLARATION and DW_AT_declaration. */
23691 static inline bool
23692 decl_will_get_specification_p (dw_die_ref old_die, tree decl, bool declaration)
23693 {
23694 return (old_die && TREE_STATIC (decl) && !declaration
23695 && get_AT_flag (old_die, DW_AT_declaration) == 1);
23696 }
23697
23698 /* Return true if DECL is a local static. */
23699
23700 static inline bool
23701 local_function_static (tree decl)
23702 {
23703 gcc_assert (VAR_P (decl));
23704 return TREE_STATIC (decl)
23705 && DECL_CONTEXT (decl)
23706 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL;
23707 }
23708
23709 /* Generate a DIE to represent a declared data object.
23710 Either DECL or ORIGIN must be non-null. */
23711
23712 static void
23713 gen_variable_die (tree decl, tree origin, dw_die_ref context_die)
23714 {
23715 HOST_WIDE_INT off = 0;
23716 tree com_decl;
23717 tree decl_or_origin = decl ? decl : origin;
23718 tree ultimate_origin;
23719 dw_die_ref var_die;
23720 dw_die_ref old_die = decl ? lookup_decl_die (decl) : NULL;
23721 bool declaration = (DECL_EXTERNAL (decl_or_origin)
23722 || class_or_namespace_scope_p (context_die));
23723 bool specialization_p = false;
23724 bool no_linkage_name = false;
23725
23726 /* While C++ inline static data members have definitions inside of the
23727 class, force the first DIE to be a declaration, then let gen_member_die
23728 reparent it to the class context and call gen_variable_die again
23729      to create the outside-of-class DIE for the definition. */
23730 if (!declaration
23731 && old_die == NULL
23732 && decl
23733 && DECL_CONTEXT (decl)
23734 && TYPE_P (DECL_CONTEXT (decl))
23735 && lang_hooks.decls.decl_dwarf_attribute (decl, DW_AT_inline) != -1)
23736 {
23737 declaration = true;
23738 if (dwarf_version < 5)
23739 no_linkage_name = true;
23740 }
23741
23742 ultimate_origin = decl_ultimate_origin (decl_or_origin);
23743 if (decl || ultimate_origin)
23744 origin = ultimate_origin;
23745 com_decl = fortran_common (decl_or_origin, &off);
23746
23747 /* Symbol in common gets emitted as a child of the common block, in the form
23748 of a data member. */
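  /* Illustrative sketch (Fortran-style, names made up): for

       COMMON /blk/ i, r

     the code below builds roughly

       DW_TAG_common_block "blk"    DW_AT_location = address of blk
         DW_TAG_variable "i"        DW_AT_location = blk address + 0
         DW_TAG_variable "r"        DW_AT_location = blk address + offset of r

     i.e. each symbol placed in the common block becomes a child variable
     DIE whose location is the block's location displaced by the member's
     offset.  */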
23749 if (com_decl)
23750 {
23751 dw_die_ref com_die;
23752 dw_loc_list_ref loc = NULL;
23753 die_node com_die_arg;
23754
23755 var_die = lookup_decl_die (decl_or_origin);
23756 if (var_die)
23757 {
23758 if (! early_dwarf && get_AT (var_die, DW_AT_location) == NULL)
23759 {
23760 loc = loc_list_from_tree (com_decl, off ? 1 : 2, NULL);
23761 if (loc)
23762 {
23763 if (off)
23764 {
23765 /* Optimize the common case. */
23766 if (single_element_loc_list_p (loc)
23767 && loc->expr->dw_loc_opc == DW_OP_addr
23768 && loc->expr->dw_loc_next == NULL
23769 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr)
23770 == SYMBOL_REF)
23771 {
23772 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23773 loc->expr->dw_loc_oprnd1.v.val_addr
23774 = plus_constant (GET_MODE (x), x , off);
23775 }
23776 else
23777 loc_list_plus_const (loc, off);
23778 }
23779 add_AT_location_description (var_die, DW_AT_location, loc);
23780 remove_AT (var_die, DW_AT_declaration);
23781 }
23782 }
23783 return;
23784 }
23785
23786 if (common_block_die_table == NULL)
23787 common_block_die_table = hash_table<block_die_hasher>::create_ggc (10);
23788
23789 com_die_arg.decl_id = DECL_UID (com_decl);
23790 com_die_arg.die_parent = context_die;
23791 com_die = common_block_die_table->find (&com_die_arg);
23792 if (! early_dwarf)
23793 loc = loc_list_from_tree (com_decl, 2, NULL);
23794 if (com_die == NULL)
23795 {
23796 const char *cnam
23797 = IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (com_decl));
23798 die_node **slot;
23799
23800 com_die = new_die (DW_TAG_common_block, context_die, decl);
23801 add_name_and_src_coords_attributes (com_die, com_decl);
23802 if (loc)
23803 {
23804 add_AT_location_description (com_die, DW_AT_location, loc);
23805 /* Avoid sharing the same loc descriptor between
23806 DW_TAG_common_block and DW_TAG_variable. */
23807 loc = loc_list_from_tree (com_decl, 2, NULL);
23808 }
23809 else if (DECL_EXTERNAL (decl_or_origin))
23810 add_AT_flag (com_die, DW_AT_declaration, 1);
23811 if (want_pubnames ())
23812 add_pubname_string (cnam, com_die); /* ??? needed? */
23813 com_die->decl_id = DECL_UID (com_decl);
23814 slot = common_block_die_table->find_slot (com_die, INSERT);
23815 *slot = com_die;
23816 }
23817 else if (get_AT (com_die, DW_AT_location) == NULL && loc)
23818 {
23819 add_AT_location_description (com_die, DW_AT_location, loc);
23820 loc = loc_list_from_tree (com_decl, 2, NULL);
23821 remove_AT (com_die, DW_AT_declaration);
23822 }
23823 var_die = new_die (DW_TAG_variable, com_die, decl);
23824 add_name_and_src_coords_attributes (var_die, decl_or_origin);
23825 add_type_attribute (var_die, TREE_TYPE (decl_or_origin),
23826 decl_quals (decl_or_origin), false,
23827 context_die);
23828 add_alignment_attribute (var_die, decl);
23829 add_AT_flag (var_die, DW_AT_external, 1);
23830 if (loc)
23831 {
23832 if (off)
23833 {
23834 /* Optimize the common case. */
23835 if (single_element_loc_list_p (loc)
23836 && loc->expr->dw_loc_opc == DW_OP_addr
23837 && loc->expr->dw_loc_next == NULL
23838 && GET_CODE (loc->expr->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF)
23839 {
23840 rtx x = loc->expr->dw_loc_oprnd1.v.val_addr;
23841 loc->expr->dw_loc_oprnd1.v.val_addr
23842 = plus_constant (GET_MODE (x), x, off);
23843 }
23844 else
23845 loc_list_plus_const (loc, off);
23846 }
23847 add_AT_location_description (var_die, DW_AT_location, loc);
23848 }
23849 else if (DECL_EXTERNAL (decl_or_origin))
23850 add_AT_flag (var_die, DW_AT_declaration, 1);
23851 if (decl)
23852 equate_decl_number_to_die (decl, var_die);
23853 return;
23854 }
23855
23856 if (old_die)
23857 {
23858 if (declaration)
23859 {
23860           /* A declaration that has been previously dumped needs no
23861 further annotations, since it doesn't need location on
23862 the second pass. */
23863 return;
23864 }
23865 else if (decl_will_get_specification_p (old_die, decl, declaration)
23866 && !get_AT (old_die, DW_AT_specification))
23867 {
23868 /* Fall-thru so we can make a new variable die along with a
23869 DW_AT_specification. */
23870 }
23871 else if (origin && old_die->die_parent != context_die)
23872 {
23873 /* If we will be creating an inlined instance, we need a
23874 new DIE that will get annotated with
23875 DW_AT_abstract_origin. */
23876 gcc_assert (!DECL_ABSTRACT_P (decl));
23877 }
23878 else
23879 {
23880 /* If a DIE was dumped early, it still needs location info.
23881 Skip to where we fill the location bits. */
23882 var_die = old_die;
23883
23884 /* ??? In LTRANS we cannot annotate early created variably
23885 modified type DIEs without copying them and adjusting all
23886              references to them. Thus we dump them again. Also add a
23887              reference to them but beware of -g0 compile and -g link
23888              in which case the reference will already be present. */
23889 tree type = TREE_TYPE (decl_or_origin);
23890 if (in_lto_p
23891 && ! get_AT (var_die, DW_AT_type)
23892 && variably_modified_type_p
23893 (type, decl_function_context (decl_or_origin)))
23894 {
23895 if (decl_by_reference_p (decl_or_origin))
23896 add_type_attribute (var_die, TREE_TYPE (type),
23897 TYPE_UNQUALIFIED, false, context_die);
23898 else
23899 add_type_attribute (var_die, type, decl_quals (decl_or_origin),
23900 false, context_die);
23901 }
23902
23903 goto gen_variable_die_location;
23904 }
23905 }
23906
23907 /* For static data members, the declaration in the class is supposed
23908 to have DW_TAG_member tag in DWARF{3,4} and we emit it for compatibility
23909 also in DWARF2; the specification should still be DW_TAG_variable
23910 referencing the DW_TAG_member DIE. */
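  /* For example (illustrative only), for a C++ static data member

       struct S { static int x; };
       int S::x;

     pre-DWARF-5 output is expected to look roughly like

       DW_TAG_structure_type "S"
         DW_TAG_member "x"       DW_AT_declaration
       DW_TAG_variable           DW_AT_specification -> the DW_TAG_member
                                 DW_AT_location ...

     while for DWARF 5 the in-class declaration is a DW_TAG_variable too.  */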
23911 if (declaration && class_scope_p (context_die) && dwarf_version < 5)
23912 var_die = new_die (DW_TAG_member, context_die, decl);
23913 else
23914 var_die = new_die (DW_TAG_variable, context_die, decl);
23915
23916 if (origin != NULL)
23917 add_abstract_origin_attribute (var_die, origin);
23918
23919 /* Loop unrolling can create multiple blocks that refer to the same
23920 static variable, so we must test for the DW_AT_declaration flag.
23921
23922 ??? Loop unrolling/reorder_blocks should perhaps be rewritten to
23923 copy decls and set the DECL_ABSTRACT_P flag on them instead of
23924 sharing them.
23925
23926 ??? Duplicated blocks have been rewritten to use .debug_ranges. */
23927 else if (decl_will_get_specification_p (old_die, decl, declaration))
23928 {
23929 /* This is a definition of a C++ class level static. */
23930 add_AT_specification (var_die, old_die);
23931 specialization_p = true;
23932 if (DECL_NAME (decl))
23933 {
23934 expanded_location s = expand_location (DECL_SOURCE_LOCATION (decl));
23935 struct dwarf_file_data * file_index = lookup_filename (s.file);
23936
23937 if (get_AT_file (old_die, DW_AT_decl_file) != file_index)
23938 add_AT_file (var_die, DW_AT_decl_file, file_index);
23939
23940 if (get_AT_unsigned (old_die, DW_AT_decl_line) != (unsigned) s.line)
23941 add_AT_unsigned (var_die, DW_AT_decl_line, s.line);
23942
23943 if (debug_column_info
23944 && s.column
23945 && (get_AT_unsigned (old_die, DW_AT_decl_column)
23946 != (unsigned) s.column))
23947 add_AT_unsigned (var_die, DW_AT_decl_column, s.column);
23948
23949 if (old_die->die_tag == DW_TAG_member)
23950 add_linkage_name (var_die, decl);
23951 }
23952 }
23953 else
23954 add_name_and_src_coords_attributes (var_die, decl, no_linkage_name);
23955
23956 if ((origin == NULL && !specialization_p)
23957 || (origin != NULL
23958 && !DECL_ABSTRACT_P (decl_or_origin)
23959 && variably_modified_type_p (TREE_TYPE (decl_or_origin),
23960 decl_function_context
23961 (decl_or_origin))))
23962 {
23963 tree type = TREE_TYPE (decl_or_origin);
23964
23965 if (decl_by_reference_p (decl_or_origin))
23966 add_type_attribute (var_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
23967 context_die);
23968 else
23969 add_type_attribute (var_die, type, decl_quals (decl_or_origin), false,
23970 context_die);
23971 }
23972
23973 if (origin == NULL && !specialization_p)
23974 {
23975 if (TREE_PUBLIC (decl))
23976 add_AT_flag (var_die, DW_AT_external, 1);
23977
23978 if (DECL_ARTIFICIAL (decl))
23979 add_AT_flag (var_die, DW_AT_artificial, 1);
23980
23981 add_alignment_attribute (var_die, decl);
23982
23983 add_accessibility_attribute (var_die, decl);
23984 }
23985
23986 if (declaration)
23987 add_AT_flag (var_die, DW_AT_declaration, 1);
23988
23989 if (decl && (DECL_ABSTRACT_P (decl)
23990 || !old_die || is_declaration_die (old_die)))
23991 equate_decl_number_to_die (decl, var_die);
23992
23993 gen_variable_die_location:
23994 if (! declaration
23995 && (! DECL_ABSTRACT_P (decl_or_origin)
23996 /* Local static vars are shared between all clones/inlines,
23997 so emit DW_AT_location on the abstract DIE if DECL_RTL is
23998 already set. */
23999 || (VAR_P (decl_or_origin)
24000 && TREE_STATIC (decl_or_origin)
24001 && DECL_RTL_SET_P (decl_or_origin))))
24002 {
24003 if (early_dwarf)
24004 add_pubname (decl_or_origin, var_die);
24005 else
24006 add_location_or_const_value_attribute (var_die, decl_or_origin,
24007 decl == NULL);
24008 }
24009 else
24010 tree_add_const_value_attribute_for_decl (var_die, decl_or_origin);
24011
24012 if ((dwarf_version >= 4 || !dwarf_strict)
24013 && lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
24014 DW_AT_const_expr) == 1
24015 && !get_AT (var_die, DW_AT_const_expr)
24016 && !specialization_p)
24017 add_AT_flag (var_die, DW_AT_const_expr, 1);
24018
24019 if (!dwarf_strict)
24020 {
24021 int inl = lang_hooks.decls.decl_dwarf_attribute (decl_or_origin,
24022 DW_AT_inline);
24023 if (inl != -1
24024 && !get_AT (var_die, DW_AT_inline)
24025 && !specialization_p)
24026 add_AT_unsigned (var_die, DW_AT_inline, inl);
24027 }
24028 }
24029
24030 /* Generate a DIE to represent a named constant. */
24031
24032 static void
24033 gen_const_die (tree decl, dw_die_ref context_die)
24034 {
24035 dw_die_ref const_die;
24036 tree type = TREE_TYPE (decl);
24037
24038 const_die = lookup_decl_die (decl);
24039 if (const_die)
24040 return;
24041
24042 const_die = new_die (DW_TAG_constant, context_die, decl);
24043 equate_decl_number_to_die (decl, const_die);
24044 add_name_and_src_coords_attributes (const_die, decl);
24045 add_type_attribute (const_die, type, TYPE_QUAL_CONST, false, context_die);
24046 if (TREE_PUBLIC (decl))
24047 add_AT_flag (const_die, DW_AT_external, 1);
24048 if (DECL_ARTIFICIAL (decl))
24049 add_AT_flag (const_die, DW_AT_artificial, 1);
24050 tree_add_const_value_attribute_for_decl (const_die, decl);
24051 }
24052
24053 /* Generate a DIE to represent a label identifier. */
24054
24055 static void
24056 gen_label_die (tree decl, dw_die_ref context_die)
24057 {
24058 tree origin = decl_ultimate_origin (decl);
24059 dw_die_ref lbl_die = lookup_decl_die (decl);
24060 rtx insn;
24061 char label[MAX_ARTIFICIAL_LABEL_BYTES];
24062
24063 if (!lbl_die)
24064 {
24065 lbl_die = new_die (DW_TAG_label, context_die, decl);
24066 equate_decl_number_to_die (decl, lbl_die);
24067
24068 if (origin != NULL)
24069 add_abstract_origin_attribute (lbl_die, origin);
24070 else
24071 add_name_and_src_coords_attributes (lbl_die, decl);
24072 }
24073
24074 if (DECL_ABSTRACT_P (decl))
24075 equate_decl_number_to_die (decl, lbl_die);
24076 else if (! early_dwarf)
24077 {
24078 insn = DECL_RTL_IF_SET (decl);
24079
24080       /* Deleted labels are programmer-specified labels which have been
24081 eliminated because of various optimizations. We still emit them
24082 here so that it is possible to put breakpoints on them. */
24083 if (insn
24084 && (LABEL_P (insn)
24085 || ((NOTE_P (insn)
24086 && NOTE_KIND (insn) == NOTE_INSN_DELETED_LABEL))))
24087 {
24088 /* When optimization is enabled (via -O) some parts of the compiler
24089 (e.g. jump.c and cse.c) may try to delete CODE_LABEL insns which
24090 represent source-level labels which were explicitly declared by
24091 the user. This really shouldn't happen, so catch it if it ever
24092 does. */
24093 gcc_assert (!as_a<rtx_insn *> (insn)->deleted ());
24094
24095 ASM_GENERATE_INTERNAL_LABEL (label, "L", CODE_LABEL_NUMBER (insn));
24096 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
24097 }
24098 else if (insn
24099 && NOTE_P (insn)
24100 && NOTE_KIND (insn) == NOTE_INSN_DELETED_DEBUG_LABEL
24101 && CODE_LABEL_NUMBER (insn) != -1)
24102 {
24103 ASM_GENERATE_INTERNAL_LABEL (label, "LDL", CODE_LABEL_NUMBER (insn));
24104 add_AT_lbl_id (lbl_die, DW_AT_low_pc, label);
24105 }
24106 }
24107 }
24108
24109 /* A helper function for gen_inlined_subroutine_die. Add source coordinate
24110 attributes to the DIE for a block STMT, to describe where the inlined
24111 function was called from. This is similar to add_src_coords_attributes. */
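/* For example, if the call to the inlined function appeared at foo.c:42:7
   (an illustrative location), the DW_TAG_inlined_subroutine DIE gets
   DW_AT_call_file referring to foo.c, DW_AT_call_line 42 and, when column
   tracking is enabled, DW_AT_call_column 7.  */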
24112
24113 static inline void
24114 add_call_src_coords_attributes (tree stmt, dw_die_ref die)
24115 {
24116 /* We can end up with BUILTINS_LOCATION here. */
24117 if (RESERVED_LOCATION_P (BLOCK_SOURCE_LOCATION (stmt)))
24118 return;
24119
24120 expanded_location s = expand_location (BLOCK_SOURCE_LOCATION (stmt));
24121
24122 if (dwarf_version >= 3 || !dwarf_strict)
24123 {
24124 add_AT_file (die, DW_AT_call_file, lookup_filename (s.file));
24125 add_AT_unsigned (die, DW_AT_call_line, s.line);
24126 if (debug_column_info && s.column)
24127 add_AT_unsigned (die, DW_AT_call_column, s.column);
24128 }
24129 }
24130
24131
24132 /* A helper function for gen_lexical_block_die and gen_inlined_subroutine_die.
24133 Add low_pc and high_pc attributes to the DIE for a block STMT. */
24134
24135 static inline void
24136 add_high_low_attributes (tree stmt, dw_die_ref die)
24137 {
24138 char label[MAX_ARTIFICIAL_LABEL_BYTES];
24139
24140 if (inline_entry_data **iedp
24141 = !inline_entry_data_table ? NULL
24142 : inline_entry_data_table->find_slot_with_hash (stmt,
24143 htab_hash_pointer (stmt),
24144 NO_INSERT))
24145 {
24146 inline_entry_data *ied = *iedp;
24147 gcc_assert (MAY_HAVE_DEBUG_MARKER_INSNS);
24148 gcc_assert (debug_inline_points);
24149 gcc_assert (inlined_function_outer_scope_p (stmt));
24150
24151 ASM_GENERATE_INTERNAL_LABEL (label, ied->label_pfx, ied->label_num);
24152 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24153
24154 if (debug_variable_location_views && !ZERO_VIEW_P (ied->view)
24155 && !dwarf_strict)
24156 {
24157 if (!output_asm_line_debug_info ())
24158 add_AT_unsigned (die, DW_AT_GNU_entry_view, ied->view);
24159 else
24160 {
24161 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", ied->view);
24162 /* FIXME: this will resolve to a small number. Could we
24163 possibly emit smaller data? Ideally we'd emit a
24164 uleb128, but that would make the size of DIEs
24165 impossible for the compiler to compute, since it's
24166 the assembler that computes the value of the view
24167 label in this case. Ideally, we'd have a single form
24168 encompassing both the address and the view, and
24169 indirecting them through a table might make things
24170 easier, but even that would be more wasteful,
24171 space-wise, than what we have now. */
24172 add_AT_symview (die, DW_AT_GNU_entry_view, label);
24173 }
24174 }
24175
24176 inline_entry_data_table->clear_slot (iedp);
24177 }
24178
24179 if (BLOCK_FRAGMENT_CHAIN (stmt)
24180 && (dwarf_version >= 3 || !dwarf_strict))
24181 {
24182 tree chain, superblock = NULL_TREE;
24183 dw_die_ref pdie;
24184 dw_attr_node *attr = NULL;
24185
24186 if (!debug_inline_points && inlined_function_outer_scope_p (stmt))
24187 {
24188 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24189 BLOCK_NUMBER (stmt));
24190 add_AT_lbl_id (die, DW_AT_entry_pc, label);
24191 }
24192
24193 /* Optimize duplicate .debug_ranges lists or even tails of
24194 lists. If this BLOCK has the same ranges as its supercontext,
24195 look up the DW_AT_ranges attribute in the supercontext (and
24196 recursively so), verify that the ranges_table contains the
24197 right values and use it instead of adding a new .debug_ranges entry. */
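/* For instance, if the supercontext's list describes ranges {R1, R2, R3}
   and this BLOCK covers exactly {R2, R3}, we point DW_AT_ranges at the
   existing tail starting at R2 instead of emitting a fresh list.  */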
24198 for (chain = stmt, pdie = die;
24199 BLOCK_SAME_RANGE (chain);
24200 chain = BLOCK_SUPERCONTEXT (chain))
24201 {
24202 dw_attr_node *new_attr;
24203
24204 pdie = pdie->die_parent;
24205 if (pdie == NULL)
24206 break;
24207 if (BLOCK_SUPERCONTEXT (chain) == NULL_TREE)
24208 break;
24209 new_attr = get_AT (pdie, DW_AT_ranges);
24210 if (new_attr == NULL
24211 || new_attr->dw_attr_val.val_class != dw_val_class_range_list)
24212 break;
24213 attr = new_attr;
24214 superblock = BLOCK_SUPERCONTEXT (chain);
24215 }
24216 if (attr != NULL
24217 && ((*ranges_table)[attr->dw_attr_val.v.val_offset].num
24218 == (int)BLOCK_NUMBER (superblock))
24219 && BLOCK_FRAGMENT_CHAIN (superblock))
24220 {
24221 unsigned long off = attr->dw_attr_val.v.val_offset;
24222 unsigned long supercnt = 0, thiscnt = 0;
24223 for (chain = BLOCK_FRAGMENT_CHAIN (superblock);
24224 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24225 {
24226 ++supercnt;
24227 gcc_checking_assert ((*ranges_table)[off + supercnt].num
24228 == (int)BLOCK_NUMBER (chain));
24229 }
24230 gcc_checking_assert ((*ranges_table)[off + supercnt + 1].num == 0);
24231 for (chain = BLOCK_FRAGMENT_CHAIN (stmt);
24232 chain; chain = BLOCK_FRAGMENT_CHAIN (chain))
24233 ++thiscnt;
24234 gcc_assert (supercnt >= thiscnt);
24235 add_AT_range_list (die, DW_AT_ranges, off + supercnt - thiscnt,
24236 false);
24237 note_rnglist_head (off + supercnt - thiscnt);
24238 return;
24239 }
24240
24241 unsigned int offset = add_ranges (stmt, true);
24242 add_AT_range_list (die, DW_AT_ranges, offset, false);
24243 note_rnglist_head (offset);
24244
24245 bool prev_in_cold = BLOCK_IN_COLD_SECTION_P (stmt);
24246 chain = BLOCK_FRAGMENT_CHAIN (stmt);
24247 do
24248 {
24249 add_ranges (chain, prev_in_cold != BLOCK_IN_COLD_SECTION_P (chain));
24250 prev_in_cold = BLOCK_IN_COLD_SECTION_P (chain);
24251 chain = BLOCK_FRAGMENT_CHAIN (chain);
24252 }
24253 while (chain);
24254 add_ranges (NULL);
24255 }
24256 else
24257 {
24258 char label_high[MAX_ARTIFICIAL_LABEL_BYTES];
24259 ASM_GENERATE_INTERNAL_LABEL (label, BLOCK_BEGIN_LABEL,
24260 BLOCK_NUMBER (stmt));
24261 ASM_GENERATE_INTERNAL_LABEL (label_high, BLOCK_END_LABEL,
24262 BLOCK_NUMBER (stmt));
24263 add_AT_low_high_pc (die, label, label_high, false);
24264 }
24265 }
24266
24267 /* Generate a DIE for a lexical block. */
24268
24269 static void
24270 gen_lexical_block_die (tree stmt, dw_die_ref context_die)
24271 {
24272 dw_die_ref old_die = lookup_block_die (stmt);
24273 dw_die_ref stmt_die = NULL;
24274 if (!old_die)
24275 {
24276 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24277 equate_block_to_die (stmt, stmt_die);
24278 }
24279
24280 if (BLOCK_ABSTRACT_ORIGIN (stmt))
24281 {
24282 /* If this is an inlined or concrete instance, create a new lexical
24283 die for anything below to attach DW_AT_abstract_origin to. */
24284 if (old_die)
24285 stmt_die = new_die (DW_TAG_lexical_block, context_die, stmt);
24286
24287 tree origin = block_ultimate_origin (stmt);
24288 if (origin != NULL_TREE && (origin != stmt || old_die))
24289 add_abstract_origin_attribute (stmt_die, origin);
24290
24291 old_die = NULL;
24292 }
24293
24294 if (old_die)
24295 stmt_die = old_die;
24296
24297 /* A non-abstract block whose blocks have already been reordered
24298 should have the instruction range for this block. If so, set the
24299 high/low attributes. */
24300 if (!early_dwarf && TREE_ASM_WRITTEN (stmt))
24301 {
24302 gcc_assert (stmt_die);
24303 add_high_low_attributes (stmt, stmt_die);
24304 }
24305
24306 decls_for_scope (stmt, stmt_die);
24307 }
24308
24309 /* Generate a DIE for an inlined subprogram. */
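/* The resulting DW_TAG_inlined_subroutine DIE carries DW_AT_abstract_origin
   pointing at the DIE of the function that was inlined, the PC range of the
   inlined body, and the call-site coordinates added by
   add_call_src_coords_attributes.  */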
24310
24311 static void
24312 gen_inlined_subroutine_die (tree stmt, dw_die_ref context_die)
24313 {
24314 tree decl = block_ultimate_origin (stmt);
24315
24316 /* Make sure any inlined functions are known to be inlineable. */
24317 gcc_checking_assert (DECL_ABSTRACT_P (decl)
24318 || cgraph_function_possibly_inlined_p (decl));
24319
24320 dw_die_ref subr_die = new_die (DW_TAG_inlined_subroutine, context_die, stmt);
24321
24322 if (call_arg_locations || debug_inline_points)
24323 equate_block_to_die (stmt, subr_die);
24324 add_abstract_origin_attribute (subr_die, decl);
24325 if (TREE_ASM_WRITTEN (stmt))
24326 add_high_low_attributes (stmt, subr_die);
24327 add_call_src_coords_attributes (stmt, subr_die);
24328
24329 /* The inliner creates an extra BLOCK for the parameter setup;
24330 we want to merge that with the actual outermost BLOCK of the
24331 inlined function to avoid duplicate locals in consumers.
24332 Do that by recursing into subblocks on the single subblock
24333 of STMT. */
24334 bool unwrap_one = false;
24335 if (BLOCK_SUBBLOCKS (stmt) && !BLOCK_CHAIN (BLOCK_SUBBLOCKS (stmt)))
24336 {
24337 tree origin = block_ultimate_origin (BLOCK_SUBBLOCKS (stmt));
24338 if (origin
24339 && TREE_CODE (origin) == BLOCK
24340 && BLOCK_SUPERCONTEXT (origin) == decl)
24341 unwrap_one = true;
24342 }
24343 decls_for_scope (stmt, subr_die, !unwrap_one);
24344 if (unwrap_one)
24345 decls_for_scope (BLOCK_SUBBLOCKS (stmt), subr_die);
24346 }
24347
24348 /* Generate a DIE for a field in a record, or structure. CTX is required: see
24349 the comment for VLR_CONTEXT. */
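/* For example, given "struct S { int i; unsigned b : 3; };", both i and b get
   a DW_TAG_member child of S's DIE; b is a bit-field, so its DIE additionally
   carries DW_AT_byte_size, DW_AT_bit_size and DW_AT_bit_offset (see below).  */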
24350
24351 static void
24352 gen_field_die (tree decl, struct vlr_context *ctx, dw_die_ref context_die)
24353 {
24354 dw_die_ref decl_die;
24355
24356 if (TREE_TYPE (decl) == error_mark_node)
24357 return;
24358
24359 decl_die = new_die (DW_TAG_member, context_die, decl);
24360 add_name_and_src_coords_attributes (decl_die, decl);
24361 add_type_attribute (decl_die, member_declared_type (decl), decl_quals (decl),
24362 TYPE_REVERSE_STORAGE_ORDER (DECL_FIELD_CONTEXT (decl)),
24363 context_die);
24364
24365 if (DECL_BIT_FIELD_TYPE (decl))
24366 {
24367 add_byte_size_attribute (decl_die, decl);
24368 add_bit_size_attribute (decl_die, decl);
24369 add_bit_offset_attribute (decl_die, decl, ctx);
24370 }
24371
24372 add_alignment_attribute (decl_die, decl);
24373
24374 /* If we have a variant part offset, then we are supposed to process a member
24375 of a QUAL_UNION_TYPE, which is how we represent variant parts in
24376 trees. */
24377 gcc_assert (ctx->variant_part_offset == NULL_TREE
24378 || TREE_CODE (DECL_FIELD_CONTEXT (decl)) != QUAL_UNION_TYPE);
24379 if (TREE_CODE (DECL_FIELD_CONTEXT (decl)) != UNION_TYPE)
24380 add_data_member_location_attribute (decl_die, decl, ctx);
24381
24382 if (DECL_ARTIFICIAL (decl))
24383 add_AT_flag (decl_die, DW_AT_artificial, 1);
24384
24385 add_accessibility_attribute (decl_die, decl);
24386
24387 /* Equate decl number to die, so that we can look up this decl later on. */
24388 equate_decl_number_to_die (decl, decl_die);
24389 }
24390
24391 /* Generate a DIE for a pointer to a member type. TYPE can be an
24392 OFFSET_TYPE, for a pointer to data member, or a RECORD_TYPE, for a
24393 pointer to member function. */
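/* For example, the C++ type "int C::*" (pointer to data member) is an
   OFFSET_TYPE, while "int (C::*) (void)" is lowered to a RECORD_TYPE by the
   C++ front end; either way the result is a DW_TAG_ptr_to_member_type whose
   DW_AT_containing_type refers to C.  */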
24394
24395 static void
24396 gen_ptr_to_mbr_type_die (tree type, dw_die_ref context_die)
24397 {
24398 if (lookup_type_die (type))
24399 return;
24400
24401 dw_die_ref ptr_die = new_die (DW_TAG_ptr_to_member_type,
24402 scope_die_for (type, context_die), type);
24403
24404 equate_type_number_to_die (type, ptr_die);
24405 add_AT_die_ref (ptr_die, DW_AT_containing_type,
24406 lookup_type_die (TYPE_OFFSET_BASETYPE (type)));
24407 add_type_attribute (ptr_die, TREE_TYPE (type), TYPE_UNQUALIFIED, false,
24408 context_die);
24409 add_alignment_attribute (ptr_die, type);
24410
24411 if (TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE
24412 && TREE_CODE (TREE_TYPE (type)) != METHOD_TYPE)
24413 {
24414 dw_loc_descr_ref op = new_loc_descr (DW_OP_plus, 0, 0);
24415 add_AT_loc (ptr_die, DW_AT_use_location, op);
24416 }
24417 }
24418
24419 static char *producer_string;
24420
24421 /* Return a heap-allocated producer string, including command-line options
24422 when -grecord-gcc-switches is in effect. */
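/* For illustration only, with -grecord-gcc-switches the result might look
   like "GNU C17 9.1.0 -march=x86-64 -O2 -g"; without that option, only the
   language and version strings are returned.  */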
24423
24424 static char *
24425 gen_producer_string (void)
24426 {
24427 size_t j;
24428 auto_vec<const char *> switches;
24429 const char *language_string = lang_hooks.name;
24430 char *producer, *tail;
24431 const char *p;
24432 size_t len = dwarf_record_gcc_switches ? 0 : 3;
24433 size_t plen = strlen (language_string) + 1 + strlen (version_string);
24434
24435 for (j = 1; dwarf_record_gcc_switches && j < save_decoded_options_count; j++)
24436 switch (save_decoded_options[j].opt_index)
24437 {
24438 case OPT_o:
24439 case OPT_d:
24440 case OPT_dumpbase:
24441 case OPT_dumpdir:
24442 case OPT_auxbase:
24443 case OPT_auxbase_strip:
24444 case OPT_quiet:
24445 case OPT_version:
24446 case OPT_v:
24447 case OPT_w:
24448 case OPT_L:
24449 case OPT_D:
24450 case OPT_I:
24451 case OPT_U:
24452 case OPT_SPECIAL_unknown:
24453 case OPT_SPECIAL_ignore:
24454 case OPT_SPECIAL_warn_removed:
24455 case OPT_SPECIAL_program_name:
24456 case OPT_SPECIAL_input_file:
24457 case OPT_grecord_gcc_switches:
24458 case OPT__output_pch_:
24459 case OPT_fdiagnostics_show_location_:
24460 case OPT_fdiagnostics_show_option:
24461 case OPT_fdiagnostics_show_caret:
24462 case OPT_fdiagnostics_show_labels:
24463 case OPT_fdiagnostics_show_line_numbers:
24464 case OPT_fdiagnostics_color_:
24465 case OPT_fdiagnostics_format_:
24466 case OPT_fverbose_asm:
24467 case OPT____:
24468 case OPT__sysroot_:
24469 case OPT_nostdinc:
24470 case OPT_nostdinc__:
24471 case OPT_fpreprocessed:
24472 case OPT_fltrans_output_list_:
24473 case OPT_fresolution_:
24474 case OPT_fdebug_prefix_map_:
24475 case OPT_fmacro_prefix_map_:
24476 case OPT_ffile_prefix_map_:
24477 case OPT_fcompare_debug:
24478 case OPT_fchecking:
24479 case OPT_fchecking_:
24480 /* Ignore these. */
24481 continue;
24482 case OPT_flto_:
24483 {
24484 const char *lto_canonical = "-flto";
24485 switches.safe_push (lto_canonical);
24486 len += strlen (lto_canonical) + 1;
24487 break;
24488 }
24489 default:
24490 if (cl_options[save_decoded_options[j].opt_index].flags
24491 & CL_NO_DWARF_RECORD)
24492 continue;
24493 gcc_checking_assert (save_decoded_options[j].canonical_option[0][0]
24494 == '-');
24495 switch (save_decoded_options[j].canonical_option[0][1])
24496 {
24497 case 'M':
24498 case 'i':
24499 case 'W':
24500 continue;
24501 case 'f':
24502 if (strncmp (save_decoded_options[j].canonical_option[0] + 2,
24503 "dump", 4) == 0)
24504 continue;
24505 break;
24506 default:
24507 break;
24508 }
24509 switches.safe_push (save_decoded_options[j].orig_option_with_args_text);
24510 len += strlen (save_decoded_options[j].orig_option_with_args_text) + 1;
24511 break;
24512 }
24513
24514 producer = XNEWVEC (char, plen + 1 + len + 1);
24515 tail = producer;
24516 sprintf (tail, "%s %s", language_string, version_string);
24517 tail += plen;
24518
24519 FOR_EACH_VEC_ELT (switches, j, p)
24520 {
24521 len = strlen (p);
24522 *tail = ' ';
24523 memcpy (tail + 1, p, len);
24524 tail += len + 1;
24525 }
24526
24527 *tail = '\0';
24528 return producer;
24529 }
24530
24531 /* Given two C and/or C++ language/version strings, return the "highest".
24532 C++ is assumed to be "higher" than C in this case. Used for merging
24533 LTO translation unit languages. */
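/* For example, highest_c_language ("GNU C99", "GNU C++14") returns
   "GNU C++14", while highest_c_language ("GNU C89", "GNU C11") returns
   "GNU C11".  */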
24534 static const char *
24535 highest_c_language (const char *lang1, const char *lang2)
24536 {
24537 if (strcmp ("GNU C++17", lang1) == 0 || strcmp ("GNU C++17", lang2) == 0)
24538 return "GNU C++17";
24539 if (strcmp ("GNU C++14", lang1) == 0 || strcmp ("GNU C++14", lang2) == 0)
24540 return "GNU C++14";
24541 if (strcmp ("GNU C++11", lang1) == 0 || strcmp ("GNU C++11", lang2) == 0)
24542 return "GNU C++11";
24543 if (strcmp ("GNU C++98", lang1) == 0 || strcmp ("GNU C++98", lang2) == 0)
24544 return "GNU C++98";
24545
24546 if (strcmp ("GNU C2X", lang1) == 0 || strcmp ("GNU C2X", lang2) == 0)
24547 return "GNU C2X";
24548 if (strcmp ("GNU C17", lang1) == 0 || strcmp ("GNU C17", lang2) == 0)
24549 return "GNU C17";
24550 if (strcmp ("GNU C11", lang1) == 0 || strcmp ("GNU C11", lang2) == 0)
24551 return "GNU C11";
24552 if (strcmp ("GNU C99", lang1) == 0 || strcmp ("GNU C99", lang2) == 0)
24553 return "GNU C99";
24554 if (strcmp ("GNU C89", lang1) == 0 || strcmp ("GNU C89", lang2) == 0)
24555 return "GNU C89";
24556
24557 gcc_unreachable ();
24558 }
24559
24560
24561 /* Generate the DIE for the compilation unit. */
24562
24563 static dw_die_ref
24564 gen_compile_unit_die (const char *filename)
24565 {
24566 dw_die_ref die;
24567 const char *language_string = lang_hooks.name;
24568 int language;
24569
24570 die = new_die (DW_TAG_compile_unit, NULL, NULL);
24571
24572 if (filename)
24573 {
24574 add_name_attribute (die, filename);
24575 /* Don't add cwd for <built-in>. */
24576 if (filename[0] != '<')
24577 add_comp_dir_attribute (die);
24578 }
24579
24580 add_AT_string (die, DW_AT_producer, producer_string ? producer_string : "");
24581
24582 /* If our producer is LTO try to figure out a common language to use
24583 from the global list of translation units. */
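/* E.g. an LTO link of a "GNU C99" unit and a "GNU C++14" unit settles on
   "GNU C++14" (see highest_c_language above); mixing in a unit from any
   other language family falls back to plain C below.  */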
24584 if (strcmp (language_string, "GNU GIMPLE") == 0)
24585 {
24586 unsigned i;
24587 tree t;
24588 const char *common_lang = NULL;
24589
24590 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, t)
24591 {
24592 if (!TRANSLATION_UNIT_LANGUAGE (t))
24593 continue;
24594 if (!common_lang)
24595 common_lang = TRANSLATION_UNIT_LANGUAGE (t);
24596 else if (strcmp (common_lang, TRANSLATION_UNIT_LANGUAGE (t)) == 0)
24597 ;
24598 else if (strncmp (common_lang, "GNU C", 5) == 0
24599 && strncmp (TRANSLATION_UNIT_LANGUAGE (t), "GNU C", 5) == 0)
24600 /* Mixing C and C++ is ok, use C++ in that case. */
24601 common_lang = highest_c_language (common_lang,
24602 TRANSLATION_UNIT_LANGUAGE (t));
24603 else
24604 {
24605 /* Fall back to C. */
24606 common_lang = NULL;
24607 break;
24608 }
24609 }
24610
24611 if (common_lang)
24612 language_string = common_lang;
24613 }
24614
24615 language = DW_LANG_C;
24616 if (strncmp (language_string, "GNU C", 5) == 0
24617 && ISDIGIT (language_string[5]))
24618 {
24619 language = DW_LANG_C89;
24620 if (dwarf_version >= 3 || !dwarf_strict)
24621 {
24622 if (strcmp (language_string, "GNU C89") != 0)
24623 language = DW_LANG_C99;
24624
24625 if (dwarf_version >= 5 /* || !dwarf_strict */)
24626 if (strcmp (language_string, "GNU C11") == 0
24627 || strcmp (language_string, "GNU C17") == 0
24628 || strcmp (language_string, "GNU C2X") == 0)
24629 language = DW_LANG_C11;
24630 }
24631 }
24632 else if (strncmp (language_string, "GNU C++", 7) == 0)
24633 {
24634 language = DW_LANG_C_plus_plus;
24635 if (dwarf_version >= 5 /* || !dwarf_strict */)
24636 {
24637 if (strcmp (language_string, "GNU C++11") == 0)
24638 language = DW_LANG_C_plus_plus_11;
24639 else if (strcmp (language_string, "GNU C++14") == 0)
24640 language = DW_LANG_C_plus_plus_14;
24641 else if (strcmp (language_string, "GNU C++17") == 0)
24642 /* For now. */
24643 language = DW_LANG_C_plus_plus_14;
24644 }
24645 }
24646 else if (strcmp (language_string, "GNU F77") == 0)
24647 language = DW_LANG_Fortran77;
24648 else if (dwarf_version >= 3 || !dwarf_strict)
24649 {
24650 if (strcmp (language_string, "GNU Ada") == 0)
24651 language = DW_LANG_Ada95;
24652 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24653 {
24654 language = DW_LANG_Fortran95;
24655 if (dwarf_version >= 5 /* || !dwarf_strict */)
24656 {
24657 if (strcmp (language_string, "GNU Fortran2003") == 0)
24658 language = DW_LANG_Fortran03;
24659 else if (strcmp (language_string, "GNU Fortran2008") == 0)
24660 language = DW_LANG_Fortran08;
24661 }
24662 }
24663 else if (strcmp (language_string, "GNU Objective-C") == 0)
24664 language = DW_LANG_ObjC;
24665 else if (strcmp (language_string, "GNU Objective-C++") == 0)
24666 language = DW_LANG_ObjC_plus_plus;
24667 else if (strcmp (language_string, "GNU D") == 0)
24668 language = DW_LANG_D;
24669 else if (dwarf_version >= 5 || !dwarf_strict)
24670 {
24671 if (strcmp (language_string, "GNU Go") == 0)
24672 language = DW_LANG_Go;
24673 }
24674 }
24675 /* Use a degraded Fortran setting in strict DWARF2 so is_fortran works. */
24676 else if (strncmp (language_string, "GNU Fortran", 11) == 0)
24677 language = DW_LANG_Fortran90;
24678 /* Likewise for Ada. */
24679 else if (strcmp (language_string, "GNU Ada") == 0)
24680 language = DW_LANG_Ada83;
24681
24682 add_AT_unsigned (die, DW_AT_language, language);
24683
24684 switch (language)
24685 {
24686 case DW_LANG_Fortran77:
24687 case DW_LANG_Fortran90:
24688 case DW_LANG_Fortran95:
24689 case DW_LANG_Fortran03:
24690 case DW_LANG_Fortran08:
24691 /* Fortran has case insensitive identifiers and the front-end
24692 lowercases everything. */
24693 add_AT_unsigned (die, DW_AT_identifier_case, DW_ID_down_case);
24694 break;
24695 default:
24696 /* The default DW_ID_case_sensitive doesn't need to be specified. */
24697 break;
24698 }
24699 return die;
24700 }
24701
24702 /* Generate the DIE for a base class. */
24703
24704 static void
24705 gen_inheritance_die (tree binfo, tree access, tree type,
24706 dw_die_ref context_die)
24707 {
24708 dw_die_ref die = new_die (DW_TAG_inheritance, context_die, binfo);
24709 struct vlr_context ctx = { type, NULL };
24710
24711 add_type_attribute (die, BINFO_TYPE (binfo), TYPE_UNQUALIFIED, false,
24712 context_die);
24713 add_data_member_location_attribute (die, binfo, &ctx);
24714
24715 if (BINFO_VIRTUAL_P (binfo))
24716 add_AT_unsigned (die, DW_AT_virtuality, DW_VIRTUALITY_virtual);
24717
24718 /* In DWARF3+ the default is DW_ACCESS_private only in DW_TAG_class_type
24719 children, otherwise the default is DW_ACCESS_public. In DWARF2
24720 the default has always been DW_ACCESS_private. */
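/* For instance, a public base of a type emitted as DW_TAG_class_type must
   carry an explicit DW_ACCESS_public below, whereas with a
   DW_TAG_structure_type parent in DWARF 3+ the public default is implicit.  */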
24721 if (access == access_public_node)
24722 {
24723 if (dwarf_version == 2
24724 || context_die->die_tag == DW_TAG_class_type)
24725 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_public);
24726 }
24727 else if (access == access_protected_node)
24728 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_protected);
24729 else if (dwarf_version > 2
24730 && context_die->die_tag != DW_TAG_class_type)
24731 add_AT_unsigned (die, DW_AT_accessibility, DW_ACCESS_private);
24732 }
24733
24734 /* Return whether DECL is a FIELD_DECL that represents the variant part of a
24735 structure. */
24736
24737 static bool
24738 is_variant_part (tree decl)
24739 {
24740 return (TREE_CODE (decl) == FIELD_DECL
24741 && TREE_CODE (TREE_TYPE (decl)) == QUAL_UNION_TYPE);
24742 }
24743
24744 /* Check that OPERAND is a reference to a field in STRUCT_TYPE. If it is,
24745 return the FIELD_DECL. Return NULL_TREE otherwise. */
24746
24747 static tree
24748 analyze_discr_in_predicate (tree operand, tree struct_type)
24749 {
24750 while (CONVERT_EXPR_P (operand))
24751 operand = TREE_OPERAND (operand, 0);
24752
24753 /* Match field access to members of struct_type only. */
24754 if (TREE_CODE (operand) == COMPONENT_REF
24755 && TREE_CODE (TREE_OPERAND (operand, 0)) == PLACEHOLDER_EXPR
24756 && TREE_TYPE (TREE_OPERAND (operand, 0)) == struct_type
24757 && TREE_CODE (TREE_OPERAND (operand, 1)) == FIELD_DECL)
24758 return TREE_OPERAND (operand, 1);
24759 else
24760 return NULL_TREE;
24761 }
24762
24763 /* Check that SRC is a constant integer that can be represented as a native
24764 integer constant (either signed or unsigned). If so, store it into DEST and
24765 return true. Return false otherwise. */
24766
24767 static bool
24768 get_discr_value (tree src, dw_discr_value *dest)
24769 {
24770 tree discr_type = TREE_TYPE (src);
24771
24772 if (lang_hooks.types.get_debug_type)
24773 {
24774 tree debug_type = lang_hooks.types.get_debug_type (discr_type);
24775 if (debug_type != NULL)
24776 discr_type = debug_type;
24777 }
24778
24779 if (TREE_CODE (src) != INTEGER_CST || !INTEGRAL_TYPE_P (discr_type))
24780 return false;
24781
24782 /* Signedness can vary between the original type and the debug type. This
24783 can happen for character types in Ada for instance: the character type
24784 used for code generation can be signed, to be compatible with the C one,
24785 but from a debugger point of view, it must be unsigned. */
24786 bool is_orig_unsigned = TYPE_UNSIGNED (TREE_TYPE (src));
24787 bool is_debug_unsigned = TYPE_UNSIGNED (discr_type);
24788
24789 if (is_orig_unsigned != is_debug_unsigned)
24790 src = fold_convert (discr_type, src);
24791
24792 if (!(is_debug_unsigned ? tree_fits_uhwi_p (src) : tree_fits_shwi_p (src)))
24793 return false;
24794
24795 dest->pos = is_debug_unsigned;
24796 if (is_debug_unsigned)
24797 dest->v.uval = tree_to_uhwi (src);
24798 else
24799 dest->v.sval = tree_to_shwi (src);
24800
24801 return true;
24802 }
24803
24804 /* Try to extract synthetic properties out of VARIANT_PART_DECL, which is a
24805 FIELD_DECL in STRUCT_TYPE that represents a variant part. If unsuccessful,
24806 store NULL_TREE in DISCR_DECL. Otherwise:
24807
24808 - store the discriminant field in STRUCT_TYPE that controls the variant
24809 part to *DISCR_DECL
24810
24811 - put in *DISCR_LISTS_P an array where, for each variant, the
24812 corresponding item is the matching list of discriminant values.
24813
24814 - put in *DISCR_LISTS_LENGTH the number of variants, which is the size of
24815 the above array.
24816
24817 Note that when the array is allocated (i.e. when the analysis is
24818 successful), it is up to the caller to free the array. */
24819
24820 static void
24821 analyze_variants_discr (tree variant_part_decl,
24822 tree struct_type,
24823 tree *discr_decl,
24824 dw_discr_list_ref **discr_lists_p,
24825 unsigned *discr_lists_length)
24826 {
24827 tree variant_part_type = TREE_TYPE (variant_part_decl);
24828 tree variant;
24829 dw_discr_list_ref *discr_lists;
24830 unsigned i;
24831
24832 /* Compute how many variants there are in this variant part. */
24833 *discr_lists_length = 0;
24834 for (variant = TYPE_FIELDS (variant_part_type);
24835 variant != NULL_TREE;
24836 variant = DECL_CHAIN (variant))
24837 ++*discr_lists_length;
24838
24839 *discr_decl = NULL_TREE;
24840 *discr_lists_p
24841 = (dw_discr_list_ref *) xcalloc (*discr_lists_length,
24842 sizeof (**discr_lists_p));
24843 discr_lists = *discr_lists_p;
24844
24845 /* And then analyze all variants to extract discriminant information for all
24846 of them. This analysis is conservative: as soon as we detect something we
24847 do not support, abort everything and pretend we found nothing. */
24848 for (variant = TYPE_FIELDS (variant_part_type), i = 0;
24849 variant != NULL_TREE;
24850 variant = DECL_CHAIN (variant), ++i)
24851 {
24852 tree match_expr = DECL_QUALIFIER (variant);
24853
24854 /* Now, try to analyze the predicate and deduce a discriminant for
24855 it. */
24856 if (match_expr == boolean_true_node)
24857 /* Typically happens for the default variant: it matches all cases that
24858 previous variants rejected. Don't output any matching value for
24859 this one. */
24860 continue;
24861
24862 /* The following loop tries to iterate over each discriminant
24863 possibility: single values or ranges. */
24864 while (match_expr != NULL_TREE)
24865 {
24866 tree next_round_match_expr;
24867 tree candidate_discr = NULL_TREE;
24868 dw_discr_list_ref new_node = NULL;
24869
24870 /* Possibilities are matched one after the other by nested
24871 TRUTH_ORIF_EXPR expressions. Process the current possibility and
24872 continue with the rest at next iteration. */
24873 if (TREE_CODE (match_expr) == TRUTH_ORIF_EXPR)
24874 {
24875 next_round_match_expr = TREE_OPERAND (match_expr, 0);
24876 match_expr = TREE_OPERAND (match_expr, 1);
24877 }
24878 else
24879 next_round_match_expr = NULL_TREE;
24880
24881 if (match_expr == boolean_false_node)
24882 /* This sub-expression matches nothing: just wait for the next
24883 one. */
24884 ;
24885
24886 else if (TREE_CODE (match_expr) == EQ_EXPR)
24887 {
24888 /* We are matching: <discr_field> == <integer_cst>
24889 This sub-expression matches a single value. */
24890 tree integer_cst = TREE_OPERAND (match_expr, 1);
24891
24892 candidate_discr
24893 = analyze_discr_in_predicate (TREE_OPERAND (match_expr, 0),
24894 struct_type);
24895
24896 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24897 if (!get_discr_value (integer_cst,
24898 &new_node->dw_discr_lower_bound))
24899 goto abort;
24900 new_node->dw_discr_range = false;
24901 }
24902
24903 else if (TREE_CODE (match_expr) == TRUTH_ANDIF_EXPR)
24904 {
24905 /* We are matching:
24906 <discr_field> > <integer_cst>
24907 && <discr_field> < <integer_cst>.
24908 This sub-expression matches the range of values between the
24909 two matched integer constants. Note that comparisons can be
24910 inclusive or exclusive. */
24911 tree candidate_discr_1, candidate_discr_2;
24912 tree lower_cst, upper_cst;
24913 bool lower_cst_included, upper_cst_included;
24914 tree lower_op = TREE_OPERAND (match_expr, 0);
24915 tree upper_op = TREE_OPERAND (match_expr, 1);
24916
24917 /* When the comparison is exclusive, the integer constant is not
24918 the discriminant range bound we are looking for: we will have
24919 to increment or decrement it. */
24920 if (TREE_CODE (lower_op) == GE_EXPR)
24921 lower_cst_included = true;
24922 else if (TREE_CODE (lower_op) == GT_EXPR)
24923 lower_cst_included = false;
24924 else
24925 goto abort;
24926
24927 if (TREE_CODE (upper_op) == LE_EXPR)
24928 upper_cst_included = true;
24929 else if (TREE_CODE (upper_op) == LT_EXPR)
24930 upper_cst_included = false;
24931 else
24932 goto abort;
24933
24934 /* Extract the discriminant from the first operand and check it
24935 is consistent with the same analysis in the second
24936 operand. */
24937 candidate_discr_1
24938 = analyze_discr_in_predicate (TREE_OPERAND (lower_op, 0),
24939 struct_type);
24940 candidate_discr_2
24941 = analyze_discr_in_predicate (TREE_OPERAND (upper_op, 0),
24942 struct_type);
24943 if (candidate_discr_1 == candidate_discr_2)
24944 candidate_discr = candidate_discr_1;
24945 else
24946 goto abort;
24947
24948 /* Extract bounds from both. */
24949 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24950 lower_cst = TREE_OPERAND (lower_op, 1);
24951 upper_cst = TREE_OPERAND (upper_op, 1);
24952
24953 if (!lower_cst_included)
24954 lower_cst
24955 = fold_build2 (PLUS_EXPR, TREE_TYPE (lower_cst), lower_cst,
24956 build_int_cst (TREE_TYPE (lower_cst), 1));
24957 if (!upper_cst_included)
24958 upper_cst
24959 = fold_build2 (MINUS_EXPR, TREE_TYPE (upper_cst), upper_cst,
24960 build_int_cst (TREE_TYPE (upper_cst), 1));
24961
24962 if (!get_discr_value (lower_cst,
24963 &new_node->dw_discr_lower_bound)
24964 || !get_discr_value (upper_cst,
24965 &new_node->dw_discr_upper_bound))
24966 goto abort;
24967
24968 new_node->dw_discr_range = true;
24969 }
24970
24971 else if ((candidate_discr
24972 = analyze_discr_in_predicate (match_expr, struct_type))
24973 && TREE_TYPE (candidate_discr) == boolean_type_node)
24974 {
24975 /* We are matching: <discr_field> for a boolean discriminant.
24976 This sub-expression matches boolean_true_node. */
24977 new_node = ggc_cleared_alloc<dw_discr_list_node> ();
24978 if (!get_discr_value (boolean_true_node,
24979 &new_node->dw_discr_lower_bound))
24980 goto abort;
24981 new_node->dw_discr_range = false;
24982 }
24983
24984 else
24985 /* Unsupported sub-expression: we cannot determine the set of
24986 matching discriminant values. Abort everything. */
24987 goto abort;
24988
24989 /* If the discriminant info is not consistent with what we saw so
24990 far, consider the analysis failed and abort everything. */
24991 if (candidate_discr == NULL_TREE
24992 || (*discr_decl != NULL_TREE && candidate_discr != *discr_decl))
24993 goto abort;
24994 else
24995 *discr_decl = candidate_discr;
24996
24997 if (new_node != NULL)
24998 {
24999 new_node->dw_discr_next = discr_lists[i];
25000 discr_lists[i] = new_node;
25001 }
25002 match_expr = next_round_match_expr;
25003 }
25004 }
25005
25006 /* If we reach this point, we could match everything we were interested
25007 in. */
25008 return;
25009
25010 abort:
25011 /* Clean all data structure and return no result. */
25012 free (*discr_lists_p);
25013 *discr_lists_p = NULL;
25014 *discr_decl = NULL_TREE;
25015 }
25016
25017 /* Generate a DIE to represent VARIANT_PART_DECL, a variant part that is part
25018 of STRUCT_TYPE, a record type. This new DIE is emitted as the next child
25019 under CONTEXT_DIE.
25020
25021 Variant parts are supposed to be implemented as a FIELD_DECL whose type is a
25022 QUAL_UNION_TYPE: this is the VARIANT_PART_DECL parameter. The members for
25023 this type, which are record types, represent the available variants and each
25024 has a DECL_QUALIFIER attribute. The discriminant and the discriminant
25025 values are inferred from these attributes.
25026
25027 In trees, the offsets for the fields inside these sub-records are relative
25028 to the variant part itself, whereas the corresponding DIEs should have
25029 offset attributes that are relative to the embedding record base address.
25030 This is why the caller must provide a VARIANT_PART_OFFSET expression: it
25031 must be an expression that computes the offset of the variant part to
25032 describe in DWARF. */
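/* As an illustrative sketch, an Ada variant record such as

     type Rec (Disc : Natural := 0) is record
        case Disc is
           when 0      => I : Integer;
           when others => F : Float;
        end case;
     end record;

   appears in trees as a FIELD_DECL whose type is a QUAL_UNION_TYPE and is
   emitted as a DW_TAG_variant_part whose DW_AT_discr references the DIE for
   Disc, with one DW_TAG_variant child per alternative carrying
   DW_AT_discr_value or DW_AT_discr_list as appropriate.  */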
25033
25034 static void
25035 gen_variant_part (tree variant_part_decl, struct vlr_context *vlr_ctx,
25036 dw_die_ref context_die)
25037 {
25038 const tree variant_part_type = TREE_TYPE (variant_part_decl);
25039 tree variant_part_offset = vlr_ctx->variant_part_offset;
25040 struct loc_descr_context ctx = {
25041 vlr_ctx->struct_type, /* context_type */
25042 NULL_TREE, /* base_decl */
25043 NULL, /* dpi */
25044 false, /* placeholder_arg */
25045 false /* placeholder_seen */
25046 };
25047
25048 /* The FIELD_DECL node in STRUCT_TYPE that acts as the discriminant, or
25049 NULL_TREE if there is no such field. */
25050 tree discr_decl = NULL_TREE;
25051 dw_discr_list_ref *discr_lists;
25052 unsigned discr_lists_length = 0;
25053 unsigned i;
25054
25055 dw_die_ref dwarf_proc_die = NULL;
25056 dw_die_ref variant_part_die
25057 = new_die (DW_TAG_variant_part, context_die, variant_part_type);
25058
25059 equate_decl_number_to_die (variant_part_decl, variant_part_die);
25060
25061 analyze_variants_discr (variant_part_decl, vlr_ctx->struct_type,
25062 &discr_decl, &discr_lists, &discr_lists_length);
25063
25064 if (discr_decl != NULL_TREE)
25065 {
25066 dw_die_ref discr_die = lookup_decl_die (discr_decl);
25067
25068 if (discr_die)
25069 add_AT_die_ref (variant_part_die, DW_AT_discr, discr_die);
25070 else
25071 /* We have no DIE for the discriminant, so just discard all
25072 discriminant information in the output. */
25073 discr_decl = NULL_TREE;
25074 }
25075
25076 /* If the offset for this variant part is more complex than a constant,
25077 create a DWARF procedure for it so that we will not have to generate DWARF
25078 expressions for it for each member. */
25079 if (TREE_CODE (variant_part_offset) != INTEGER_CST
25080 && (dwarf_version >= 3 || !dwarf_strict))
25081 {
25082 const tree dwarf_proc_fndecl
25083 = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL, NULL_TREE,
25084 build_function_type (TREE_TYPE (variant_part_offset),
25085 NULL_TREE));
25086 const tree dwarf_proc_call = build_call_expr (dwarf_proc_fndecl, 0);
25087 const dw_loc_descr_ref dwarf_proc_body
25088 = loc_descriptor_from_tree (variant_part_offset, 0, &ctx);
25089
25090 dwarf_proc_die = new_dwarf_proc_die (dwarf_proc_body,
25091 dwarf_proc_fndecl, context_die);
25092 if (dwarf_proc_die != NULL)
25093 variant_part_offset = dwarf_proc_call;
25094 }
25095
25096 /* Output DIEs for all variants. */
25097 i = 0;
25098 for (tree variant = TYPE_FIELDS (variant_part_type);
25099 variant != NULL_TREE;
25100 variant = DECL_CHAIN (variant), ++i)
25101 {
25102 tree variant_type = TREE_TYPE (variant);
25103 dw_die_ref variant_die;
25104
25105 /* All variants (i.e. members of a variant part) are supposed to be
25106 encoded as structures. Sub-variant parts are QUAL_UNION_TYPE fields
25107 under these records. */
25108 gcc_assert (TREE_CODE (variant_type) == RECORD_TYPE);
25109
25110 variant_die = new_die (DW_TAG_variant, variant_part_die, variant_type);
25111 equate_decl_number_to_die (variant, variant_die);
25112
25113 /* Output discriminant values this variant matches, if any. */
25114 if (discr_decl == NULL || discr_lists[i] == NULL)
25115 /* In the case we have no discriminant information at all, this is
25116 probably the default variant: as the standard says, don't
25117 output any discriminant value/list attribute. */
25118 ;
25119 else if (discr_lists[i]->dw_discr_next == NULL
25120 && !discr_lists[i]->dw_discr_range)
25121 /* If there is only one accepted value, don't bother outputting a
25122 list. */
25123 add_discr_value (variant_die, &discr_lists[i]->dw_discr_lower_bound);
25124 else
25125 add_discr_list (variant_die, discr_lists[i]);
25126
25127 for (tree member = TYPE_FIELDS (variant_type);
25128 member != NULL_TREE;
25129 member = DECL_CHAIN (member))
25130 {
25131 struct vlr_context vlr_sub_ctx = {
25132 vlr_ctx->struct_type, /* struct_type */
25133 NULL /* variant_part_offset */
25134 };
25135 if (is_variant_part (member))
25136 {
25137 /* All offsets for fields inside variant parts are relative to
25138 the top-level embedding RECORD_TYPE's base address. On the
25139 other hand, offsets in GCC's types are relative to the
25140 nested-most variant part. So we have to sum offsets each time
25141 we recurse. */
25142
25143 vlr_sub_ctx.variant_part_offset
25144 = fold_build2 (PLUS_EXPR, TREE_TYPE (variant_part_offset),
25145 variant_part_offset, byte_position (member));
25146 gen_variant_part (member, &vlr_sub_ctx, variant_die);
25147 }
25148 else
25149 {
25150 vlr_sub_ctx.variant_part_offset = variant_part_offset;
25151 gen_decl_die (member, NULL, &vlr_sub_ctx, variant_die);
25152 }
25153 }
25154 }
25155
25156 free (discr_lists);
25157 }
25158
25159 /* Generate a DIE for a class member. */
25160
25161 static void
25162 gen_member_die (tree type, dw_die_ref context_die)
25163 {
25164 tree member;
25165 tree binfo = TYPE_BINFO (type);
25166
25167 gcc_assert (TYPE_MAIN_VARIANT (type) == type);
25168
25169 /* If this is not an incomplete type, output descriptions of each of its
25170 members. Note that as we output the DIEs necessary to represent the
25171 members of this record or union type, we will also be trying to output
25172 DIEs to represent the *types* of those members. However the `type'
25173 function (above) will specifically avoid generating type DIEs for member
25174 types *within* the list of member DIEs for this (containing) type except
25175 for those types (of members) which are explicitly marked as also being
25176 members of this (containing) type themselves. The g++ front- end can
25177 force any given type to be treated as a member of some other (containing)
25178 type by setting the TYPE_CONTEXT of the given (member) type to point to
25179 the TREE node representing the appropriate (containing) type. */
25180
25181 /* First output info about the base classes. */
25182 if (binfo && early_dwarf)
25183 {
25184 vec<tree, va_gc> *accesses = BINFO_BASE_ACCESSES (binfo);
25185 int i;
25186 tree base;
25187
25188 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base); i++)
25189 gen_inheritance_die (base,
25190 (accesses ? (*accesses)[i] : access_public_node),
25191 type,
25192 context_die);
25193 }
25194
25195 /* Now output info about the members. */
25196 for (member = TYPE_FIELDS (type); member; member = DECL_CHAIN (member))
25197 {
25198 /* Ignore clones. */
25199 if (DECL_ABSTRACT_ORIGIN (member))
25200 continue;
25201
25202 struct vlr_context vlr_ctx = { type, NULL_TREE };
25203 bool static_inline_p
25204 = (VAR_P (member)
25205 && TREE_STATIC (member)
25206 && (lang_hooks.decls.decl_dwarf_attribute (member, DW_AT_inline)
25207 != -1));
25208
25209 /* If we thought we were generating minimal debug info for TYPE
25210 and then changed our minds, some of the member declarations
25211 may have already been defined. Don't define them again, but
25212 do put them in the right order. */
25213
25214 if (dw_die_ref child = lookup_decl_die (member))
25215 {
25216 /* Handle inline static data members, which only have in-class
25217 declarations. */
25218 bool splice = true;
25219
25220 dw_die_ref ref = NULL;
25221 if (child->die_tag == DW_TAG_variable
25222 && child->die_parent == comp_unit_die ())
25223 {
25224 ref = get_AT_ref (child, DW_AT_specification);
25225
25226 /* For C++17 inline static data members followed by redundant
25227 out-of-class redeclaration, we might get here with
25228 child being the DIE created for the out-of-class
25229 redeclaration and with its DW_AT_specification being
25230 the DIE created for in-class definition. We want to
25231 reparent the latter, and don't want to create another
25232 DIE with DW_AT_specification in that case, because
25233 we already have one. */
25234 if (ref
25235 && static_inline_p
25236 && ref->die_tag == DW_TAG_variable
25237 && ref->die_parent == comp_unit_die ()
25238 && get_AT (ref, DW_AT_specification) == NULL)
25239 {
25240 child = ref;
25241 ref = NULL;
25242 static_inline_p = false;
25243 }
25244
25245 if (!ref)
25246 {
25247 reparent_child (child, context_die);
25248 if (dwarf_version < 5)
25249 child->die_tag = DW_TAG_member;
25250 splice = false;
25251 }
25252 }
25253
25254 if (splice)
25255 splice_child_die (context_die, child);
25256 }
25257
25258 /* Do not generate standard DWARF for variant parts if we are generating
25259 the corresponding GNAT encodings: DIEs generated for both would
25260 conflict in our mappings. */
25261 else if (is_variant_part (member)
25262 && gnat_encodings == DWARF_GNAT_ENCODINGS_MINIMAL)
25263 {
25264 vlr_ctx.variant_part_offset = byte_position (member);
25265 gen_variant_part (member, &vlr_ctx, context_die);
25266 }
25267 else
25268 {
25269 vlr_ctx.variant_part_offset = NULL_TREE;
25270 gen_decl_die (member, NULL, &vlr_ctx, context_die);
25271 }
25272
25273 /* For C++ inline static data members emit immediately a DW_TAG_variable
25274 DIE that will refer to that DW_TAG_member/DW_TAG_variable through
25275 DW_AT_specification. */
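/* E.g. "struct S { static inline int x = 0; };" produces both a child DIE
   for x inside S and a compile-unit-level DW_TAG_variable whose
   DW_AT_specification points back at that child.  */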
25276 if (static_inline_p)
25277 {
25278 int old_extern = DECL_EXTERNAL (member);
25279 DECL_EXTERNAL (member) = 0;
25280 gen_decl_die (member, NULL, NULL, comp_unit_die ());
25281 DECL_EXTERNAL (member) = old_extern;
25282 }
25283 }
25284 }
25285
25286 /* Generate a DIE for a structure or union type. If TYPE_DECL_SUPPRESS_DEBUG
25287 is set, we pretend that the type was never defined, so we only get the
25288 member DIEs needed by later specification DIEs. */
25289
25290 static void
25291 gen_struct_or_union_type_die (tree type, dw_die_ref context_die,
25292 enum debug_info_usage usage)
25293 {
25294 if (TREE_ASM_WRITTEN (type))
25295 {
25296 /* Fill in the bound of variable-length fields in late dwarf if
25297 still incomplete. */
25298 if (!early_dwarf && variably_modified_type_p (type, NULL))
25299 for (tree member = TYPE_FIELDS (type);
25300 member;
25301 member = DECL_CHAIN (member))
25302 fill_variable_array_bounds (TREE_TYPE (member));
25303 return;
25304 }
25305
25306 dw_die_ref type_die = lookup_type_die (type);
25307 dw_die_ref scope_die = 0;
25308 int nested = 0;
25309 int complete = (TYPE_SIZE (type)
25310 && (! TYPE_STUB_DECL (type)
25311 || ! TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (type))));
25312 int ns_decl = (context_die && context_die->die_tag == DW_TAG_namespace);
25313 complete = complete && should_emit_struct_debug (type, usage);
25314
25315 if (type_die && ! complete)
25316 return;
25317
25318 if (TYPE_CONTEXT (type) != NULL_TREE
25319 && (AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25320 || TREE_CODE (TYPE_CONTEXT (type)) == NAMESPACE_DECL))
25321 nested = 1;
25322
25323 scope_die = scope_die_for (type, context_die);
25324
25325 /* Generate child DIEs for template parameters. */
25326 if (!type_die && debug_info_level > DINFO_LEVEL_TERSE)
25327 schedule_generic_params_dies_gen (type);
25328
25329 if (! type_die || (nested && is_cu_die (scope_die)))
25330 /* First occurrence of type or toplevel definition of nested class. */
25331 {
25332 dw_die_ref old_die = type_die;
25333
25334 type_die = new_die (TREE_CODE (type) == RECORD_TYPE
25335 ? record_type_tag (type) : DW_TAG_union_type,
25336 scope_die, type);
25337 equate_type_number_to_die (type, type_die);
25338 if (old_die)
25339 add_AT_specification (type_die, old_die);
25340 else
25341 add_name_attribute (type_die, type_tag (type));
25342 }
25343 else
25344 remove_AT (type_die, DW_AT_declaration);
25345
25346 /* If this type has been completed, then give it a byte_size attribute and
25347 then give a list of members. */
25348 if (complete && !ns_decl)
25349 {
25350 /* Prevent infinite recursion in cases where the type of some member of
25351 this type is expressed in terms of this type itself. */
25352 TREE_ASM_WRITTEN (type) = 1;
25353 add_byte_size_attribute (type_die, type);
25354 add_alignment_attribute (type_die, type);
25355 if (TYPE_STUB_DECL (type) != NULL_TREE)
25356 {
25357 add_src_coords_attributes (type_die, TYPE_STUB_DECL (type));
25358 add_accessibility_attribute (type_die, TYPE_STUB_DECL (type));
25359 }
25360
25361 /* If the first reference to this type was as the return type of an
25362 inline function, then it may not have a parent. Fix this now. */
25363 if (type_die->die_parent == NULL)
25364 add_child_die (scope_die, type_die);
25365
25366 gen_member_die (type, type_die);
25367
25368 add_gnat_descriptive_type_attribute (type_die, type, context_die);
25369 if (TYPE_ARTIFICIAL (type))
25370 add_AT_flag (type_die, DW_AT_artificial, 1);
25371
25372 /* GNU extension: Record what type our vtable lives in. */
25373 if (TYPE_VFIELD (type))
25374 {
25375 tree vtype = DECL_FCONTEXT (TYPE_VFIELD (type));
25376
25377 gen_type_die (vtype, context_die);
25378 add_AT_die_ref (type_die, DW_AT_containing_type,
25379 lookup_type_die (vtype));
25380 }
25381 }
25382 else
25383 {
25384 add_AT_flag (type_die, DW_AT_declaration, 1);
25385
25386 /* We don't need to do this for function-local types. */
25387 if (TYPE_STUB_DECL (type)
25388 && ! decl_function_context (TYPE_STUB_DECL (type)))
25389 vec_safe_push (incomplete_types, type);
25390 }
25391
25392 if (get_AT (type_die, DW_AT_name))
25393 add_pubtype (type, type_die);
25394 }
25395
25396 /* Generate a DIE for a subroutine _type_. */
25397
25398 static void
25399 gen_subroutine_type_die (tree type, dw_die_ref context_die)
25400 {
25401 tree return_type = TREE_TYPE (type);
25402 dw_die_ref subr_die
25403 = new_die (DW_TAG_subroutine_type,
25404 scope_die_for (type, context_die), type);
25405
25406 equate_type_number_to_die (type, subr_die);
25407 add_prototyped_attribute (subr_die, type);
25408 add_type_attribute (subr_die, return_type, TYPE_UNQUALIFIED, false,
25409 context_die);
25410 add_alignment_attribute (subr_die, type);
25411 gen_formal_types_die (type, subr_die);
25412
25413 if (get_AT (subr_die, DW_AT_name))
25414 add_pubtype (type, subr_die);
25415 if ((dwarf_version >= 5 || !dwarf_strict)
25416 && lang_hooks.types.type_dwarf_attribute (type, DW_AT_reference) != -1)
25417 add_AT_flag (subr_die, DW_AT_reference, 1);
25418 if ((dwarf_version >= 5 || !dwarf_strict)
25419 && lang_hooks.types.type_dwarf_attribute (type,
25420 DW_AT_rvalue_reference) != -1)
25421 add_AT_flag (subr_die, DW_AT_rvalue_reference, 1);
25422 }
25423
25424 /* Generate a DIE for a type definition. */
25425
25426 static void
25427 gen_typedef_die (tree decl, dw_die_ref context_die)
25428 {
25429 dw_die_ref type_die;
25430 tree type;
25431
25432 if (TREE_ASM_WRITTEN (decl))
25433 {
25434 if (DECL_ORIGINAL_TYPE (decl))
25435 fill_variable_array_bounds (DECL_ORIGINAL_TYPE (decl));
25436 return;
25437 }
25438
25439 /* As we avoid creating DIEs for local typedefs (see decl_ultimate_origin
25440 checks in process_scope_var and modified_type_die), this should be called
25441 only for original types. */
25442 gcc_assert (decl_ultimate_origin (decl) == NULL
25443 || decl_ultimate_origin (decl) == decl);
25444
25445 TREE_ASM_WRITTEN (decl) = 1;
25446 type_die = new_die (DW_TAG_typedef, context_die, decl);
25447
25448 add_name_and_src_coords_attributes (type_die, decl);
25449 if (DECL_ORIGINAL_TYPE (decl))
25450 {
25451 type = DECL_ORIGINAL_TYPE (decl);
25452 if (type == error_mark_node)
25453 return;
25454
25455 gcc_assert (type != TREE_TYPE (decl));
25456 equate_type_number_to_die (TREE_TYPE (decl), type_die);
25457 }
25458 else
25459 {
25460 type = TREE_TYPE (decl);
25461 if (type == error_mark_node)
25462 return;
25463
25464 if (is_naming_typedef_decl (TYPE_NAME (type)))
25465 {
25466 /* Here, we are in the case of decl being a typedef naming
25467 an anonymous type, e.g:
25468 typedef struct {...} foo;
25469 In that case TREE_TYPE (decl) is not a typedef variant
25470 type and TYPE_NAME of the anonymous type is set to the
25471 TYPE_DECL of the typedef. This construct is emitted by
25472 the C++ FE.
25473
25474 TYPE is the anonymous struct named by the typedef
25475 DECL. As we need the DW_AT_type attribute of the
25476 DW_TAG_typedef to point to the DIE of TYPE, let's
25477 generate that DIE right away. add_type_attribute
25478 called below will then pick (via lookup_type_die) that
25479 anonymous struct DIE. */
25480 if (!TREE_ASM_WRITTEN (type))
25481 gen_tagged_type_die (type, context_die, DINFO_USAGE_DIR_USE);
25482
25483 /* This is a GNU Extension. We are adding a
25484 DW_AT_linkage_name attribute to the DIE of the
25485 anonymous struct TYPE. The value of that attribute
25486 is the name of the typedef decl naming the anonymous
25487 struct. This greatly eases the work of consumers of
25488 this debug info. */
25489 add_linkage_name_raw (lookup_type_die (type), decl);
25490 }
25491 }
25492
25493 add_type_attribute (type_die, type, decl_quals (decl), false,
25494 context_die);
25495
25496 if (is_naming_typedef_decl (decl))
25497 /* We want that all subsequent calls to lookup_type_die with
25498 TYPE in argument yield the DW_TAG_typedef we have just
25499 created. */
25500 equate_type_number_to_die (type, type_die);
25501
25502 add_alignment_attribute (type_die, TREE_TYPE (decl));
25503
25504 add_accessibility_attribute (type_die, decl);
25505
25506 if (DECL_ABSTRACT_P (decl))
25507 equate_decl_number_to_die (decl, type_die);
25508
25509 if (get_AT (type_die, DW_AT_name))
25510 add_pubtype (decl, type_die);
25511 }
25512
25513 /* Generate a DIE for a struct, class, enum or union type. */
25514
25515 static void
25516 gen_tagged_type_die (tree type,
25517 dw_die_ref context_die,
25518 enum debug_info_usage usage)
25519 {
25520 if (type == NULL_TREE
25521 || !is_tagged_type (type))
25522 return;
25523
25524 if (TREE_ASM_WRITTEN (type))
25525 ;
25526 /* If this is a nested type whose containing class hasn't been written
25527 out yet, writing it out will cover this one, too. This does not apply
25528 to instantiations of member class templates; they need to be added to
25529 the containing class as they are generated. FIXME: This hurts the
25530 idea of combining type decls from multiple TUs, since we can't predict
25531 what set of template instantiations we'll get. */
25532 else if (TYPE_CONTEXT (type)
25533 && AGGREGATE_TYPE_P (TYPE_CONTEXT (type))
25534 && ! TREE_ASM_WRITTEN (TYPE_CONTEXT (type)))
25535 {
25536 gen_type_die_with_usage (TYPE_CONTEXT (type), context_die, usage);
25537
25538 if (TREE_ASM_WRITTEN (type))
25539 return;
25540
25541 /* If that failed, attach ourselves to the stub. */
25542 context_die = lookup_type_die (TYPE_CONTEXT (type));
25543 }
25544 else if (TYPE_CONTEXT (type) != NULL_TREE
25545 && (TREE_CODE (TYPE_CONTEXT (type)) == FUNCTION_DECL))
25546 {
25547 /* If this type is local to a function that hasn't been written
25548 out yet, use a NULL context for now; it will be fixed up in
25549 decls_for_scope. */
25550 context_die = lookup_decl_die (TYPE_CONTEXT (type));
25551 /* A declaration DIE doesn't count; nested types need to go in the
25552 specification. */
25553 if (context_die && is_declaration_die (context_die))
25554 context_die = NULL;
25555 }
25556 else
25557 context_die = declare_in_namespace (type, context_die);
25558
25559 if (TREE_CODE (type) == ENUMERAL_TYPE)
25560 {
25561 /* This might have been written out by the call to
25562 declare_in_namespace. */
25563 if (!TREE_ASM_WRITTEN (type))
25564 gen_enumeration_type_die (type, context_die);
25565 }
25566 else
25567 gen_struct_or_union_type_die (type, context_die, usage);
25568
25569 /* Don't set TREE_ASM_WRITTEN on an incomplete struct; we want to fix
25570 it up if it is ever completed. gen_*_type_die will set it for us
25571 when appropriate. */
25572 }
25573
25574 /* Generate a type description DIE. */
25575
25576 static void
25577 gen_type_die_with_usage (tree type, dw_die_ref context_die,
25578 enum debug_info_usage usage)
25579 {
25580 struct array_descr_info info;
25581
25582 if (type == NULL_TREE || type == error_mark_node)
25583 return;
25584
25585 if (flag_checking && type)
25586 verify_type (type);
25587
25588 if (TYPE_NAME (type) != NULL_TREE
25589 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
25590 && is_redundant_typedef (TYPE_NAME (type))
25591 && DECL_ORIGINAL_TYPE (TYPE_NAME (type)))
25592 /* The DECL of this type is a typedef we don't want to emit debug
25593 info for but we want debug info for its underlying typedef.
25594 This can happen, e.g., for the injected-class-name of a C++
25595 type. */
25596 type = DECL_ORIGINAL_TYPE (TYPE_NAME (type));
25597
25598 /* If TYPE is a typedef type variant, let's generate debug info
25599 for the parent typedef which TYPE is a type of. */
25600 if (typedef_variant_p (type))
25601 {
25602 if (TREE_ASM_WRITTEN (type))
25603 return;
25604
25605 tree name = TYPE_NAME (type);
25606 tree origin = decl_ultimate_origin (name);
25607 if (origin != NULL && origin != name)
25608 {
25609 gen_decl_die (origin, NULL, NULL, context_die);
25610 return;
25611 }
25612
25613 /* Prevent broken recursion; we can't hand off to the same type. */
25614 gcc_assert (DECL_ORIGINAL_TYPE (name) != type);
25615
25616 /* Give typedefs the right scope. */
25617 context_die = scope_die_for (type, context_die);
25618
25619 TREE_ASM_WRITTEN (type) = 1;
25620
25621 gen_decl_die (name, NULL, NULL, context_die);
25622 return;
25623 }
25624
25625 /* If type is an anonymous tagged type named by a typedef, let's
25626 generate debug info for the typedef. */
25627 if (is_naming_typedef_decl (TYPE_NAME (type)))
25628 {
25629 /* Give typedefs the right scope. */
25630 context_die = scope_die_for (type, context_die);
25631
25632 gen_decl_die (TYPE_NAME (type), NULL, NULL, context_die);
25633 return;
25634 }
25635
25636 if (lang_hooks.types.get_debug_type)
25637 {
25638 tree debug_type = lang_hooks.types.get_debug_type (type);
25639
25640 if (debug_type != NULL_TREE && debug_type != type)
25641 {
25642 gen_type_die_with_usage (debug_type, context_die, usage);
25643 return;
25644 }
25645 }
25646
25647 /* We are going to output a DIE to represent the unqualified version
25648 of this type (i.e. without any const or volatile qualifiers) so
25649 get the main variant (i.e. the unqualified version) of this type
25650 now. (Vectors and arrays are special because the debugging info is in the
25651 cloned type itself. Similarly function/method types can contain extra
25652 ref-qualification). */
25653 if (TREE_CODE (type) == FUNCTION_TYPE
25654 || TREE_CODE (type) == METHOD_TYPE)
25655 {
25656 /* For function/method types, can't use type_main_variant here,
25657 because that can have different ref-qualifiers for C++,
25658 but try to canonicalize. */
25659 tree main = TYPE_MAIN_VARIANT (type);
25660 for (tree t = main; t; t = TYPE_NEXT_VARIANT (t))
25661 if (TYPE_QUALS_NO_ADDR_SPACE (t) == 0
25662 && check_base_type (t, main)
25663 && check_lang_type (t, type))
25664 {
25665 type = t;
25666 break;
25667 }
25668 }
25669 else if (TREE_CODE (type) != VECTOR_TYPE
25670 && TREE_CODE (type) != ARRAY_TYPE)
25671 type = type_main_variant (type);
25672
25673 /* If this is an array type with hidden descriptor, handle it first. */
25674 if (!TREE_ASM_WRITTEN (type)
25675 && lang_hooks.types.get_array_descr_info)
25676 {
25677 memset (&info, 0, sizeof (info));
25678 if (lang_hooks.types.get_array_descr_info (type, &info))
25679 {
25680 /* Fortran sometimes emits array types with no dimension. */
25681 gcc_assert (info.ndimensions >= 0
25682 && (info.ndimensions
25683 <= DWARF2OUT_ARRAY_DESCR_INFO_MAX_DIMEN));
25684 gen_descr_array_type_die (type, &info, context_die);
25685 TREE_ASM_WRITTEN (type) = 1;
25686 return;
25687 }
25688 }
25689
25690 if (TREE_ASM_WRITTEN (type))
25691 {
25692 /* Variable-length types may be incomplete even if
25693 TREE_ASM_WRITTEN. For such types, fall through to
25694 gen_array_type_die() and possibly fill in
25695 DW_AT_{upper,lower}_bound attributes. */
25696 if ((TREE_CODE (type) != ARRAY_TYPE
25697 && TREE_CODE (type) != RECORD_TYPE
25698 && TREE_CODE (type) != UNION_TYPE
25699 && TREE_CODE (type) != QUAL_UNION_TYPE)
25700 || !variably_modified_type_p (type, NULL))
25701 return;
25702 }
25703
25704 switch (TREE_CODE (type))
25705 {
25706 case ERROR_MARK:
25707 break;
25708
25709 case POINTER_TYPE:
25710 case REFERENCE_TYPE:
25711 /* We must set TREE_ASM_WRITTEN in case this is a recursive type. This
25712 ensures that the gen_type_die recursion will terminate even if the
25713 type is recursive. Recursive types are possible in Ada. */
25714 /* ??? We could perhaps do this for all types before the switch
25715 statement. */
25716 TREE_ASM_WRITTEN (type) = 1;
25717
25718 /* For these types, all that is required is that we output a DIE (or a
25719 set of DIEs) to represent the "basis" type. */
25720 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25721 DINFO_USAGE_IND_USE);
25722 break;
25723
25724 case OFFSET_TYPE:
25725 /* This code is used for C++ pointer-to-data-member types.
25726 Output a description of the relevant class type. */
25727 gen_type_die_with_usage (TYPE_OFFSET_BASETYPE (type), context_die,
25728 DINFO_USAGE_IND_USE);
25729
25730 /* Output a description of the type of the object pointed to. */
25731 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25732 DINFO_USAGE_IND_USE);
25733
25734 /* Now output a DIE to represent this pointer-to-data-member type
25735 itself. */
25736 gen_ptr_to_mbr_type_die (type, context_die);
25737 break;
25738
25739 case FUNCTION_TYPE:
25740 /* Force out return type (in case it wasn't forced out already). */
25741 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25742 DINFO_USAGE_DIR_USE);
25743 gen_subroutine_type_die (type, context_die);
25744 break;
25745
25746 case METHOD_TYPE:
25747 /* Force out return type (in case it wasn't forced out already). */
25748 gen_type_die_with_usage (TREE_TYPE (type), context_die,
25749 DINFO_USAGE_DIR_USE);
25750 gen_subroutine_type_die (type, context_die);
25751 break;
25752
25753 case ARRAY_TYPE:
25754 case VECTOR_TYPE:
25755 gen_array_type_die (type, context_die);
25756 break;
25757
25758 case ENUMERAL_TYPE:
25759 case RECORD_TYPE:
25760 case UNION_TYPE:
25761 case QUAL_UNION_TYPE:
25762 gen_tagged_type_die (type, context_die, usage);
25763 return;
25764
25765 case VOID_TYPE:
25766 case INTEGER_TYPE:
25767 case REAL_TYPE:
25768 case FIXED_POINT_TYPE:
25769 case COMPLEX_TYPE:
25770 case BOOLEAN_TYPE:
25771 /* No DIEs needed for fundamental types. */
25772 break;
25773
25774 case NULLPTR_TYPE:
25775 case LANG_TYPE:
25776 /* Just use DW_TAG_unspecified_type. */
25777 {
25778 dw_die_ref type_die = lookup_type_die (type);
25779 if (type_die == NULL)
25780 {
25781 tree name = TYPE_IDENTIFIER (type);
25782 type_die = new_die (DW_TAG_unspecified_type, comp_unit_die (),
25783 type);
25784 add_name_attribute (type_die, IDENTIFIER_POINTER (name));
25785 equate_type_number_to_die (type, type_die);
25786 }
25787 }
25788 break;
25789
25790 default:
25791 if (is_cxx_auto (type))
25792 {
25793 tree name = TYPE_IDENTIFIER (type);
25794 dw_die_ref *die = (name == get_identifier ("auto")
25795 ? &auto_die : &decltype_auto_die);
25796 if (!*die)
25797 {
25798 *die = new_die (DW_TAG_unspecified_type,
25799 comp_unit_die (), NULL_TREE);
25800 add_name_attribute (*die, IDENTIFIER_POINTER (name));
25801 }
25802 equate_type_number_to_die (type, *die);
25803 break;
25804 }
25805 gcc_unreachable ();
25806 }
25807
25808 TREE_ASM_WRITTEN (type) = 1;
25809 }
25810
25811 static void
25812 gen_type_die (tree type, dw_die_ref context_die)
25813 {
25814 if (type != error_mark_node)
25815 {
25816 gen_type_die_with_usage (type, context_die, DINFO_USAGE_DIR_USE);
25817 if (flag_checking)
25818 {
25819 dw_die_ref die = lookup_type_die (type);
25820 if (die)
25821 check_die (die);
25822 }
25823 }
25824 }
25825
25826 /* Generate a DW_TAG_lexical_block DIE followed by DIEs to represent all of the
25827 things which are local to the given block. */
25828
25829 static void
25830 gen_block_die (tree stmt, dw_die_ref context_die)
25831 {
25832 int must_output_die = 0;
25833 bool inlined_func;
25834
25835 /* Ignore blocks that are NULL. */
25836 if (stmt == NULL_TREE)
25837 return;
25838
25839 inlined_func = inlined_function_outer_scope_p (stmt);
25840
25841 /* If the block is one fragment of a non-contiguous block, do not
25842 process the variables, since they will have been done by the
25843 origin block. Do process subblocks. */
25844 if (BLOCK_FRAGMENT_ORIGIN (stmt))
25845 {
25846 tree sub;
25847
25848 for (sub = BLOCK_SUBBLOCKS (stmt); sub; sub = BLOCK_CHAIN (sub))
25849 gen_block_die (sub, context_die);
25850
25851 return;
25852 }
25853
25854 /* Determine if we need to output any Dwarf DIEs at all to represent this
25855 block. */
25856 if (inlined_func)
25857 /* The outer scopes for inlinings *must* always be represented. We
25858 generate DW_TAG_inlined_subroutine DIEs for them. (See below.) */
25859 must_output_die = 1;
25860 else if (lookup_block_die (stmt))
25861 /* If we already have a DIE then it was filled early. Meanwhile
25862 we might have pruned all BLOCK_VARS as optimized out, but we
25863 still want to generate high/low PC attributes, so output it. */
25864 must_output_die = 1;
25865 else if (TREE_USED (stmt)
25866 || TREE_ASM_WRITTEN (stmt))
25867 {
25868 /* Determine if this block directly contains any "significant"
25869 local declarations which we will need to output DIEs for. */
25870 if (debug_info_level > DINFO_LEVEL_TERSE)
25871 {
25872 /* We are not in terse mode so any local declaration that
25873 is not ignored for debug purposes counts as being a
25874 "significant" one. */
25875 if (BLOCK_NUM_NONLOCALIZED_VARS (stmt))
25876 must_output_die = 1;
25877 else
25878 for (tree var = BLOCK_VARS (stmt); var; var = DECL_CHAIN (var))
25879 if (!DECL_IGNORED_P (var))
25880 {
25881 must_output_die = 1;
25882 break;
25883 }
25884 }
25885 else if (!dwarf2out_ignore_block (stmt))
25886 must_output_die = 1;
25887 }
25888
25889 /* It would be a waste of space to generate a Dwarf DW_TAG_lexical_block
25890 DIE for any block which contains no significant local declarations at
25891 all. Rather, in such cases we just call `decls_for_scope' so that any
25892 needed Dwarf info for any sub-blocks will get properly generated. Note
25893 that in terse mode, our definition of what constitutes a "significant"
25894 local declaration gets restricted to include only inlined function
25895 instances and local (nested) function definitions. */
25896 if (must_output_die)
25897 {
25898 if (inlined_func)
25899 gen_inlined_subroutine_die (stmt, context_die);
25900 else
25901 gen_lexical_block_die (stmt, context_die);
25902 }
25903 else
25904 decls_for_scope (stmt, context_die);
25905 }
25906
25907 /* Process variable DECL (or variable with origin ORIGIN) within
25908 block STMT and add it to CONTEXT_DIE. */
25909 static void
25910 process_scope_var (tree stmt, tree decl, tree origin, dw_die_ref context_die)
25911 {
25912 dw_die_ref die;
25913 tree decl_or_origin = decl ? decl : origin;
25914
25915 if (TREE_CODE (decl_or_origin) == FUNCTION_DECL)
25916 die = lookup_decl_die (decl_or_origin);
25917 else if (TREE_CODE (decl_or_origin) == TYPE_DECL)
25918 {
25919 if (TYPE_DECL_IS_STUB (decl_or_origin))
25920 die = lookup_type_die (TREE_TYPE (decl_or_origin));
25921 else
25922 die = lookup_decl_die (decl_or_origin);
25923 /* Avoid re-creating the DIE late if it was optimized as unused early. */
25924 if (! die && ! early_dwarf)
25925 return;
25926 }
25927 else
25928 die = NULL;
25929
25930 /* Avoid creating DIEs for local typedefs and concrete static variables that
25931 will only be pruned later. */
25932 if ((origin || decl_ultimate_origin (decl))
25933 && (TREE_CODE (decl_or_origin) == TYPE_DECL
25934 || (VAR_P (decl_or_origin) && TREE_STATIC (decl_or_origin))))
25935 {
25936 origin = decl_ultimate_origin (decl_or_origin);
25937 if (decl && VAR_P (decl) && die != NULL)
25938 {
25939 die = lookup_decl_die (origin);
25940 if (die != NULL)
25941 equate_decl_number_to_die (decl, die);
25942 }
25943 return;
25944 }
25945
25946 if (die != NULL && die->die_parent == NULL)
25947 add_child_die (context_die, die);
25948 else if (TREE_CODE (decl_or_origin) == IMPORTED_DECL)
25949 {
25950 if (early_dwarf)
25951 dwarf2out_imported_module_or_decl_1 (decl_or_origin, DECL_NAME (decl_or_origin),
25952 stmt, context_die);
25953 }
25954 else
25955 {
25956 if (decl && DECL_P (decl))
25957 {
25958 die = lookup_decl_die (decl);
25959
25960 /* Early created DIEs do not have a parent as the decls refer
25961 to the function as DECL_CONTEXT rather than the BLOCK. */
25962 if (die && die->die_parent == NULL)
25963 {
25964 gcc_assert (in_lto_p);
25965 add_child_die (context_die, die);
25966 }
25967 }
25968
25969 gen_decl_die (decl, origin, NULL, context_die);
25970 }
25971 }
25972
25973 /* Generate all of the decls declared within a given scope and (recursively)
25974 all of its sub-blocks. */
25975
25976 static void
25977 decls_for_scope (tree stmt, dw_die_ref context_die, bool recurse)
25978 {
25979 tree decl;
25980 unsigned int i;
25981 tree subblocks;
25982
25983 /* Ignore NULL blocks. */
25984 if (stmt == NULL_TREE)
25985 return;
25986
25987 /* Output the DIEs to represent all of the data objects and typedefs
25988 declared directly within this block but not within any nested
25989 sub-blocks. Also, nested function and tag DIEs have been
25990 generated with a parent of NULL; fix that up now. We don't
25991 have to do this if we're at -g1. */
25992 if (debug_info_level > DINFO_LEVEL_TERSE)
25993 {
25994 for (decl = BLOCK_VARS (stmt); decl != NULL; decl = DECL_CHAIN (decl))
25995 process_scope_var (stmt, decl, NULL_TREE, context_die);
25996 /* BLOCK_NONLOCALIZED_VARs simply generate DIE stubs with abstract
25997 origin - avoid doing this twice as we have no good way to see
25998 if we've done it once already. */
25999 if (! early_dwarf)
26000 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (stmt); i++)
26001 {
26002 decl = BLOCK_NONLOCALIZED_VAR (stmt, i);
26003 if (decl == current_function_decl)
26004 /* Ignore declarations of the current function: although they
26005 are declarations, gen_subprogram_die would treat them
26006 as definitions again because they are equal to
26007 current_function_decl and would endlessly recurse. */;
26008 else if (TREE_CODE (decl) == FUNCTION_DECL)
26009 process_scope_var (stmt, decl, NULL_TREE, context_die);
26010 else
26011 process_scope_var (stmt, NULL_TREE, decl, context_die);
26012 }
26013 }
26014
26015 /* Even if we're at -g1, we need to process the subblocks in order to get
26016 inlined call information. */
26017
26018 /* Output the DIEs to represent all sub-blocks (and the items declared
26019 therein) of this block. */
26020 if (recurse)
26021 for (subblocks = BLOCK_SUBBLOCKS (stmt);
26022 subblocks != NULL;
26023 subblocks = BLOCK_CHAIN (subblocks))
26024 gen_block_die (subblocks, context_die);
26025 }
26026
26027 /* Is this a typedef we can avoid emitting? */
26028
26029 static bool
26030 is_redundant_typedef (const_tree decl)
26031 {
26032 if (TYPE_DECL_IS_STUB (decl))
26033 return true;
26034
26035 if (DECL_ARTIFICIAL (decl)
26036 && DECL_CONTEXT (decl)
26037 && is_tagged_type (DECL_CONTEXT (decl))
26038 && TREE_CODE (TYPE_NAME (DECL_CONTEXT (decl))) == TYPE_DECL
26039 && DECL_NAME (decl) == DECL_NAME (TYPE_NAME (DECL_CONTEXT (decl))))
26040 /* Also ignore the artificial member typedef for the class name. */
26041 return true;
26042
26043 return false;
26044 }
26045
26046 /* Return TRUE if DECL is a typedef that names a type for linkage
26047 purposes. Such typedefs are produced by the C++ FE for
26048 constructs like:
26049
26050 typedef struct {...} foo;
26051
26052 In that case, there is no typedef variant type produced for foo.
26053 Rather, the TREE_TYPE of the TYPE_DECL of foo is the anonymous
26054 struct type. */
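/* By contrast, for a construct such as

   typedef struct bar {...} foo;

   the tag "bar" already names the type; "foo" is then expected to be an
   ordinary typedef with DECL_ORIGINAL_TYPE set, for which this predicate
   returns false. */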
26055
26056 static bool
26057 is_naming_typedef_decl (const_tree decl)
26058 {
26059 if (decl == NULL_TREE
26060 || TREE_CODE (decl) != TYPE_DECL
26061 || DECL_NAMELESS (decl)
26062 || !is_tagged_type (TREE_TYPE (decl))
26063 || DECL_IS_BUILTIN (decl)
26064 || is_redundant_typedef (decl)
26065 /* It looks like Ada produces TYPE_DECLs that are very similar
26066 to C++ naming typedefs but that have different
26067 semantics. Let's be specific to C++ for now. */
26068 || !is_cxx (decl))
26069 return false;
26070
26071 return (DECL_ORIGINAL_TYPE (decl) == NULL_TREE
26072 && TYPE_NAME (TREE_TYPE (decl)) == decl
26073 && (TYPE_STUB_DECL (TREE_TYPE (decl))
26074 != TYPE_NAME (TREE_TYPE (decl))));
26075 }
26076
26077 /* Looks up the DIE for a context. */
26078
26079 static inline dw_die_ref
26080 lookup_context_die (tree context)
26081 {
26082 if (context)
26083 {
26084 /* Find die that represents this context. */
26085 if (TYPE_P (context))
26086 {
26087 context = TYPE_MAIN_VARIANT (context);
26088 dw_die_ref ctx = lookup_type_die (context);
26089 if (!ctx)
26090 return NULL;
26091 return strip_naming_typedef (context, ctx);
26092 }
26093 else
26094 return lookup_decl_die (context);
26095 }
26096 return comp_unit_die ();
26097 }
26098
26099 /* Returns the DIE for a context. */
26100
26101 static inline dw_die_ref
26102 get_context_die (tree context)
26103 {
26104 if (context)
26105 {
26106 /* Find die that represents this context. */
26107 if (TYPE_P (context))
26108 {
26109 context = TYPE_MAIN_VARIANT (context);
26110 return strip_naming_typedef (context, force_type_die (context));
26111 }
26112 else
26113 return force_decl_die (context);
26114 }
26115 return comp_unit_die ();
26116 }
26117
26118 /* Returns the DIE for DECL. A DIE will always be returned. */
26119
26120 static dw_die_ref
26121 force_decl_die (tree decl)
26122 {
26123 dw_die_ref decl_die;
26124 unsigned saved_external_flag;
26125 tree save_fn = NULL_TREE;
26126 decl_die = lookup_decl_die (decl);
26127 if (!decl_die)
26128 {
26129 dw_die_ref context_die = get_context_die (DECL_CONTEXT (decl));
26130
26131 decl_die = lookup_decl_die (decl);
26132 if (decl_die)
26133 return decl_die;
26134
26135 switch (TREE_CODE (decl))
26136 {
26137 case FUNCTION_DECL:
26138 /* Clear current_function_decl, so that gen_subprogram_die thinks
26139 that this is a declaration. At this point, we just want to force
26140 a declaration DIE. */
26141 save_fn = current_function_decl;
26142 current_function_decl = NULL_TREE;
26143 gen_subprogram_die (decl, context_die);
26144 current_function_decl = save_fn;
26145 break;
26146
26147 case VAR_DECL:
26148 /* Set external flag to force a declaration DIE. Restore it after
26149 the gen_decl_die() call. */
26150 saved_external_flag = DECL_EXTERNAL (decl);
26151 DECL_EXTERNAL (decl) = 1;
26152 gen_decl_die (decl, NULL, NULL, context_die);
26153 DECL_EXTERNAL (decl) = saved_external_flag;
26154 break;
26155
26156 case NAMESPACE_DECL:
26157 if (dwarf_version >= 3 || !dwarf_strict)
26158 dwarf2out_decl (decl);
26159 else
26160 /* DWARF2 has neither DW_TAG_module nor DW_TAG_namespace. */
26161 decl_die = comp_unit_die ();
26162 break;
26163
26164 case TRANSLATION_UNIT_DECL:
26165 decl_die = comp_unit_die ();
26166 break;
26167
26168 default:
26169 gcc_unreachable ();
26170 }
26171
26172 /* We should be able to find the DIE now. */
26173 if (!decl_die)
26174 decl_die = lookup_decl_die (decl);
26175 gcc_assert (decl_die);
26176 }
26177
26178 return decl_die;
26179 }
26180
26181 /* Returns the DIE for TYPE, which must not be a base type. A DIE is
26182 always returned. */
26183
26184 static dw_die_ref
26185 force_type_die (tree type)
26186 {
26187 dw_die_ref type_die;
26188
26189 type_die = lookup_type_die (type);
26190 if (!type_die)
26191 {
26192 dw_die_ref context_die = get_context_die (TYPE_CONTEXT (type));
26193
26194 type_die = modified_type_die (type, TYPE_QUALS_NO_ADDR_SPACE (type),
26195 false, context_die);
26196 gcc_assert (type_die);
26197 }
26198 return type_die;
26199 }
26200
26201 /* Force out any required namespaces to be able to output DECL,
26202 and return the new context_die for it, if it's changed. */
26203
26204 static dw_die_ref
26205 setup_namespace_context (tree thing, dw_die_ref context_die)
26206 {
26207 tree context = (DECL_P (thing)
26208 ? DECL_CONTEXT (thing) : TYPE_CONTEXT (thing));
26209 if (context && TREE_CODE (context) == NAMESPACE_DECL)
26210 /* Force out the namespace. */
26211 context_die = force_decl_die (context);
26212
26213 return context_die;
26214 }
26215
26216 /* Emit a declaration DIE for THING (which is either a DECL or a tagged
26217 type) within its namespace, if appropriate.
26218
26219 For compatibility with older debuggers, namespace DIEs only contain
26220 declarations; all definitions are emitted at CU scope, with
26221 DW_AT_specification pointing to the declaration (like with class
26222 members). */
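/* As a rough sketch, for "namespace N { int i; }" this is expected to
   produce a DW_TAG_namespace DIE for N containing a declaration DIE for i,
   plus a defining DIE for i at CU scope whose DW_AT_specification refers
   back to that declaration. */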
26223
26224 static dw_die_ref
26225 declare_in_namespace (tree thing, dw_die_ref context_die)
26226 {
26227 dw_die_ref ns_context;
26228
26229 if (debug_info_level <= DINFO_LEVEL_TERSE)
26230 return context_die;
26231
26232 /* External declarations in the local scope only need to be emitted
26233 once, not once in the namespace and once in the scope.
26234
26235 This avoids declaring the `extern' below in the
26236 namespace DIE as well as in the innermost scope:
26237
26238 namespace S
26239 {
26240 int i=5;
26241 int foo()
26242 {
26243 int i=8;
26244 extern int i;
26245 return i;
26246 }
26247 }
26248 */
26249 if (DECL_P (thing) && DECL_EXTERNAL (thing) && local_scope_p (context_die))
26250 return context_die;
26251
26252 /* If this decl is from an inlined function, then don't try to emit it in its
26253 namespace, as we will get confused. It would have already been emitted
26254 when the abstract instance of the inline function was emitted anyway. */
26255 if (DECL_P (thing) && DECL_ABSTRACT_ORIGIN (thing))
26256 return context_die;
26257
26258 ns_context = setup_namespace_context (thing, context_die);
26259
26260 if (ns_context != context_die)
26261 {
26262 if (is_fortran () || is_dlang ())
26263 return ns_context;
26264 if (DECL_P (thing))
26265 gen_decl_die (thing, NULL, NULL, ns_context);
26266 else
26267 gen_type_die (thing, ns_context);
26268 }
26269 return context_die;
26270 }
26271
26272 /* Generate a DIE for a namespace or namespace alias. */
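/* For example, a namespace alias such as "namespace A = N;" is expected to
   take the second branch below and produce a DW_TAG_imported_declaration
   named "A" whose DW_AT_import refers to the DIE of N. */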
26273
26274 static void
26275 gen_namespace_die (tree decl, dw_die_ref context_die)
26276 {
26277 dw_die_ref namespace_die;
26278
26279 /* Namespace aliases have a DECL_ABSTRACT_ORIGIN of the namespace
26280 they are an alias of. */
26281 if (DECL_ABSTRACT_ORIGIN (decl) == NULL)
26282 {
26283 /* Output a real namespace or module. */
26284 context_die = setup_namespace_context (decl, comp_unit_die ());
26285 namespace_die = new_die (is_fortran () || is_dlang ()
26286 ? DW_TAG_module : DW_TAG_namespace,
26287 context_die, decl);
26288 /* For Fortran modules defined in a different CU, don't add src coords. */
26289 if (namespace_die->die_tag == DW_TAG_module && DECL_EXTERNAL (decl))
26290 {
26291 const char *name = dwarf2_name (decl, 0);
26292 if (name)
26293 add_name_attribute (namespace_die, name);
26294 }
26295 else
26296 add_name_and_src_coords_attributes (namespace_die, decl);
26297 if (DECL_EXTERNAL (decl))
26298 add_AT_flag (namespace_die, DW_AT_declaration, 1);
26299 equate_decl_number_to_die (decl, namespace_die);
26300 }
26301 else
26302 {
26303 /* Output a namespace alias. */
26304
26305 /* Force out the namespace we are an alias of, if necessary. */
26306 dw_die_ref origin_die
26307 = force_decl_die (DECL_ABSTRACT_ORIGIN (decl));
26308
26309 if (DECL_FILE_SCOPE_P (decl)
26310 || TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL)
26311 context_die = setup_namespace_context (decl, comp_unit_die ());
26312 /* Now create the namespace alias DIE. */
26313 namespace_die = new_die (DW_TAG_imported_declaration, context_die, decl);
26314 add_name_and_src_coords_attributes (namespace_die, decl);
26315 add_AT_die_ref (namespace_die, DW_AT_import, origin_die);
26316 equate_decl_number_to_die (decl, namespace_die);
26317 }
26318 if ((dwarf_version >= 5 || !dwarf_strict)
26319 && lang_hooks.decls.decl_dwarf_attribute (decl,
26320 DW_AT_export_symbols) == 1)
26321 add_AT_flag (namespace_die, DW_AT_export_symbols, 1);
26322
26323 /* Bypass dwarf2_name's check for DECL_NAMELESS. */
26324 if (want_pubnames ())
26325 add_pubname_string (lang_hooks.dwarf_name (decl, 1), namespace_die);
26326 }
26327
26328 /* Generate Dwarf debug information for a decl described by DECL.
26329 The return value is currently only meaningful for PARM_DECLs;
26330 for all other decls it returns NULL.
26331
26332 If DECL is a FIELD_DECL, CTX is required: see the comment for VLR_CONTEXT.
26333 It can be NULL otherwise. */
26334
26335 static dw_die_ref
26336 gen_decl_die (tree decl, tree origin, struct vlr_context *ctx,
26337 dw_die_ref context_die)
26338 {
26339 tree decl_or_origin = decl ? decl : origin;
26340 tree class_origin = NULL, ultimate_origin;
26341
26342 if (DECL_P (decl_or_origin) && DECL_IGNORED_P (decl_or_origin))
26343 return NULL;
26344
26345 switch (TREE_CODE (decl_or_origin))
26346 {
26347 case ERROR_MARK:
26348 break;
26349
26350 case CONST_DECL:
26351 if (!is_fortran () && !is_ada () && !is_dlang ())
26352 {
26353 /* The individual enumerators of an enum type get output when we output
26354 the Dwarf representation of the relevant enum type itself. */
26355 break;
26356 }
26357
26358 /* Emit its type. */
26359 gen_type_die (TREE_TYPE (decl), context_die);
26360
26361 /* And its containing namespace. */
26362 context_die = declare_in_namespace (decl, context_die);
26363
26364 gen_const_die (decl, context_die);
26365 break;
26366
26367 case FUNCTION_DECL:
26368 #if 0
26369 /* FIXME */
26370 /* This doesn't work because the C frontend sets DECL_ABSTRACT_ORIGIN
26371 on local redeclarations of global functions. That seems broken. */
26372 if (current_function_decl != decl)
26373 /* This is only a declaration. */;
26374 #endif
26375
26376 /* We should have abstract copies already and should not generate
26377 stray type DIEs in late LTO dumping. */
26378 if (! early_dwarf)
26379 ;
26380
26381 /* If we're emitting a clone, emit info for the abstract instance. */
26382 else if (origin || DECL_ORIGIN (decl) != decl)
26383 dwarf2out_abstract_function (origin
26384 ? DECL_ORIGIN (origin)
26385 : DECL_ABSTRACT_ORIGIN (decl));
26386
26387 /* If we're emitting a possibly inlined function emit it as
26388 abstract instance. */
26389 else if (cgraph_function_possibly_inlined_p (decl)
26390 && ! DECL_ABSTRACT_P (decl)
26391 && ! class_or_namespace_scope_p (context_die)
26392 /* dwarf2out_abstract_function won't emit a die if this is just
26393 a declaration. We must avoid setting DECL_ABSTRACT_ORIGIN in
26394 that case, because that works only if we have a die. */
26395 && DECL_INITIAL (decl) != NULL_TREE)
26396 dwarf2out_abstract_function (decl);
26397
26398 /* Otherwise we're emitting the primary DIE for this decl. */
26399 else if (debug_info_level > DINFO_LEVEL_TERSE)
26400 {
26401 /* Before we describe the FUNCTION_DECL itself, make sure that we
26402 have its containing type. */
26403 if (!origin)
26404 origin = decl_class_context (decl);
26405 if (origin != NULL_TREE)
26406 gen_type_die (origin, context_die);
26407
26408 /* And its return type. */
26409 gen_type_die (TREE_TYPE (TREE_TYPE (decl)), context_die);
26410
26411 /* And its virtual context. */
26412 if (DECL_VINDEX (decl) != NULL_TREE)
26413 gen_type_die (DECL_CONTEXT (decl), context_die);
26414
26415 /* Make sure we have a member DIE for decl. */
26416 if (origin != NULL_TREE)
26417 gen_type_die_for_member (origin, decl, context_die);
26418
26419 /* And its containing namespace. */
26420 context_die = declare_in_namespace (decl, context_die);
26421 }
26422
26423 /* Now output a DIE to represent the function itself. */
26424 if (decl)
26425 gen_subprogram_die (decl, context_die);
26426 break;
26427
26428 case TYPE_DECL:
26429 /* If we are in terse mode, don't generate any DIEs to represent any
26430 actual typedefs. */
26431 if (debug_info_level <= DINFO_LEVEL_TERSE)
26432 break;
26433
26434 /* In the special case of a TYPE_DECL node representing the declaration
26435 of some type tag, if the given TYPE_DECL is marked as having been
26436 instantiated from some other (original) TYPE_DECL node (e.g. one which
26437 was generated within the original definition of an inline function) we
26438 used to generate a special (abbreviated) DW_TAG_structure_type,
26439 DW_TAG_union_type, or DW_TAG_enumeration_type DIE here. But nothing
26440 should be actually referencing those DIEs, as variable DIEs with that
26441 type would be emitted already in the abstract origin, so it was always
26442 removed during unused type pruning. Don't add anything in this
26443 case. */
26444 if (TYPE_DECL_IS_STUB (decl) && decl_ultimate_origin (decl) != NULL_TREE)
26445 break;
26446
26447 if (is_redundant_typedef (decl))
26448 gen_type_die (TREE_TYPE (decl), context_die);
26449 else
26450 /* Output a DIE to represent the typedef itself. */
26451 gen_typedef_die (decl, context_die);
26452 break;
26453
26454 case LABEL_DECL:
26455 if (debug_info_level >= DINFO_LEVEL_NORMAL)
26456 gen_label_die (decl, context_die);
26457 break;
26458
26459 case VAR_DECL:
26460 case RESULT_DECL:
26461 /* If we are in terse mode, don't generate any DIEs to represent any
26462 variable declarations or definitions. */
26463 if (debug_info_level <= DINFO_LEVEL_TERSE)
26464 break;
26465
26466 /* Avoid generating stray type DIEs during late dwarf dumping.
26467 All types have been dumped early. */
26468 if (early_dwarf
26469 /* ??? But in LTRANS we cannot annotate early created variably
26470 modified type DIEs without copying them and adjusting all
26471 references to them. Dump them again as happens for inlining
26472 which copies both the decl and the types. */
26473 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26474 in VLA bound information for example. */
26475 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26476 current_function_decl)))
26477 {
26478 /* Output any DIEs that are needed to specify the type of this data
26479 object. */
26480 if (decl_by_reference_p (decl_or_origin))
26481 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26482 else
26483 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26484 }
26485
26486 if (early_dwarf)
26487 {
26488 /* And its containing type. */
26489 class_origin = decl_class_context (decl_or_origin);
26490 if (class_origin != NULL_TREE)
26491 gen_type_die_for_member (class_origin, decl_or_origin, context_die);
26492
26493 /* And its containing namespace. */
26494 context_die = declare_in_namespace (decl_or_origin, context_die);
26495 }
26496
26497 /* Now output the DIE to represent the data object itself. This gets
26498 complicated because of the possibility that the VAR_DECL really
26499 represents an inlined instance of a formal parameter for an inline
26500 function. */
26501 ultimate_origin = decl_ultimate_origin (decl_or_origin);
26502 if (ultimate_origin != NULL_TREE
26503 && TREE_CODE (ultimate_origin) == PARM_DECL)
26504 gen_formal_parameter_die (decl, origin,
26505 true /* Emit name attribute. */,
26506 context_die);
26507 else
26508 gen_variable_die (decl, origin, context_die);
26509 break;
26510
26511 case FIELD_DECL:
26512 gcc_assert (ctx != NULL && ctx->struct_type != NULL);
26513 /* Ignore the nameless fields that are used to skip bits but handle C++
26514 anonymous unions and structs. */
26515 if (DECL_NAME (decl) != NULL_TREE
26516 || TREE_CODE (TREE_TYPE (decl)) == UNION_TYPE
26517 || TREE_CODE (TREE_TYPE (decl)) == RECORD_TYPE)
26518 {
26519 gen_type_die (member_declared_type (decl), context_die);
26520 gen_field_die (decl, ctx, context_die);
26521 }
26522 break;
26523
26524 case PARM_DECL:
26525 /* Avoid generating stray type DIEs during late dwarf dumping.
26526 All types have been dumped early. */
26527 if (early_dwarf
26528 /* ??? But in LTRANS we cannot annotate early created variably
26529 modified type DIEs without copying them and adjusting all
26530 references to them. Dump them again as happens for inlining
26531 which copies both the decl and the types. */
26532 /* ??? And even non-LTO needs to re-visit type DIEs to fill
26533 in VLA bound information for example. */
26534 || (decl && variably_modified_type_p (TREE_TYPE (decl),
26535 current_function_decl)))
26536 {
26537 if (DECL_BY_REFERENCE (decl_or_origin))
26538 gen_type_die (TREE_TYPE (TREE_TYPE (decl_or_origin)), context_die);
26539 else
26540 gen_type_die (TREE_TYPE (decl_or_origin), context_die);
26541 }
26542 return gen_formal_parameter_die (decl, origin,
26543 true /* Emit name attribute. */,
26544 context_die);
26545
26546 case NAMESPACE_DECL:
26547 if (dwarf_version >= 3 || !dwarf_strict)
26548 gen_namespace_die (decl, context_die);
26549 break;
26550
26551 case IMPORTED_DECL:
26552 dwarf2out_imported_module_or_decl_1 (decl, DECL_NAME (decl),
26553 DECL_CONTEXT (decl), context_die);
26554 break;
26555
26556 case NAMELIST_DECL:
26557 gen_namelist_decl (DECL_NAME (decl), context_die,
26558 NAMELIST_DECL_ASSOCIATED_DECL (decl));
26559 break;
26560
26561 default:
26562 /* Probably some frontend-internal decl. Assume we don't care. */
26563 gcc_assert ((int)TREE_CODE (decl) > NUM_TREE_CODES);
26564 break;
26565 }
26566
26567 return NULL;
26568 }
26569 \f
26570 /* Output initial debug information for global DECL. Called at the
26571 end of the parsing process.
26572
26573 This is the initial debug generation process. As such, the DIEs
26574 generated may be incomplete. A later debug generation pass
26575 (dwarf2out_late_global_decl) will augment the information generated
26576 in this pass (e.g., with complete location info). */
26577
26578 static void
26579 dwarf2out_early_global_decl (tree decl)
26580 {
26581 set_early_dwarf s;
26582
26583 /* gen_decl_die() will set DECL_ABSTRACT because
26584 cgraph_function_possibly_inlined_p() returns true. This in
26585 turn will cause DW_AT_inline attributes to be set.
26586
26587 This happens because at early dwarf generation, there is no
26588 cgraph information, causing cgraph_function_possibly_inlined_p()
26589 to return true. Trick cgraph_function_possibly_inlined_p()
26590 while we generate dwarf early. */
26591 bool save = symtab->global_info_ready;
26592 symtab->global_info_ready = true;
26593
26594 /* We don't handle TYPE_DECLs. If required, they'll be reached via
26595 other DECLs and they can point to template types or other things
26596 that dwarf2out can't handle when done via dwarf2out_decl. */
26597 if (TREE_CODE (decl) != TYPE_DECL
26598 && TREE_CODE (decl) != PARM_DECL)
26599 {
26600 if (TREE_CODE (decl) == FUNCTION_DECL)
26601 {
26602 tree save_fndecl = current_function_decl;
26603
26604 /* For nested functions, make sure we have DIEs for the parents first
26605 so that all nested DIEs are generated at the proper scope in the
26606 first shot. */
26607 tree context = decl_function_context (decl);
26608 if (context != NULL)
26609 {
26610 dw_die_ref context_die = lookup_decl_die (context);
26611 current_function_decl = context;
26612
26613 /* Avoid emitting DIEs multiple times, but still process CONTEXT
26614 enough so that it lands in its own context. This avoids type
26615 pruning issues later on. */
26616 if (context_die == NULL || is_declaration_die (context_die))
26617 dwarf2out_early_global_decl (context);
26618 }
26619
26620 /* Emit an abstract origin of a function first. This happens
26621 with C++ constructor clones for example and makes
26622 dwarf2out_abstract_function happy which requires the early
26623 DIE of the abstract instance to be present. */
26624 tree origin = DECL_ABSTRACT_ORIGIN (decl);
26625 dw_die_ref origin_die;
26626 if (origin != NULL
26627 /* Do not emit the DIE multiple times but make sure to
26628 process it fully here in case we just saw a declaration. */
26629 && ((origin_die = lookup_decl_die (origin)) == NULL
26630 || is_declaration_die (origin_die)))
26631 {
26632 current_function_decl = origin;
26633 dwarf2out_decl (origin);
26634 }
26635
26636 /* Emit the DIE for decl but avoid doing that multiple times. */
26637 dw_die_ref old_die;
26638 if ((old_die = lookup_decl_die (decl)) == NULL
26639 || is_declaration_die (old_die))
26640 {
26641 current_function_decl = decl;
26642 dwarf2out_decl (decl);
26643 }
26644
26645 current_function_decl = save_fndecl;
26646 }
26647 else
26648 dwarf2out_decl (decl);
26649 }
26650 symtab->global_info_ready = save;
26651 }
26652
26653 /* Return whether EXPR is an expression with the following pattern:
26654 INDIRECT_REF (NOP_EXPR (INTEGER_CST)). */
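/* For instance, a DECL_VALUE_EXPR along the lines of "*(int *) 0x1234"
   (a dereference of a literal address) matches this pattern; such an
   expression cannot refer to text symbols and is therefore safe to
   describe even in LTO (see dwarf2out_late_global_decl below). */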
26655
26656 static bool
26657 is_trivial_indirect_ref (tree expr)
26658 {
26659 if (expr == NULL_TREE || TREE_CODE (expr) != INDIRECT_REF)
26660 return false;
26661
26662 tree nop = TREE_OPERAND (expr, 0);
26663 if (nop == NULL_TREE || TREE_CODE (nop) != NOP_EXPR)
26664 return false;
26665
26666 tree int_cst = TREE_OPERAND (nop, 0);
26667 return int_cst != NULL_TREE && TREE_CODE (int_cst) == INTEGER_CST;
26668 }
26669
26670 /* Output debug information for global decl DECL. Called from
26671 toplev.c after compilation proper has finished. */
26672
26673 static void
26674 dwarf2out_late_global_decl (tree decl)
26675 {
26676 /* Fill-in any location information we were unable to determine
26677 on the first pass. */
26678 if (VAR_P (decl))
26679 {
26680 dw_die_ref die = lookup_decl_die (decl);
26681
26682 /* We may have to generate full debug late for LTO in case debug
26683 was not enabled at compile-time or the target doesn't support
26684 the LTO early debug scheme. */
26685 if (! die && in_lto_p)
26686 dwarf2out_decl (decl);
26687 else if (die)
26688 {
26689 /* We get called via the symtab code invoking late_global_decl
26690 for symbols that are optimized out.
26691
26692 Do not add locations for those, except if they have a
26693 DECL_VALUE_EXPR, in which case they are relevant for debuggers.
26694 Still don't add a location if the DECL_VALUE_EXPR is not a trivial
26695 INDIRECT_REF expression, as this could generate relocations to
26696 text symbols in LTO object files, which is invalid. */
26697 varpool_node *node = varpool_node::get (decl);
26698 if ((! node || ! node->definition)
26699 && ! (DECL_HAS_VALUE_EXPR_P (decl)
26700 && is_trivial_indirect_ref (DECL_VALUE_EXPR (decl))))
26701 tree_add_const_value_attribute_for_decl (die, decl);
26702 else
26703 add_location_or_const_value_attribute (die, decl, false);
26704 }
26705 }
26706 }
26707
26708 /* Output debug information for type decl DECL. Called from toplev.c
26709 and from language front ends (to record built-in types). */
26710 static void
26711 dwarf2out_type_decl (tree decl, int local)
26712 {
26713 if (!local)
26714 {
26715 set_early_dwarf s;
26716 dwarf2out_decl (decl);
26717 }
26718 }
26719
26720 /* Output debug information for imported module or decl DECL.
26721 NAME is the non-NULL name in the lexical block if the decl has been renamed.
26722 LEXICAL_BLOCK is the lexical block (whose TREE_CODE is BLOCK)
26723 that DECL belongs to.
26724 LEXICAL_BLOCK_DIE is the DIE of LEXICAL_BLOCK. */
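/* In C++ terms, "using namespace N;" is represented by a
   DW_TAG_imported_module DIE, while "using N::foo;" and namespace aliases
   use DW_TAG_imported_declaration; in both cases DW_AT_import points at
   the DIE of the imported entity. */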
26725 static void
26726 dwarf2out_imported_module_or_decl_1 (tree decl,
26727 tree name,
26728 tree lexical_block,
26729 dw_die_ref lexical_block_die)
26730 {
26731 expanded_location xloc;
26732 dw_die_ref imported_die = NULL;
26733 dw_die_ref at_import_die;
26734
26735 if (TREE_CODE (decl) == IMPORTED_DECL)
26736 {
26737 xloc = expand_location (DECL_SOURCE_LOCATION (decl));
26738 decl = IMPORTED_DECL_ASSOCIATED_DECL (decl);
26739 gcc_assert (decl);
26740 }
26741 else
26742 xloc = expand_location (input_location);
26743
26744 if (TREE_CODE (decl) == TYPE_DECL || TREE_CODE (decl) == CONST_DECL)
26745 {
26746 at_import_die = force_type_die (TREE_TYPE (decl));
26747 /* For namespace N { typedef void T; } using N::T; base_type_die
26748 returns NULL, but DW_TAG_imported_declaration requires
26749 the DW_AT_import tag. Force creation of DW_TAG_typedef. */
26750 if (!at_import_die)
26751 {
26752 gcc_assert (TREE_CODE (decl) == TYPE_DECL);
26753 gen_typedef_die (decl, get_context_die (DECL_CONTEXT (decl)));
26754 at_import_die = lookup_type_die (TREE_TYPE (decl));
26755 gcc_assert (at_import_die);
26756 }
26757 }
26758 else
26759 {
26760 at_import_die = lookup_decl_die (decl);
26761 if (!at_import_die)
26762 {
26763 /* If we're trying to avoid duplicate debug info, we may not have
26764 emitted the member decl for this field. Emit it now. */
26765 if (TREE_CODE (decl) == FIELD_DECL)
26766 {
26767 tree type = DECL_CONTEXT (decl);
26768
26769 if (TYPE_CONTEXT (type)
26770 && TYPE_P (TYPE_CONTEXT (type))
26771 && !should_emit_struct_debug (TYPE_CONTEXT (type),
26772 DINFO_USAGE_DIR_USE))
26773 return;
26774 gen_type_die_for_member (type, decl,
26775 get_context_die (TYPE_CONTEXT (type)));
26776 }
26777 if (TREE_CODE (decl) == NAMELIST_DECL)
26778 at_import_die = gen_namelist_decl (DECL_NAME (decl),
26779 get_context_die (DECL_CONTEXT (decl)),
26780 NULL_TREE);
26781 else
26782 at_import_die = force_decl_die (decl);
26783 }
26784 }
26785
26786 if (TREE_CODE (decl) == NAMESPACE_DECL)
26787 {
26788 if (dwarf_version >= 3 || !dwarf_strict)
26789 imported_die = new_die (DW_TAG_imported_module,
26790 lexical_block_die,
26791 lexical_block);
26792 else
26793 return;
26794 }
26795 else
26796 imported_die = new_die (DW_TAG_imported_declaration,
26797 lexical_block_die,
26798 lexical_block);
26799
26800 add_AT_file (imported_die, DW_AT_decl_file, lookup_filename (xloc.file));
26801 add_AT_unsigned (imported_die, DW_AT_decl_line, xloc.line);
26802 if (debug_column_info && xloc.column)
26803 add_AT_unsigned (imported_die, DW_AT_decl_column, xloc.column);
26804 if (name)
26805 add_AT_string (imported_die, DW_AT_name,
26806 IDENTIFIER_POINTER (name));
26807 add_AT_die_ref (imported_die, DW_AT_import, at_import_die);
26808 }
26809
26810 /* Output debug information for imported module or decl DECL.
26811 NAME is the non-NULL name in context if the decl has been renamed.
26812 CHILD is true if decl is one of the renamed decls as part of
26813 importing whole module.
26814 IMPLICIT is set if this hook is called for an implicit import
26815 such as inline namespace. */
26816
26817 static void
26818 dwarf2out_imported_module_or_decl (tree decl, tree name, tree context,
26819 bool child, bool implicit)
26820 {
26821 /* dw_die_ref at_import_die; */
26822 dw_die_ref scope_die;
26823
26824 if (debug_info_level <= DINFO_LEVEL_TERSE)
26825 return;
26826
26827 gcc_assert (decl);
26828
26829 /* For DWARF5, just DW_AT_export_symbols on the DW_TAG_namespace
26830 should be enough; for DWARF4 and older, even if we emit
26831 DW_AT_export_symbols as an extension, add the implicit DW_TAG_imported_module
26832 anyway for the benefit of consumers unaware of DW_AT_export_symbols. */
26833 if (implicit
26834 && dwarf_version >= 5
26835 && lang_hooks.decls.decl_dwarf_attribute (decl,
26836 DW_AT_export_symbols) == 1)
26837 return;
26838
26839 set_early_dwarf s;
26840
26841 /* To emit DW_TAG_imported_module or DW_TAG_imported_declaration, we need
26842 two DIEs: the decl DIE for the reference and a scope DIE. First, get the
26843 DIE for the decl itself. */
26844
26845 /* Get the scope DIE for the decl context. Use comp_unit_die for a global
26846 module or decl. If no DIE is found for non-globals, force a new DIE. */
26847 if (context
26848 && TYPE_P (context)
26849 && !should_emit_struct_debug (context, DINFO_USAGE_DIR_USE))
26850 return;
26851
26852 scope_die = get_context_die (context);
26853
26854 if (child)
26855 {
26856 /* DW_TAG_imported_module was introduced in the DWARFv3 specification, so
26857 there is nothing we can do here. */
26858 if (dwarf_version < 3 && dwarf_strict)
26859 return;
26860
26861 gcc_assert (scope_die->die_child);
26862 gcc_assert (scope_die->die_child->die_tag == DW_TAG_imported_module);
26863 gcc_assert (TREE_CODE (decl) != NAMESPACE_DECL);
26864 scope_die = scope_die->die_child;
26865 }
26866
26867 /* OK, now we have DIEs for decl as well as scope. Emit imported die. */
26868 dwarf2out_imported_module_or_decl_1 (decl, name, context, scope_die);
26869 }
26870
26871 /* Output debug information for namelists. */
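/* For instance, a Fortran "NAMELIST /NML/ A, B" is expected to yield a
   DW_TAG_namelist DIE whose DW_TAG_namelist_item children refer, via
   DW_AT_namelist_items, to the DIEs of A and B. */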
26872
26873 static dw_die_ref
26874 gen_namelist_decl (tree name, dw_die_ref scope_die, tree item_decls)
26875 {
26876 dw_die_ref nml_die, nml_item_die, nml_item_ref_die;
26877 tree value;
26878 unsigned i;
26879
26880 if (debug_info_level <= DINFO_LEVEL_TERSE)
26881 return NULL;
26882
26883 gcc_assert (scope_die != NULL);
26884 nml_die = new_die (DW_TAG_namelist, scope_die, NULL);
26885 add_AT_string (nml_die, DW_AT_name, IDENTIFIER_POINTER (name));
26886
26887 /* If there are no item_decls, we have a nondefining namelist, e.g.
26888 with USE association; hence, set DW_AT_declaration. */
26889 if (item_decls == NULL_TREE)
26890 {
26891 add_AT_flag (nml_die, DW_AT_declaration, 1);
26892 return nml_die;
26893 }
26894
26895 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (item_decls), i, value)
26896 {
26897 nml_item_ref_die = lookup_decl_die (value);
26898 if (!nml_item_ref_die)
26899 nml_item_ref_die = force_decl_die (value);
26900
26901 nml_item_die = new_die (DW_TAG_namelist_item, nml_die, NULL);
26902 add_AT_die_ref (nml_item_die, DW_AT_namelist_items, nml_item_ref_die);
26903 }
26904 return nml_die;
26905 }
26906
26907
26908 /* Write the debugging output for DECL. */
26909
26910 static void
26911 dwarf2out_decl (tree decl)
26912 {
26913 dw_die_ref context_die = comp_unit_die ();
26914
26915 switch (TREE_CODE (decl))
26916 {
26917 case ERROR_MARK:
26918 return;
26919
26920 case FUNCTION_DECL:
26921 /* If we're a nested function, initially use a parent of NULL; if we're
26922 a plain function, this will be fixed up in decls_for_scope. If
26923 we're a method, it will be ignored, since we already have a DIE.
26924 Avoid doing this late though since clones of class methods may
26925 otherwise end up in limbo and create type DIEs late. */
26926 if (early_dwarf
26927 && decl_function_context (decl)
26928 /* But if we're in terse mode, we don't care about scope. */
26929 && debug_info_level > DINFO_LEVEL_TERSE)
26930 context_die = NULL;
26931 break;
26932
26933 case VAR_DECL:
26934 /* For local statics lookup proper context die. */
26935 if (local_function_static (decl))
26936 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26937
26938 /* If we are in terse mode, don't generate any DIEs to represent any
26939 variable declarations or definitions. */
26940 if (debug_info_level <= DINFO_LEVEL_TERSE)
26941 return;
26942 break;
26943
26944 case CONST_DECL:
26945 if (debug_info_level <= DINFO_LEVEL_TERSE)
26946 return;
26947 if (!is_fortran () && !is_ada () && !is_dlang ())
26948 return;
26949 if (TREE_STATIC (decl) && decl_function_context (decl))
26950 context_die = lookup_decl_die (DECL_CONTEXT (decl));
26951 break;
26952
26953 case NAMESPACE_DECL:
26954 case IMPORTED_DECL:
26955 if (debug_info_level <= DINFO_LEVEL_TERSE)
26956 return;
26957 if (lookup_decl_die (decl) != NULL)
26958 return;
26959 break;
26960
26961 case TYPE_DECL:
26962 /* Don't emit stubs for types unless they are needed by other DIEs. */
26963 if (TYPE_DECL_SUPPRESS_DEBUG (decl))
26964 return;
26965
26966 /* Don't bother trying to generate any DIEs to represent any of the
26967 normal built-in types for the language we are compiling. */
26968 if (DECL_IS_BUILTIN (decl))
26969 return;
26970
26971 /* If we are in terse mode, don't generate any DIEs for types. */
26972 if (debug_info_level <= DINFO_LEVEL_TERSE)
26973 return;
26974
26975 /* If we're a function-scope tag, initially use a parent of NULL;
26976 this will be fixed up in decls_for_scope. */
26977 if (decl_function_context (decl))
26978 context_die = NULL;
26979
26980 break;
26981
26982 case NAMELIST_DECL:
26983 break;
26984
26985 default:
26986 return;
26987 }
26988
26989 gen_decl_die (decl, NULL, NULL, context_die);
26990
26991 if (flag_checking)
26992 {
26993 dw_die_ref die = lookup_decl_die (decl);
26994 if (die)
26995 check_die (die);
26996 }
26997 }
26998
26999 /* Write the debugging output for DECL. */
27000
27001 static void
27002 dwarf2out_function_decl (tree decl)
27003 {
27004 dwarf2out_decl (decl);
27005 call_arg_locations = NULL;
27006 call_arg_loc_last = NULL;
27007 call_site_count = -1;
27008 tail_call_site_count = -1;
27009 decl_loc_table->empty ();
27010 cached_dw_loc_list_table->empty ();
27011 }
27012
27013 /* Output a marker (i.e. a label) for the beginning of the generated code for
27014 a lexical block. */
27015
27016 static void
27017 dwarf2out_begin_block (unsigned int line ATTRIBUTE_UNUSED,
27018 unsigned int blocknum)
27019 {
27020 switch_to_section (current_function_section ());
27021 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_BEGIN_LABEL, blocknum);
27022 }
27023
27024 /* Output a marker (i.e. a label) for the end of the generated code for a
27025 lexical block. */
27026
27027 static void
27028 dwarf2out_end_block (unsigned int line ATTRIBUTE_UNUSED, unsigned int blocknum)
27029 {
27030 switch_to_section (current_function_section ());
27031 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_END_LABEL, blocknum);
27032 }
27033
27034 /* Returns nonzero if it is appropriate not to emit any debugging
27035 information for BLOCK, because it doesn't contain any instructions.
27036
27037 Don't allow this for blocks with nested functions or local classes
27038 as we would end up with orphans, and in the presence of scheduling
27039 we may end up calling them anyway. */
27040
27041 static bool
27042 dwarf2out_ignore_block (const_tree block)
27043 {
27044 tree decl;
27045 unsigned int i;
27046
27047 for (decl = BLOCK_VARS (block); decl; decl = DECL_CHAIN (decl))
27048 if (TREE_CODE (decl) == FUNCTION_DECL
27049 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
27050 return 0;
27051 for (i = 0; i < BLOCK_NUM_NONLOCALIZED_VARS (block); i++)
27052 {
27053 decl = BLOCK_NONLOCALIZED_VAR (block, i);
27054 if (TREE_CODE (decl) == FUNCTION_DECL
27055 || (TREE_CODE (decl) == TYPE_DECL && TYPE_DECL_IS_STUB (decl)))
27056 return 0;
27057 }
27058
27059 return 1;
27060 }
27061
27062 /* Hash table routines for file_hash. */
27063
27064 bool
27065 dwarf_file_hasher::equal (dwarf_file_data *p1, const char *p2)
27066 {
27067 return filename_cmp (p1->filename, p2) == 0;
27068 }
27069
27070 hashval_t
27071 dwarf_file_hasher::hash (dwarf_file_data *p)
27072 {
27073 return htab_hash_string (p->filename);
27074 }
27075
27076 /* Lookup FILE_NAME (in the list of filenames that we know about here in
27077 dwarf2out.c) and return its "index". The index of each (known) filename is
27078 just a unique number which is associated with only that one filename. We
27079 need such numbers for the sake of generating labels (in the .debug_sfnames
27080 section) and references to those files numbers (in the .debug_srcinfo
27081 and .debug_macinfo sections). If the filename given as an argument is not
27082 found in our current list, add it to the list and assign it the next
27083 available unique index number. */
27084
27085 static struct dwarf_file_data *
27086 lookup_filename (const char *file_name)
27087 {
27088 struct dwarf_file_data * created;
27089
27090 if (!file_name)
27091 return NULL;
27092
27093 dwarf_file_data **slot
27094 = file_table->find_slot_with_hash (file_name, htab_hash_string (file_name),
27095 INSERT);
27096 if (*slot)
27097 return *slot;
27098
27099 created = ggc_alloc<dwarf_file_data> ();
27100 created->filename = file_name;
27101 created->emitted_number = 0;
27102 *slot = created;
27103 return created;
27104 }
27105
27106 /* If the assembler will construct the file table, then translate the compiler
27107 internal file table number into the assembler file table number, and emit
27108 a .file directive if we haven't already emitted one yet. The file table
27109 numbers are different because we prune debug info for unused variables and
27110 types, which may include filenames. */
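/* For example, the first filename emitted this way results in an assembler
   directive along the lines of

   .file 1 "foo.c"

   with the name possibly remapped first (see remap_debug_filename). */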
27111
27112 static int
27113 maybe_emit_file (struct dwarf_file_data * fd)
27114 {
27115 if (! fd->emitted_number)
27116 {
27117 if (last_emitted_file)
27118 fd->emitted_number = last_emitted_file->emitted_number + 1;
27119 else
27120 fd->emitted_number = 1;
27121 last_emitted_file = fd;
27122
27123 if (output_asm_line_debug_info ())
27124 {
27125 fprintf (asm_out_file, "\t.file %u ", fd->emitted_number);
27126 output_quoted_string (asm_out_file,
27127 remap_debug_filename (fd->filename));
27128 fputc ('\n', asm_out_file);
27129 }
27130 }
27131
27132 return fd->emitted_number;
27133 }
27134
27135 /* Schedule generation of a DW_AT_const_value attribute to DIE.
27136 That generation should happen after function debug info has been
27137 generated. The value of the attribute is the constant value of ARG. */
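/* For instance, for an instantiation such as A<3> of
   "template <int N> struct A;", DIE would typically be the
   DW_TAG_template_value_parameter DIE for N and ARG the constant 3, so
   that the parameter ends up with a DW_AT_const_value of 3. */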
27138
27139 static void
27140 append_entry_to_tmpl_value_parm_die_table (dw_die_ref die, tree arg)
27141 {
27142 die_arg_entry entry;
27143
27144 if (!die || !arg)
27145 return;
27146
27147 gcc_assert (early_dwarf);
27148
27149 if (!tmpl_value_parm_die_table)
27150 vec_alloc (tmpl_value_parm_die_table, 32);
27151
27152 entry.die = die;
27153 entry.arg = arg;
27154 vec_safe_push (tmpl_value_parm_die_table, entry);
27155 }
27156
27157 /* Return TRUE if T is an instance of a generic type, FALSE
27158 otherwise. */
27159
27160 static bool
27161 generic_type_p (tree t)
27162 {
27163 if (t == NULL_TREE || !TYPE_P (t))
27164 return false;
27165 return lang_hooks.get_innermost_generic_parms (t) != NULL_TREE;
27166 }
27167
27168 /* Schedule the generation of the generic parameter dies for the
27169 instance of generic type T. The actual generation is done later
27170 by gen_scheduled_generic_parms_dies. */
27171
27172 static void
27173 schedule_generic_params_dies_gen (tree t)
27174 {
27175 if (!generic_type_p (t))
27176 return;
27177
27178 gcc_assert (early_dwarf);
27179
27180 if (!generic_type_instances)
27181 vec_alloc (generic_type_instances, 256);
27182
27183 vec_safe_push (generic_type_instances, t);
27184 }
27185
27186 /* Add a DW_AT_const_value attribute to DIEs that were scheduled
27187 by append_entry_to_tmpl_value_parm_die_table. This function must
27188 be called after function DIEs have been generated. */
27189
27190 static void
27191 gen_remaining_tmpl_value_param_die_attribute (void)
27192 {
27193 if (tmpl_value_parm_die_table)
27194 {
27195 unsigned i, j;
27196 die_arg_entry *e;
27197
27198 /* We do this in two phases - first get the cases we can
27199 handle during early-finish, preserving those we cannot
27200 (containing symbolic constants where we don't yet know
27201 whether we are going to output the referenced symbols).
27202 For those we try again at late-finish. */
27203 j = 0;
27204 FOR_EACH_VEC_ELT (*tmpl_value_parm_die_table, i, e)
27205 {
27206 if (!e->die->removed
27207 && !tree_add_const_value_attribute (e->die, e->arg))
27208 {
27209 dw_loc_descr_ref loc = NULL;
27210 if (! early_dwarf
27211 && (dwarf_version >= 5 || !dwarf_strict))
27212 loc = loc_descriptor_from_tree (e->arg, 2, NULL);
27213 if (loc)
27214 add_AT_loc (e->die, DW_AT_location, loc);
27215 else
27216 (*tmpl_value_parm_die_table)[j++] = *e;
27217 }
27218 }
27219 tmpl_value_parm_die_table->truncate (j);
27220 }
27221 }
27222
27223 /* Generate generic parameters DIEs for instances of generic types
27224 that have been previously scheduled by
27225 schedule_generic_params_dies_gen. This function must be called
27226 after all the types of the CU have been laid out. */
27227
27228 static void
27229 gen_scheduled_generic_parms_dies (void)
27230 {
27231 unsigned i;
27232 tree t;
27233
27234 if (!generic_type_instances)
27235 return;
27236
27237 FOR_EACH_VEC_ELT (*generic_type_instances, i, t)
27238 if (COMPLETE_TYPE_P (t))
27239 gen_generic_params_dies (t);
27240
27241 generic_type_instances = NULL;
27242 }
27243
27244
27245 /* Replace DW_AT_name for the decl with name. */
27246
27247 static void
27248 dwarf2out_set_name (tree decl, tree name)
27249 {
27250 dw_die_ref die;
27251 dw_attr_node *attr;
27252 const char *dname;
27253
27254 die = TYPE_SYMTAB_DIE (decl);
27255 if (!die)
27256 return;
27257
27258 dname = dwarf2_name (name, 0);
27259 if (!dname)
27260 return;
27261
27262 attr = get_AT (die, DW_AT_name);
27263 if (attr)
27264 {
27265 struct indirect_string_node *node;
27266
27267 node = find_AT_string (dname);
27268 /* Replace the string. */
27269 attr->dw_attr_val.v.val_str = node;
27270 }
27271
27272 else
27273 add_name_attribute (die, dname);
27274 }
27275
27276 /* True if before or during processing of the first function being emitted. */
27277 static bool in_first_function_p = true;
27278 /* True if loc_note during dwarf2out_var_location call might still be
27279 before first real instruction at address equal to .Ltext0. */
27280 static bool maybe_at_text_label_p = true;
27281 /* One above highest N where .LVLN label might be equal to .Ltext0 label. */
27282 static unsigned int first_loclabel_num_not_at_text_label;
27283
27284 /* Look ahead for a real insn, or for a begin stmt marker. */
27285
27286 static rtx_insn *
27287 dwarf2out_next_real_insn (rtx_insn *loc_note)
27288 {
27289 rtx_insn *next_real = NEXT_INSN (loc_note);
27290
27291 while (next_real)
27292 if (INSN_P (next_real))
27293 break;
27294 else
27295 next_real = NEXT_INSN (next_real);
27296
27297 return next_real;
27298 }
27299
27300 /* Called by the final INSN scan whenever we see a var location. We
27301 use it to drop labels in the right places, and throw the location in
27302 our lookup table. */
27303
27304 static void
27305 dwarf2out_var_location (rtx_insn *loc_note)
27306 {
27307 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES + 2];
27308 struct var_loc_node *newloc;
27309 rtx_insn *next_real, *next_note;
27310 rtx_insn *call_insn = NULL;
27311 static const char *last_label;
27312 static const char *last_postcall_label;
27313 static bool last_in_cold_section_p;
27314 static rtx_insn *expected_next_loc_note;
27315 tree decl;
27316 bool var_loc_p;
27317 var_loc_view view = 0;
27318
27319 if (!NOTE_P (loc_note))
27320 {
27321 if (CALL_P (loc_note))
27322 {
27323 maybe_reset_location_view (loc_note, cur_line_info_table);
27324 call_site_count++;
27325 if (SIBLING_CALL_P (loc_note))
27326 tail_call_site_count++;
27327 if (find_reg_note (loc_note, REG_CALL_ARG_LOCATION, NULL_RTX))
27328 {
27329 call_insn = loc_note;
27330 loc_note = NULL;
27331 var_loc_p = false;
27332
27333 next_real = dwarf2out_next_real_insn (call_insn);
27334 next_note = NULL;
27335 cached_next_real_insn = NULL;
27336 goto create_label;
27337 }
27338 if (optimize == 0 && !flag_var_tracking)
27339 {
27340 /* When the var-tracking pass is not running, there is no note
27341 for indirect calls whose target is compile-time known. In this
27342 case, process such calls specifically so that we generate call
27343 sites for them anyway. */
27344 rtx x = PATTERN (loc_note);
27345 if (GET_CODE (x) == PARALLEL)
27346 x = XVECEXP (x, 0, 0);
27347 if (GET_CODE (x) == SET)
27348 x = SET_SRC (x);
27349 if (GET_CODE (x) == CALL)
27350 x = XEXP (x, 0);
27351 if (!MEM_P (x)
27352 || GET_CODE (XEXP (x, 0)) != SYMBOL_REF
27353 || !SYMBOL_REF_DECL (XEXP (x, 0))
27354 || (TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0)))
27355 != FUNCTION_DECL))
27356 {
27357 call_insn = loc_note;
27358 loc_note = NULL;
27359 var_loc_p = false;
27360
27361 next_real = dwarf2out_next_real_insn (call_insn);
27362 next_note = NULL;
27363 cached_next_real_insn = NULL;
27364 goto create_label;
27365 }
27366 }
27367 }
27368 else if (!debug_variable_location_views)
27369 gcc_unreachable ();
27370 else
27371 maybe_reset_location_view (loc_note, cur_line_info_table);
27372
27373 return;
27374 }
27375
27376 var_loc_p = NOTE_KIND (loc_note) == NOTE_INSN_VAR_LOCATION;
27377 if (var_loc_p && !DECL_P (NOTE_VAR_LOCATION_DECL (loc_note)))
27378 return;
27379
27380 /* Optimize processing a large consecutive sequence of location
27381 notes so we don't spend too much time in next_real_insn. If the
27382 next insn is another location note, remember the next_real_insn
27383 calculation for next time. */
27384 next_real = cached_next_real_insn;
27385 if (next_real)
27386 {
27387 if (expected_next_loc_note != loc_note)
27388 next_real = NULL;
27389 }
27390
27391 next_note = NEXT_INSN (loc_note);
27392 if (! next_note
27393 || next_note->deleted ()
27394 || ! NOTE_P (next_note)
27395 || (NOTE_KIND (next_note) != NOTE_INSN_VAR_LOCATION
27396 && NOTE_KIND (next_note) != NOTE_INSN_BEGIN_STMT
27397 && NOTE_KIND (next_note) != NOTE_INSN_INLINE_ENTRY))
27398 next_note = NULL;
27399
27400 if (! next_real)
27401 next_real = dwarf2out_next_real_insn (loc_note);
27402
27403 if (next_note)
27404 {
27405 expected_next_loc_note = next_note;
27406 cached_next_real_insn = next_real;
27407 }
27408 else
27409 cached_next_real_insn = NULL;
27410
27411 /* If there are no instructions which would be affected by this note,
27412 don't do anything. */
27413 if (var_loc_p
27414 && next_real == NULL_RTX
27415 && !NOTE_DURING_CALL_P (loc_note))
27416 return;
27417
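/* At this point we have either a variable location note or a call insn
that needs a call site entry; emit (or reuse) a label for it below. */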
27418 create_label:
27419
27420 if (next_real == NULL_RTX)
27421 next_real = get_last_insn ();
27422
27423 /* If there were any real insns between the note we processed last
27424 time and this note (or if it is the first note), clear
27425 last_{,postcall_}label so that they are not reused this time. */
27426 if (last_var_location_insn == NULL_RTX
27427 || last_var_location_insn != next_real
27428 || last_in_cold_section_p != in_cold_section_p)
27429 {
27430 last_label = NULL;
27431 last_postcall_label = NULL;
27432 }
27433
27434 if (var_loc_p)
27435 {
27436 const char *label
27437 = NOTE_DURING_CALL_P (loc_note) ? last_postcall_label : last_label;
27438 view = cur_line_info_table->view;
27439 decl = NOTE_VAR_LOCATION_DECL (loc_note);
27440 newloc = add_var_loc_to_decl (decl, loc_note, label, view);
27441 if (newloc == NULL)
27442 return;
27443 }
27444 else
27445 {
27446 decl = NULL_TREE;
27447 newloc = NULL;
27448 }
27449
27450 /* If there were no real insns between the note we processed last
27451 time and this note, use the label we emitted last time. Otherwise
27452 create a new label and emit it. */
27453 if (last_label == NULL)
27454 {
27455 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", loclabel_num);
27456 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, "LVL", loclabel_num);
27457 loclabel_num++;
27458 last_label = ggc_strdup (loclabel);
27459 /* See if loclabel might be equal to .Ltext0. If yes,
27460 bump first_loclabel_num_not_at_text_label. */
27461 if (!have_multiple_function_sections
27462 && in_first_function_p
27463 && maybe_at_text_label_p)
27464 {
27465 static rtx_insn *last_start;
27466 rtx_insn *insn;
27467 for (insn = loc_note; insn; insn = previous_insn (insn))
27468 if (insn == last_start)
27469 break;
27470 else if (!NONDEBUG_INSN_P (insn))
27471 continue;
27472 else
27473 {
27474 rtx body = PATTERN (insn);
27475 if (GET_CODE (body) == USE || GET_CODE (body) == CLOBBER)
27476 continue;
27477 /* Inline asm could occupy zero bytes. */
27478 else if (GET_CODE (body) == ASM_INPUT
27479 || asm_noperands (body) >= 0)
27480 continue;
27481 #ifdef HAVE_ATTR_length /* ??? We don't include insn-attr.h. */
27482 else if (HAVE_ATTR_length && get_attr_min_length (insn) == 0)
27483 continue;
27484 #endif
27485 else
27486 {
27487 /* Assume insn has non-zero length. */
27488 maybe_at_text_label_p = false;
27489 break;
27490 }
27491 }
27492 if (maybe_at_text_label_p)
27493 {
27494 last_start = loc_note;
27495 first_loclabel_num_not_at_text_label = loclabel_num;
27496 }
27497 }
27498 }
27499
27500 gcc_assert ((loc_note == NULL_RTX && call_insn != NULL_RTX)
27501 || (loc_note != NULL_RTX && call_insn == NULL_RTX));
27502
27503 if (!var_loc_p)
27504 {
27505 struct call_arg_loc_node *ca_loc
27506 = ggc_cleared_alloc<call_arg_loc_node> ();
27507 rtx_insn *prev = call_insn;
27508
27509 ca_loc->call_arg_loc_note
27510 = find_reg_note (call_insn, REG_CALL_ARG_LOCATION, NULL_RTX);
27511 ca_loc->next = NULL;
27512 ca_loc->label = last_label;
27513 gcc_assert (prev
27514 && (CALL_P (prev)
27515 || (NONJUMP_INSN_P (prev)
27516 && GET_CODE (PATTERN (prev)) == SEQUENCE
27517 && CALL_P (XVECEXP (PATTERN (prev), 0, 0)))));
27518 if (!CALL_P (prev))
27519 prev = as_a <rtx_sequence *> (PATTERN (prev))->insn (0);
27520 ca_loc->tail_call_p = SIBLING_CALL_P (prev);
27521
27522 /* Look for a SYMBOL_REF in the "prev" instruction. */
27523 rtx x = get_call_rtx_from (prev);
27524 if (x)
27525 {
27526 /* Try to get the call symbol, if any. */
27527 if (MEM_P (XEXP (x, 0)))
27528 x = XEXP (x, 0);
27529 /* First, look for a memory access to a symbol_ref. */
27530 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
27531 && SYMBOL_REF_DECL (XEXP (x, 0))
27532 && TREE_CODE (SYMBOL_REF_DECL (XEXP (x, 0))) == FUNCTION_DECL)
27533 ca_loc->symbol_ref = XEXP (x, 0);
27534 /* Otherwise, look at a compile-time known user-level function
27535 declaration. */
27536 else if (MEM_P (x)
27537 && MEM_EXPR (x)
27538 && TREE_CODE (MEM_EXPR (x)) == FUNCTION_DECL)
27539 ca_loc->symbol_ref = XEXP (DECL_RTL (MEM_EXPR (x)), 0);
27540 }
27541
27542 ca_loc->block = insn_scope (prev);
27543 if (call_arg_locations)
27544 call_arg_loc_last->next = ca_loc;
27545 else
27546 call_arg_locations = ca_loc;
27547 call_arg_loc_last = ca_loc;
27548 }
27549 else if (loc_note != NULL_RTX && !NOTE_DURING_CALL_P (loc_note))
27550 {
27551 newloc->label = last_label;
27552 newloc->view = view;
27553 }
27554 else
27555 {
27556 if (!last_postcall_label)
27557 {
27558 sprintf (loclabel, "%s-1", last_label);
27559 last_postcall_label = ggc_strdup (loclabel);
27560 }
27561 newloc->label = last_postcall_label;
27562 /* ??? This view is at last_label, not last_label-1, but we
27563 could only assume view at last_label-1 is zero if we could
27564 assume calls always have length greater than one. This is
27565 probably true in general, though there might be a rare
27566 exception to this rule, e.g. if a call insn is optimized out
27567 by target magic. Then, even the -1 in the label will be
27568 wrong, which might invalidate the range. Anyway, using view,
27569 though technically possibly incorrect, will work as far as
27570 ranges go: since L-1 is in the middle of the call insn,
27571 (L-1).0 and (L-1).V shouldn't make any difference, and having
27572 the loclist entry refer to the .loc entry might be useful, so
27573 leave it like this. */
27574 newloc->view = view;
27575 }
27576
27577 if (var_loc_p && flag_debug_asm)
27578 {
27579 const char *name, *sep, *patstr;
27580 if (decl && DECL_NAME (decl))
27581 name = IDENTIFIER_POINTER (DECL_NAME (decl));
27582 else
27583 name = "";
27584 if (NOTE_VAR_LOCATION_LOC (loc_note))
27585 {
27586 sep = " => ";
27587 patstr = str_pattern_slim (NOTE_VAR_LOCATION_LOC (loc_note));
27588 }
27589 else
27590 {
27591 sep = " ";
27592 patstr = "RESET";
27593 }
27594 fprintf (asm_out_file, "\t%s DEBUG %s%s%s\n", ASM_COMMENT_START,
27595 name, sep, patstr);
27596 }
27597
27598 last_var_location_insn = next_real;
27599 last_in_cold_section_p = in_cold_section_p;
27600 }
27601
27602 /* Check whether BLOCK, a lexical block, is nested within OUTER, or is
27603 OUTER itself. If BOTHWAYS, check not only that BLOCK can reach
27604 OUTER through BLOCK_SUPERCONTEXT links, but also that there is a
27605 path from OUTER to BLOCK through BLOCK_SUBBLOCKs and
27606 BLOCK_FRAGMENT_ORIGIN links. */
27607 static bool
27608 block_within_block_p (tree block, tree outer, bool bothways)
27609 {
27610 if (block == outer)
27611 return true;
27612
27613 /* Quickly check that OUTER is up BLOCK's supercontext chain. */
27614 for (tree context = BLOCK_SUPERCONTEXT (block);
27615 context != outer;
27616 context = BLOCK_SUPERCONTEXT (context))
27617 if (!context || TREE_CODE (context) != BLOCK)
27618 return false;
27619
27620 if (!bothways)
27621 return true;
27622
27623 /* Now check that each block is actually referenced by its
27624 parent. */
27625 for (tree context = BLOCK_SUPERCONTEXT (block); ;
27626 context = BLOCK_SUPERCONTEXT (context))
27627 {
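/* A block fragment has no subblocks of its own (asserted below); its
origin block is the one that carries the subblock list. */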
27628 if (BLOCK_FRAGMENT_ORIGIN (context))
27629 {
27630 gcc_assert (!BLOCK_SUBBLOCKS (context));
27631 context = BLOCK_FRAGMENT_ORIGIN (context);
27632 }
27633 for (tree sub = BLOCK_SUBBLOCKS (context);
27634 sub != block;
27635 sub = BLOCK_CHAIN (sub))
27636 if (!sub)
27637 return false;
27638 if (context == outer)
27639 return true;
27640 else
27641 block = context;
27642 }
27643 }
27644
27645 /* Called during final while assembling the marker of the entry point
27646 for an inlined function. */
27647
27648 static void
27649 dwarf2out_inline_entry (tree block)
27650 {
27651 gcc_assert (debug_inline_points);
27652
27653 /* If we can't represent it, don't bother. */
27654 if (!(dwarf_version >= 3 || !dwarf_strict))
27655 return;
27656
27657 gcc_assert (DECL_P (block_ultimate_origin (block)));
27658
27659 /* Sanity check the block tree. This would catch a case in which
27660 BLOCK got removed from the tree reachable from the outermost
27661 lexical block, but got retained in markers. It would still link
27662 back to its parents, but some ancestor would be missing a link
27663 down the path to the sub BLOCK. If the block got removed, its
27664 BLOCK_NUMBER will not be a usable value. */
27665 if (flag_checking)
27666 gcc_assert (block_within_block_p (block,
27667 DECL_INITIAL (current_function_decl),
27668 true));
27669
27670 gcc_assert (inlined_function_outer_scope_p (block));
27671 gcc_assert (!lookup_block_die (block));
27672
27673 if (BLOCK_FRAGMENT_ORIGIN (block))
27674 block = BLOCK_FRAGMENT_ORIGIN (block);
27675 /* Can the entry point ever not be at the beginning of an
27676 unfragmented lexical block? */
27677 else if (!(BLOCK_FRAGMENT_CHAIN (block)
27678 || (cur_line_info_table
27679 && !ZERO_VIEW_P (cur_line_info_table->view))))
27680 return;
27681
27682 if (!inline_entry_data_table)
27683 inline_entry_data_table
27684 = hash_table<inline_entry_data_hasher>::create_ggc (10);
27685
27686
27687 inline_entry_data **iedp
27688 = inline_entry_data_table->find_slot_with_hash (block,
27689 htab_hash_pointer (block),
27690 INSERT);
27691 if (*iedp)
27692 /* ??? Ideally, we'd record all entry points for the same inlined
27693 function (some may have been duplicated by e.g. unrolling), but
27694 we have no way to represent that ATM. */
27695 return;
27696
27697 inline_entry_data *ied = *iedp = ggc_cleared_alloc<inline_entry_data> ();
27698 ied->block = block;
27699 ied->label_pfx = BLOCK_INLINE_ENTRY_LABEL;
27700 ied->label_num = BLOCK_NUMBER (block);
27701 if (cur_line_info_table)
27702 ied->view = cur_line_info_table->view;
27703
27704 ASM_OUTPUT_DEBUG_LABEL (asm_out_file, BLOCK_INLINE_ENTRY_LABEL,
27705 BLOCK_NUMBER (block));
27706 }
27707
27708 /* Called from finalize_size_functions for size functions so that their body
27709 can be encoded in the debug info to describe the layout of variable-length
27710 structures. */
27711
27712 static void
27713 dwarf2out_size_function (tree decl)
27714 {
27715 function_to_dwarf_procedure (decl);
27716 }
27717
27718 /* Note in one location list that text section has changed. */
27719
27720 int
27721 var_location_switch_text_section_1 (var_loc_list **slot, void *)
27722 {
27723 var_loc_list *list = *slot;
27724 if (list->first)
27725 list->last_before_switch
27726 = list->last->next ? list->last->next : list->last;
27727 return 1;
27728 }
27729
27730 /* Note in all location lists that text section has changed. */
27731
27732 static void
27733 var_location_switch_text_section (void)
27734 {
27735 if (decl_loc_table == NULL)
27736 return;
27737
27738 decl_loc_table->traverse<void *, var_location_switch_text_section_1> (NULL);
27739 }
27740
27741 /* Create a new line number table. */
27742
27743 static dw_line_info_table *
27744 new_line_info_table (void)
27745 {
27746 dw_line_info_table *table;
27747
27748 table = ggc_cleared_alloc<dw_line_info_table> ();
27749 table->file_num = 1;
27750 table->line_num = 1;
27751 table->is_stmt = DWARF_LINE_DEFAULT_IS_STMT_START;
27752 FORCE_RESET_NEXT_VIEW (table->view);
27753 table->symviews_since_reset = 0;
27754
27755 return table;
27756 }
27757
27758 /* Look up the "current" table into which we emit line info, so
27759 that we don't have to do it for every source line. */
27760
27761 static void
27762 set_cur_line_info_table (section *sec)
27763 {
27764 dw_line_info_table *table;
27765
27766 if (sec == text_section)
27767 table = text_section_line_info;
27768 else if (sec == cold_text_section)
27769 {
27770 table = cold_text_section_line_info;
27771 if (!table)
27772 {
27773 cold_text_section_line_info = table = new_line_info_table ();
27774 table->end_label = cold_end_label;
27775 }
27776 }
27777 else
27778 {
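/* Some other (e.g. per-function or comdat) section: create a fresh
line table whose end label marks the end of the current function or,
with basic-block partitioning, of the current subsection. */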
27779 const char *end_label;
27780
27781 if (crtl->has_bb_partition)
27782 {
27783 if (in_cold_section_p)
27784 end_label = crtl->subsections.cold_section_end_label;
27785 else
27786 end_label = crtl->subsections.hot_section_end_label;
27787 }
27788 else
27789 {
27790 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27791 ASM_GENERATE_INTERNAL_LABEL (label, FUNC_END_LABEL,
27792 current_function_funcdef_no);
27793 end_label = ggc_strdup (label);
27794 }
27795
27796 table = new_line_info_table ();
27797 table->end_label = end_label;
27798
27799 vec_safe_push (separate_line_info, table);
27800 }
27801
27802 if (output_asm_line_debug_info ())
27803 table->is_stmt = (cur_line_info_table
27804 ? cur_line_info_table->is_stmt
27805 : DWARF_LINE_DEFAULT_IS_STMT_START);
27806 cur_line_info_table = table;
27807 }
27808
27809
27810 /* We need to reset the locations at the beginning of each
27811 function. We can't do this in the end_function hook, because the
27812 declarations that use the locations won't have been output when
27813 that hook is called. Also compute have_multiple_function_sections here. */
27814
27815 static void
27816 dwarf2out_begin_function (tree fun)
27817 {
27818 section *sec = function_section (fun);
27819
27820 if (sec != text_section)
27821 have_multiple_function_sections = true;
27822
27823 if (crtl->has_bb_partition && !cold_text_section)
27824 {
27825 gcc_assert (current_function_decl == fun);
27826 cold_text_section = unlikely_text_section ();
27827 switch_to_section (cold_text_section);
27828 ASM_OUTPUT_LABEL (asm_out_file, cold_text_section_label);
27829 switch_to_section (sec);
27830 }
27831
27832 dwarf2out_note_section_used ();
27833 call_site_count = 0;
27834 tail_call_site_count = 0;
27835
27836 set_cur_line_info_table (sec);
27837 FORCE_RESET_NEXT_VIEW (cur_line_info_table->view);
27838 }
27839
27840 /* Helper function of dwarf2out_end_function, called only after emitting
27841 the very first function into assembly. Check if some .debug_loc range
27842 might end with a .LVL* label that could be equal to .Ltext0.
27843 In that case we must force using absolute addresses in .debug_loc ranges,
27844 because this range could be .LVLN-.Ltext0 .. .LVLM-.Ltext0 for
27845 .LVLN == .LVLM == .Ltext0, thus 0 .. 0, which is a .debug_loc
27846 list terminator.
27847 Set have_multiple_function_sections to true in that case and
27848 terminate htab traversal. */
27849
27850 int
27851 find_empty_loc_ranges_at_text_label (var_loc_list **slot, int)
27852 {
27853 var_loc_list *entry = *slot;
27854 struct var_loc_node *node;
27855
27856 node = entry->first;
27857 if (node && node->next && node->next->label)
27858 {
27859 unsigned int i;
27860 const char *label = node->next->label;
27861 char loclabel[MAX_ARTIFICIAL_LABEL_BYTES];
27862
27863 for (i = 0; i < first_loclabel_num_not_at_text_label; i++)
27864 {
27865 ASM_GENERATE_INTERNAL_LABEL (loclabel, "LVL", i);
27866 if (strcmp (label, loclabel) == 0)
27867 {
27868 have_multiple_function_sections = true;
27869 return 0;
27870 }
27871 }
27872 }
27873 return 1;
27874 }
27875
27876 /* Hook called after emitting a function into assembly.
27877 This does something only for the very first function emitted. */
27878
27879 static void
27880 dwarf2out_end_function (unsigned int)
27881 {
27882 if (in_first_function_p
27883 && !have_multiple_function_sections
27884 && first_loclabel_num_not_at_text_label
27885 && decl_loc_table)
27886 decl_loc_table->traverse<int, find_empty_loc_ranges_at_text_label> (0);
27887 in_first_function_p = false;
27888 maybe_at_text_label_p = false;
27889 }
27890
27891 /* Temporary holder for dwarf2out_register_main_translation_unit. Used to let
27892 front-ends register a translation unit even before dwarf2out_init is
27893 called. */
27894 static tree main_translation_unit = NULL_TREE;
27895
27896 /* Hook called by front-ends after they built their main translation unit.
27897 Associate comp_unit_die to UNIT. */
27898
27899 static void
27900 dwarf2out_register_main_translation_unit (tree unit)
27901 {
27902 gcc_assert (TREE_CODE (unit) == TRANSLATION_UNIT_DECL
27903 && main_translation_unit == NULL_TREE);
27904 main_translation_unit = unit;
27905 /* If dwarf2out_init has not been called yet, it will perform the association
27906 itself looking at main_translation_unit. */
27907 if (decl_die_table != NULL)
27908 equate_decl_number_to_die (unit, comp_unit_die ());
27909 }
27910
27911 /* Add OPCODE+VAL as an entry at the end of the opcode array in TABLE. */
27912
27913 static void
27914 push_dw_line_info_entry (dw_line_info_table *table,
27915 enum dw_line_info_opcode opcode, unsigned int val)
27916 {
27917 dw_line_info_entry e;
27918 e.opcode = opcode;
27919 e.val = val;
27920 vec_safe_push (table->entries, e);
27921 }
27922
27923 /* Output a label to mark the beginning of a source code line entry
27924 and record information relating to this source line, in
27925 'line_info_table' for later output of the .debug_line section. */
27926 /* ??? The discriminator parameter ought to be unsigned. */
27927
27928 static void
27929 dwarf2out_source_line (unsigned int line, unsigned int column,
27930 const char *filename,
27931 int discriminator, bool is_stmt)
27932 {
27933 unsigned int file_num;
27934 dw_line_info_table *table;
27935 static var_loc_view lvugid;
27936
27937 if (debug_info_level < DINFO_LEVEL_TERSE)
27938 return;
27939
27940 table = cur_line_info_table;
27941
27942 if (line == 0)
27943 {
27944 if (debug_variable_location_views
27945 && output_asm_line_debug_info ()
27946 && table && !RESETTING_VIEW_P (table->view))
27947 {
27948 /* If we're using the assembler to compute view numbers, we
27949 can't issue a .loc directive for line zero, so we can't
27950 get a view number at this point. We might attempt to
27951 compute it from the previous view, or equate it to a
27952 subsequent view (though it might not be there!), but
27953 since we're omitting the line number entry, we might as
27954 well omit the view number as well. That means pretending
27955 it's a view number zero, which might very well turn out
27956 to be correct. ??? Extend the assembler so that the
27957 compiler could emit e.g. ".locview .LVU#", to output a
27958 view without changing line number information. We'd then
27959 have to count it in symviews_since_reset; when it's omitted,
27960 it doesn't count. */
27961 if (!zero_view_p)
27962 zero_view_p = BITMAP_GGC_ALLOC ();
27963 bitmap_set_bit (zero_view_p, table->view);
27964 if (flag_debug_asm)
27965 {
27966 char label[MAX_ARTIFICIAL_LABEL_BYTES];
27967 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
27968 fprintf (asm_out_file, "\t%s line 0, omitted view ",
27969 ASM_COMMENT_START);
27970 assemble_name (asm_out_file, label);
27971 putc ('\n', asm_out_file);
27972 }
27973 table->view = ++lvugid;
27974 }
27975 return;
27976 }
27977
27978 /* The discriminator column was added in DWARF 4. Simplify the code
27979 below by removing the discriminator if we're not supposed to output it. */
27980 if (dwarf_version < 4 && dwarf_strict)
27981 discriminator = 0;
27982
27983 if (!debug_column_info)
27984 column = 0;
27985
27986 file_num = maybe_emit_file (lookup_filename (filename));
27987
27988 /* ??? TODO: Elide duplicate line number entries. Traditionally,
27989 the debugger has used the second (possibly duplicate) line number
27990 at the beginning of the function to mark the end of the prologue.
27991 We could eliminate any other duplicates within the function. For
27992 Dwarf3, we ought to include the DW_LNS_set_prologue_end mark in
27993 that second line number entry. */
27994 /* Recall that this end-of-prologue indication is *not* the same thing
27995 as the end_prologue debug hook. The NOTE_INSN_PROLOGUE_END note,
27996 to which the hook corresponds, follows the last insn that was
27997 emitted by gen_prologue. What we need is to precede the first insn
27998 that had been emitted after NOTE_INSN_FUNCTION_BEG, i.e. the first
27999 insn that corresponds to something the user wrote. These may be
28000 very different locations once scheduling is enabled. */
28001
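/* Note the "0 &&" below: eliding duplicate line number entries is
intentionally disabled for now, pending the TODO above. */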
28002 if (0 && file_num == table->file_num
28003 && line == table->line_num
28004 && column == table->column_num
28005 && discriminator == table->discrim_num
28006 && is_stmt == table->is_stmt)
28007 return;
28008
28009 switch_to_section (current_function_section ());
28010
28011 /* If requested, emit something human-readable. */
28012 if (flag_debug_asm)
28013 {
28014 if (debug_column_info)
28015 fprintf (asm_out_file, "\t%s %s:%d:%d\n", ASM_COMMENT_START,
28016 filename, line, column);
28017 else
28018 fprintf (asm_out_file, "\t%s %s:%d\n", ASM_COMMENT_START,
28019 filename, line);
28020 }
28021
28022 if (output_asm_line_debug_info ())
28023 {
28024 /* Emit the .loc directive understood by GNU as. */
28025 /* "\t.loc %u %u %u is_stmt %u discriminator %u",
28026 file_num, line, column, is_stmt, discriminator */
28027 fputs ("\t.loc ", asm_out_file);
28028 fprint_ul (asm_out_file, file_num);
28029 putc (' ', asm_out_file);
28030 fprint_ul (asm_out_file, line);
28031 putc (' ', asm_out_file);
28032 fprint_ul (asm_out_file, column);
28033
28034 if (is_stmt != table->is_stmt)
28035 {
28036 #if HAVE_GAS_LOC_STMT
28037 fputs (" is_stmt ", asm_out_file);
28038 putc (is_stmt ? '1' : '0', asm_out_file);
28039 #endif
28040 }
28041 if (SUPPORTS_DISCRIMINATOR && discriminator != 0)
28042 {
28043 gcc_assert (discriminator > 0);
28044 fputs (" discriminator ", asm_out_file);
28045 fprint_ul (asm_out_file, (unsigned long) discriminator);
28046 }
28047 if (debug_variable_location_views)
28048 {
28049 if (!RESETTING_VIEW_P (table->view))
28050 {
28051 table->symviews_since_reset++;
28052 if (table->symviews_since_reset > symview_upper_bound)
28053 symview_upper_bound = table->symviews_since_reset;
28054 /* When we're using the assembler to compute view
28055 numbers, we output symbolic labels after "view" in
28056 .loc directives, and the assembler will set them for
28057 us, so that we can refer to the view numbers in
28058 location lists. The only exceptions are when we know
28059 a view will be zero: "-0" is a forced reset, used
28060 e.g. in the beginning of functions, whereas "0" tells
28061 the assembler to check that there was a PC change
28062 since the previous view, in a way that implicitly
28063 resets the next view. */
28064 fputs (" view ", asm_out_file);
28065 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28066 ASM_GENERATE_INTERNAL_LABEL (label, "LVU", table->view);
28067 assemble_name (asm_out_file, label);
28068 table->view = ++lvugid;
28069 }
28070 else
28071 {
28072 table->symviews_since_reset = 0;
28073 if (FORCE_RESETTING_VIEW_P (table->view))
28074 fputs (" view -0", asm_out_file);
28075 else
28076 fputs (" view 0", asm_out_file);
28077 /* Mark the present view as a zero view. Earlier debug
28078 binds may have already added its id to loclists to be
28079 emitted later, so we can't reuse the id for something
28080 else. However, it's good to know whether a view is
28081 known to be zero, because then we may be able to
28082 optimize out locviews that are all zeros, so take
28083 note of it in zero_view_p. */
28084 if (!zero_view_p)
28085 zero_view_p = BITMAP_GGC_ALLOC ();
28086 bitmap_set_bit (zero_view_p, lvugid);
28087 table->view = ++lvugid;
28088 }
28089 }
28090 putc ('\n', asm_out_file);
28091 }
28092 else
28093 {
28094 unsigned int label_num = ++line_info_label_num;
28095
28096 targetm.asm_out.internal_label (asm_out_file, LINE_CODE_LABEL, label_num);
28097
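/* Record the label just emitted as the address for this entry. With
location views enabled, a non-resetting view uses an "advance address"
entry rather than a "set address" entry, presumably so that view
numbering can continue across entries at the same PC. */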
28098 if (debug_variable_location_views && !RESETTING_VIEW_P (table->view))
28099 push_dw_line_info_entry (table, LI_adv_address, label_num);
28100 else
28101 push_dw_line_info_entry (table, LI_set_address, label_num);
28102 if (debug_variable_location_views)
28103 {
28104 bool resetting = FORCE_RESETTING_VIEW_P (table->view);
28105 if (resetting)
28106 table->view = 0;
28107
28108 if (flag_debug_asm)
28109 fprintf (asm_out_file, "\t%s view %s%d\n",
28110 ASM_COMMENT_START,
28111 resetting ? "-" : "",
28112 table->view);
28113
28114 table->view++;
28115 }
28116 if (file_num != table->file_num)
28117 push_dw_line_info_entry (table, LI_set_file, file_num);
28118 if (discriminator != table->discrim_num)
28119 push_dw_line_info_entry (table, LI_set_discriminator, discriminator);
28120 if (is_stmt != table->is_stmt)
28121 push_dw_line_info_entry (table, LI_negate_stmt, 0);
28122 push_dw_line_info_entry (table, LI_set_line, line);
28123 if (debug_column_info)
28124 push_dw_line_info_entry (table, LI_set_column, column);
28125 }
28126
28127 table->file_num = file_num;
28128 table->line_num = line;
28129 table->column_num = column;
28130 table->discrim_num = discriminator;
28131 table->is_stmt = is_stmt;
28132 table->in_use = true;
28133 }
28134
28135 /* Record the beginning of a new source file. */
28136
28137 static void
28138 dwarf2out_start_source_file (unsigned int lineno, const char *filename)
28139 {
28140 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28141 {
28142 macinfo_entry e;
28143 e.code = DW_MACINFO_start_file;
28144 e.lineno = lineno;
28145 e.info = ggc_strdup (filename);
28146 vec_safe_push (macinfo_table, e);
28147 }
28148 }
28149
28150 /* Record the end of a source file. */
28151
28152 static void
28153 dwarf2out_end_source_file (unsigned int lineno ATTRIBUTE_UNUSED)
28154 {
28155 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28156 {
28157 macinfo_entry e;
28158 e.code = DW_MACINFO_end_file;
28159 e.lineno = lineno;
28160 e.info = NULL;
28161 vec_safe_push (macinfo_table, e);
28162 }
28163 }
28164
28165 /* Called from debug_define in toplev.c. The `buffer' parameter contains
28166 the tail part of the directive line, i.e. the part which is past the
28167 initial whitespace, #, whitespace, directive-name, whitespace part. */
28168
28169 static void
28170 dwarf2out_define (unsigned int lineno ATTRIBUTE_UNUSED,
28171 const char *buffer ATTRIBUTE_UNUSED)
28172 {
28173 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28174 {
28175 macinfo_entry e;
28176 /* Insert a dummy first entry to be able to optimize the whole
28177 predefined macro block using DW_MACRO_import. */
28178 if (macinfo_table->is_empty () && lineno <= 1)
28179 {
28180 e.code = 0;
28181 e.lineno = 0;
28182 e.info = NULL;
28183 vec_safe_push (macinfo_table, e);
28184 }
28185 e.code = DW_MACINFO_define;
28186 e.lineno = lineno;
28187 e.info = ggc_strdup (buffer);
28188 vec_safe_push (macinfo_table, e);
28189 }
28190 }
28191
28192 /* Called from debug_undef in toplev.c. The `buffer' parameter contains
28193 the tail part of the directive line, i.e. the part which is past the
28194 initial whitespace, #, whitespace, directive-name, whitespace part. */
28195
28196 static void
28197 dwarf2out_undef (unsigned int lineno ATTRIBUTE_UNUSED,
28198 const char *buffer ATTRIBUTE_UNUSED)
28199 {
28200 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28201 {
28202 macinfo_entry e;
28203 /* Insert a dummy first entry to be able to optimize the whole
28204 predefined macro block using DW_MACRO_import. */
28205 if (macinfo_table->is_empty () && lineno <= 1)
28206 {
28207 e.code = 0;
28208 e.lineno = 0;
28209 e.info = NULL;
28210 vec_safe_push (macinfo_table, e);
28211 }
28212 e.code = DW_MACINFO_undef;
28213 e.lineno = lineno;
28214 e.info = ggc_strdup (buffer);
28215 vec_safe_push (macinfo_table, e);
28216 }
28217 }
28218
28219 /* Helpers to manipulate the hash table of macinfo entries. */
28220
28221 struct macinfo_entry_hasher : nofree_ptr_hash <macinfo_entry>
28222 {
28223 static inline hashval_t hash (const macinfo_entry *);
28224 static inline bool equal (const macinfo_entry *, const macinfo_entry *);
28225 };
28226
28227 inline hashval_t
28228 macinfo_entry_hasher::hash (const macinfo_entry *entry)
28229 {
28230 return htab_hash_string (entry->info);
28231 }
28232
28233 inline bool
28234 macinfo_entry_hasher::equal (const macinfo_entry *entry1,
28235 const macinfo_entry *entry2)
28236 {
28237 return !strcmp (entry1->info, entry2->info);
28238 }
28239
28240 typedef hash_table<macinfo_entry_hasher> macinfo_hash_type;
28241
28242 /* Output a single .debug_macinfo entry. */
28243
28244 static void
28245 output_macinfo_op (macinfo_entry *ref)
28246 {
28247 int file_num;
28248 size_t len;
28249 struct indirect_string_node *node;
28250 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28251 struct dwarf_file_data *fd;
28252
28253 switch (ref->code)
28254 {
28255 case DW_MACINFO_start_file:
28256 fd = lookup_filename (ref->info);
28257 file_num = maybe_emit_file (fd);
28258 dw2_asm_output_data (1, DW_MACINFO_start_file, "Start new file");
28259 dw2_asm_output_data_uleb128 (ref->lineno,
28260 "Included from line number %lu",
28261 (unsigned long) ref->lineno);
28262 dw2_asm_output_data_uleb128 (file_num, "file %s", ref->info);
28263 break;
28264 case DW_MACINFO_end_file:
28265 dw2_asm_output_data (1, DW_MACINFO_end_file, "End file");
28266 break;
28267 case DW_MACINFO_define:
28268 case DW_MACINFO_undef:
28269 len = strlen (ref->info) + 1;
28270 if (!dwarf_strict
28271 && len > DWARF_OFFSET_SIZE
28272 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28273 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28274 {
28275 ref->code = ref->code == DW_MACINFO_define
28276 ? DW_MACRO_define_strp : DW_MACRO_undef_strp;
28277 output_macinfo_op (ref);
28278 return;
28279 }
28280 dw2_asm_output_data (1, ref->code,
28281 ref->code == DW_MACINFO_define
28282 ? "Define macro" : "Undefine macro");
28283 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28284 (unsigned long) ref->lineno);
28285 dw2_asm_output_nstring (ref->info, -1, "The macro");
28286 break;
28287 case DW_MACRO_define_strp:
28288 case DW_MACRO_undef_strp:
28289 /* NB: dwarf2out_finish performs:
28290 1. save_macinfo_strings
28291 2. hash table traverse of index_string
28292 3. output_macinfo -> output_macinfo_op
28293 4. output_indirect_strings
28294 -> hash table traverse of output_index_string
28295
28296 When output_macinfo_op is called, all index strings have been
28297 added to hash table by save_macinfo_strings and we can't pass
28298 INSERT to find_slot_with_hash which may expand hash table, even
28299 if no insertion is needed, and change hash table traverse order
28300 between index_string and output_index_string. */
28301 node = find_AT_string (ref->info, NO_INSERT);
28302 gcc_assert (node
28303 && (node->form == DW_FORM_strp
28304 || node->form == dwarf_FORM (DW_FORM_strx)));
28305 dw2_asm_output_data (1, ref->code,
28306 ref->code == DW_MACRO_define_strp
28307 ? "Define macro strp"
28308 : "Undefine macro strp");
28309 dw2_asm_output_data_uleb128 (ref->lineno, "At line number %lu",
28310 (unsigned long) ref->lineno);
28311 if (node->form == DW_FORM_strp)
28312 dw2_asm_output_offset (DWARF_OFFSET_SIZE, node->label,
28313 debug_str_section, "The macro: \"%s\"",
28314 ref->info);
28315 else
28316 dw2_asm_output_data_uleb128 (node->index, "The macro: \"%s\"",
28317 ref->info);
28318 break;
28319 case DW_MACRO_import:
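/* For import ops the lineno field is repurposed as a label index; it is
set by optimize_macinfo_range and used again in output_macinfo. */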
28320 dw2_asm_output_data (1, ref->code, "Import");
28321 ASM_GENERATE_INTERNAL_LABEL (label,
28322 DEBUG_MACRO_SECTION_LABEL,
28323 ref->lineno + macinfo_label_base);
28324 dw2_asm_output_offset (DWARF_OFFSET_SIZE, label, NULL, NULL);
28325 break;
28326 default:
28327 fprintf (asm_out_file, "%s unrecognized macinfo code %lu\n",
28328 ASM_COMMENT_START, (unsigned long) ref->code);
28329 break;
28330 }
28331 }
28332
28333 /* Attempt to make a sequence of define/undef macinfo ops shareable with
28334 other compilation units' .debug_macinfo sections. IDX is the first
28335 index of a define/undef op; return the number of ops that should be
28336 emitted in a comdat .debug_macinfo section and emit
28337 a DW_MACRO_import entry referencing it.
28338 If the define/undef entry should be emitted normally, return 0. */
28339
28340 static unsigned
28341 optimize_macinfo_range (unsigned int idx, vec<macinfo_entry, va_gc> *files,
28342 macinfo_hash_type **macinfo_htab)
28343 {
28344 macinfo_entry *first, *second, *cur, *inc;
28345 char linebuf[sizeof (HOST_WIDE_INT) * 3 + 1];
28346 unsigned char checksum[16];
28347 struct md5_ctx ctx;
28348 char *grp_name, *tail;
28349 const char *base;
28350 unsigned int i, count, encoded_filename_len, linebuf_len;
28351 macinfo_entry **slot;
28352
28353 first = &(*macinfo_table)[idx];
28354 second = &(*macinfo_table)[idx + 1];
28355
28356 /* Optimize only if there are at least two consecutive define/undef ops,
28357 and either all of them are before first DW_MACINFO_start_file
28358 with lineno {0,1} (i.e. predefined macro block), or all of them are
28359 in some included header file. */
28360 if (second->code != DW_MACINFO_define && second->code != DW_MACINFO_undef)
28361 return 0;
28362 if (vec_safe_is_empty (files))
28363 {
28364 if (first->lineno > 1 || second->lineno > 1)
28365 return 0;
28366 }
28367 else if (first->lineno == 0)
28368 return 0;
28369
28370 /* Find the last define/undef entry that can be grouped together
28371 with first and at the same time compute md5 checksum of their
28372 codes, linenumbers and strings. */
28373 md5_init_ctx (&ctx);
28374 for (i = idx; macinfo_table->iterate (i, &cur); i++)
28375 if (cur->code != DW_MACINFO_define && cur->code != DW_MACINFO_undef)
28376 break;
28377 else if (vec_safe_is_empty (files) && cur->lineno > 1)
28378 break;
28379 else
28380 {
28381 unsigned char code = cur->code;
28382 md5_process_bytes (&code, 1, &ctx);
28383 checksum_uleb128 (cur->lineno, &ctx);
28384 md5_process_bytes (cur->info, strlen (cur->info) + 1, &ctx);
28385 }
28386 md5_finish_ctx (&ctx, checksum);
28387 count = i - idx;
28388
28389 /* From the containing include filename (if any) pick up just
28390 usable characters from its basename. */
28391 if (vec_safe_is_empty (files))
28392 base = "";
28393 else
28394 base = lbasename (files->last ().info);
28395 for (encoded_filename_len = 0, i = 0; base[i]; i++)
28396 if (ISIDNUM (base[i]) || base[i] == '.')
28397 encoded_filename_len++;
28398 /* Count . at the end. */
28399 if (encoded_filename_len)
28400 encoded_filename_len++;
28401
28402 sprintf (linebuf, HOST_WIDE_INT_PRINT_UNSIGNED, first->lineno);
28403 linebuf_len = strlen (linebuf);
28404
28405 /* The group name format is: wmN.[<encoded filename>.]<lineno>.<md5sum> */
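/* E.g. with a 4-byte DWARF offset size, a predefined-macro block (no
containing file) whose first op is at line 1 would yield something like
"wm4.1." followed by 32 hex md5 digits, depending on the hashed contents. */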
28406 grp_name = XALLOCAVEC (char, 4 + encoded_filename_len + linebuf_len + 1
28407 + 16 * 2 + 1);
28408 memcpy (grp_name, DWARF_OFFSET_SIZE == 4 ? "wm4." : "wm8.", 4);
28409 tail = grp_name + 4;
28410 if (encoded_filename_len)
28411 {
28412 for (i = 0; base[i]; i++)
28413 if (ISIDNUM (base[i]) || base[i] == '.')
28414 *tail++ = base[i];
28415 *tail++ = '.';
28416 }
28417 memcpy (tail, linebuf, linebuf_len);
28418 tail += linebuf_len;
28419 *tail++ = '.';
28420 for (i = 0; i < 16; i++)
28421 sprintf (tail + i * 2, "%02x", checksum[i] & 0xff);
28422
28423 /* Construct a macinfo_entry for DW_MACRO_import
28424 in the empty vector entry before the first define/undef. */
28425 inc = &(*macinfo_table)[idx - 1];
28426 inc->code = DW_MACRO_import;
28427 inc->lineno = 0;
28428 inc->info = ggc_strdup (grp_name);
28429 if (!*macinfo_htab)
28430 *macinfo_htab = new macinfo_hash_type (10);
28431 /* Avoid emitting duplicates. */
28432 slot = (*macinfo_htab)->find_slot (inc, INSERT);
28433 if (*slot != NULL)
28434 {
28435 inc->code = 0;
28436 inc->info = NULL;
28437 /* If such an entry has been used before, just emit
28438 a DW_MACRO_import op. */
28439 inc = *slot;
28440 output_macinfo_op (inc);
28441 /* And clear all macinfo_entry in the range to avoid emitting them
28442 in the second pass. */
28443 for (i = idx; macinfo_table->iterate (i, &cur) && i < idx + count; i++)
28444 {
28445 cur->code = 0;
28446 cur->info = NULL;
28447 }
28448 }
28449 else
28450 {
28451 *slot = inc;
28452 inc->lineno = (*macinfo_htab)->elements ();
28453 output_macinfo_op (inc);
28454 }
28455 return count;
28456 }
28457
28458 /* Save any strings needed by the macinfo table in the debug str
28459 table. All strings must be collected into the table by the time
28460 index_string is called. */
28461
28462 static void
28463 save_macinfo_strings (void)
28464 {
28465 unsigned len;
28466 unsigned i;
28467 macinfo_entry *ref;
28468
28469 for (i = 0; macinfo_table && macinfo_table->iterate (i, &ref); i++)
28470 {
28471 switch (ref->code)
28472 {
28473 /* Match the logic in output_macinfo_op to decide on
28474 indirect strings. */
28475 case DW_MACINFO_define:
28476 case DW_MACINFO_undef:
28477 len = strlen (ref->info) + 1;
28478 if (!dwarf_strict
28479 && len > DWARF_OFFSET_SIZE
28480 && !DWARF2_INDIRECT_STRING_SUPPORT_MISSING_ON_TARGET
28481 && (debug_str_section->common.flags & SECTION_MERGE) != 0)
28482 set_indirect_string (find_AT_string (ref->info));
28483 break;
28484 case DW_MACINFO_start_file:
28485 /* -gsplit-dwarf -g3 will also output filename as indirect
28486 string. */
28487 if (!dwarf_split_debug_info)
28488 break;
28489 /* Fall through. */
28490 case DW_MACRO_define_strp:
28491 case DW_MACRO_undef_strp:
28492 set_indirect_string (find_AT_string (ref->info));
28493 break;
28494 default:
28495 break;
28496 }
28497 }
28498 }
28499
28500 /* Output macinfo section(s). */
28501
28502 static void
28503 output_macinfo (const char *debug_line_label, bool early_lto_debug)
28504 {
28505 unsigned i;
28506 unsigned long length = vec_safe_length (macinfo_table);
28507 macinfo_entry *ref;
28508 vec<macinfo_entry, va_gc> *files = NULL;
28509 macinfo_hash_type *macinfo_htab = NULL;
28510 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
28511
28512 if (! length)
28513 return;
28514
28515 /* output_macinfo* uses these interchangeably. */
28516 gcc_assert ((int) DW_MACINFO_define == (int) DW_MACRO_define
28517 && (int) DW_MACINFO_undef == (int) DW_MACRO_undef
28518 && (int) DW_MACINFO_start_file == (int) DW_MACRO_start_file
28519 && (int) DW_MACINFO_end_file == (int) DW_MACRO_end_file);
28520
28521 /* AIX Assembler inserts the length, so adjust the reference to match the
28522 offset expected by debuggers. */
28523 strcpy (dl_section_ref, debug_line_label);
28524 if (XCOFF_DEBUGGING_INFO)
28525 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
28526
28527 /* For .debug_macro emit the section header. */
28528 if (!dwarf_strict || dwarf_version >= 5)
28529 {
28530 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28531 "DWARF macro version number");
28532 if (DWARF_OFFSET_SIZE == 8)
28533 dw2_asm_output_data (1, 3, "Flags: 64-bit, lineptr present");
28534 else
28535 dw2_asm_output_data (1, 2, "Flags: 32-bit, lineptr present");
28536 dw2_asm_output_offset (DWARF_OFFSET_SIZE, debug_line_label,
28537 debug_line_section, NULL);
28538 }
28539
28540 /* The first loop emits the primary .debug_macinfo section; after
28541 each op is emitted its macinfo_entry is cleared.
28542 If a longer range of define/undef ops can be optimized using
28543 DW_MACRO_import, the DW_MACRO_import op is emitted into (and kept in)
28544 the vector entry just before the first define/undef of the range, and
28545 the define/undef ops themselves are kept, unemitted, for the second pass. */
28546 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28547 {
28548 switch (ref->code)
28549 {
28550 case DW_MACINFO_start_file:
28551 vec_safe_push (files, *ref);
28552 break;
28553 case DW_MACINFO_end_file:
28554 if (!vec_safe_is_empty (files))
28555 files->pop ();
28556 break;
28557 case DW_MACINFO_define:
28558 case DW_MACINFO_undef:
28559 if ((!dwarf_strict || dwarf_version >= 5)
28560 && HAVE_COMDAT_GROUP
28561 && vec_safe_length (files) != 1
28562 && i > 0
28563 && i + 1 < length
28564 && (*macinfo_table)[i - 1].code == 0)
28565 {
28566 unsigned count = optimize_macinfo_range (i, files, &macinfo_htab);
28567 if (count)
28568 {
28569 i += count - 1;
28570 continue;
28571 }
28572 }
28573 break;
28574 case 0:
28575 /* A dummy entry may be inserted at the beginning to be able
28576 to optimize the whole block of predefined macros. */
28577 if (i == 0)
28578 continue;
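/* Fall through. */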
28579 default:
28580 break;
28581 }
28582 output_macinfo_op (ref);
28583 ref->info = NULL;
28584 ref->code = 0;
28585 }
28586
28587 if (!macinfo_htab)
28588 return;
28589
28590 /* Save the number of transparent includes so we can adjust the
28591 label number for the fat LTO object DWARF. */
28592 unsigned macinfo_label_base_adj = macinfo_htab->elements ();
28593
28594 delete macinfo_htab;
28595 macinfo_htab = NULL;
28596
28597 /* If any DW_MACRO_import entries were used, terminate the current
28598 chain at each of them, switch to a new comdat .debug_macinfo
28599 section, and emit the define/undef entries within it. */
28600 for (i = 0; macinfo_table->iterate (i, &ref); i++)
28601 switch (ref->code)
28602 {
28603 case 0:
28604 continue;
28605 case DW_MACRO_import:
28606 {
28607 char label[MAX_ARTIFICIAL_LABEL_BYTES];
28608 tree comdat_key = get_identifier (ref->info);
28609 /* Terminate the previous .debug_macinfo section. */
28610 dw2_asm_output_data (1, 0, "End compilation unit");
28611 targetm.asm_out.named_section (debug_macinfo_section_name,
28612 SECTION_DEBUG
28613 | SECTION_LINKONCE
28614 | (early_lto_debug
28615 ? SECTION_EXCLUDE : 0),
28616 comdat_key);
28617 ASM_GENERATE_INTERNAL_LABEL (label,
28618 DEBUG_MACRO_SECTION_LABEL,
28619 ref->lineno + macinfo_label_base);
28620 ASM_OUTPUT_LABEL (asm_out_file, label);
28621 ref->code = 0;
28622 ref->info = NULL;
28623 dw2_asm_output_data (2, dwarf_version >= 5 ? 5 : 4,
28624 "DWARF macro version number");
28625 if (DWARF_OFFSET_SIZE == 8)
28626 dw2_asm_output_data (1, 1, "Flags: 64-bit");
28627 else
28628 dw2_asm_output_data (1, 0, "Flags: 32-bit");
28629 }
28630 break;
28631 case DW_MACINFO_define:
28632 case DW_MACINFO_undef:
28633 output_macinfo_op (ref);
28634 ref->code = 0;
28635 ref->info = NULL;
28636 break;
28637 default:
28638 gcc_unreachable ();
28639 }
28640
28641 macinfo_label_base += macinfo_label_base_adj;
28642 }
28643
28644 /* Initialize the various sections and labels for dwarf output, using
28645 the LTO early debug variants when EARLY_LTO_DEBUG. Returns the
28646 generation (zero-based count of how many times the function was called). */
28647
28648 static unsigned
28649 init_sections_and_labels (bool early_lto_debug)
28650 {
28651 /* As we may get called multiple times have a generation count for
28652 labels. */
28653 static unsigned generation = 0;
28654
28655 if (early_lto_debug)
28656 {
28657 if (!dwarf_split_debug_info)
28658 {
28659 debug_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28660 SECTION_DEBUG | SECTION_EXCLUDE,
28661 NULL);
28662 debug_abbrev_section = get_section (DEBUG_LTO_ABBREV_SECTION,
28663 SECTION_DEBUG | SECTION_EXCLUDE,
28664 NULL);
28665 debug_macinfo_section_name
28666 = ((dwarf_strict && dwarf_version < 5)
28667 ? DEBUG_LTO_MACINFO_SECTION : DEBUG_LTO_MACRO_SECTION);
28668 debug_macinfo_section = get_section (debug_macinfo_section_name,
28669 SECTION_DEBUG
28670 | SECTION_EXCLUDE, NULL);
28671 }
28672 else
28673 {
28674 /* ??? Which of the following do we need early? */
28675 debug_info_section = get_section (DEBUG_LTO_DWO_INFO_SECTION,
28676 SECTION_DEBUG | SECTION_EXCLUDE,
28677 NULL);
28678 debug_abbrev_section = get_section (DEBUG_LTO_DWO_ABBREV_SECTION,
28679 SECTION_DEBUG | SECTION_EXCLUDE,
28680 NULL);
28681 debug_skeleton_info_section = get_section (DEBUG_LTO_INFO_SECTION,
28682 SECTION_DEBUG
28683 | SECTION_EXCLUDE, NULL);
28684 debug_skeleton_abbrev_section
28685 = get_section (DEBUG_LTO_ABBREV_SECTION,
28686 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28687 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28688 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28689 generation);
28690
28691 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28692 stay in the main .o, but the skeleton_line goes into the split
28693 off dwo. */
28694 debug_skeleton_line_section
28695 = get_section (DEBUG_LTO_LINE_SECTION,
28696 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28697 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28698 DEBUG_SKELETON_LINE_SECTION_LABEL,
28699 generation);
28700 debug_str_offsets_section
28701 = get_section (DEBUG_LTO_DWO_STR_OFFSETS_SECTION,
28702 SECTION_DEBUG | SECTION_EXCLUDE,
28703 NULL);
28704 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28705 DEBUG_SKELETON_INFO_SECTION_LABEL,
28706 generation);
28707 debug_str_dwo_section = get_section (DEBUG_LTO_STR_DWO_SECTION,
28708 DEBUG_STR_DWO_SECTION_FLAGS,
28709 NULL);
28710 debug_macinfo_section_name
28711 = ((dwarf_strict && dwarf_version < 5)
28712 ? DEBUG_LTO_DWO_MACINFO_SECTION : DEBUG_LTO_DWO_MACRO_SECTION);
28713 debug_macinfo_section = get_section (debug_macinfo_section_name,
28714 SECTION_DEBUG | SECTION_EXCLUDE,
28715 NULL);
28716 }
28717 /* For macro info and the file table we have to refer to a
28718 debug_line section. */
28719 debug_line_section = get_section (DEBUG_LTO_LINE_SECTION,
28720 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28721 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28722 DEBUG_LINE_SECTION_LABEL, generation);
28723
28724 debug_str_section = get_section (DEBUG_LTO_STR_SECTION,
28725 DEBUG_STR_SECTION_FLAGS
28726 | SECTION_EXCLUDE, NULL);
28727 if (!dwarf_split_debug_info)
28728 debug_line_str_section
28729 = get_section (DEBUG_LTO_LINE_STR_SECTION,
28730 DEBUG_STR_SECTION_FLAGS | SECTION_EXCLUDE, NULL);
28731 }
28732 else
28733 {
28734 if (!dwarf_split_debug_info)
28735 {
28736 debug_info_section = get_section (DEBUG_INFO_SECTION,
28737 SECTION_DEBUG, NULL);
28738 debug_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28739 SECTION_DEBUG, NULL);
28740 debug_loc_section = get_section (dwarf_version >= 5
28741 ? DEBUG_LOCLISTS_SECTION
28742 : DEBUG_LOC_SECTION,
28743 SECTION_DEBUG, NULL);
28744 debug_macinfo_section_name
28745 = ((dwarf_strict && dwarf_version < 5)
28746 ? DEBUG_MACINFO_SECTION : DEBUG_MACRO_SECTION);
28747 debug_macinfo_section = get_section (debug_macinfo_section_name,
28748 SECTION_DEBUG, NULL);
28749 }
28750 else
28751 {
28752 debug_info_section = get_section (DEBUG_DWO_INFO_SECTION,
28753 SECTION_DEBUG | SECTION_EXCLUDE,
28754 NULL);
28755 debug_abbrev_section = get_section (DEBUG_DWO_ABBREV_SECTION,
28756 SECTION_DEBUG | SECTION_EXCLUDE,
28757 NULL);
28758 debug_addr_section = get_section (DEBUG_ADDR_SECTION,
28759 SECTION_DEBUG, NULL);
28760 debug_skeleton_info_section = get_section (DEBUG_INFO_SECTION,
28761 SECTION_DEBUG, NULL);
28762 debug_skeleton_abbrev_section = get_section (DEBUG_ABBREV_SECTION,
28763 SECTION_DEBUG, NULL);
28764 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_abbrev_section_label,
28765 DEBUG_SKELETON_ABBREV_SECTION_LABEL,
28766 generation);
28767
28768 /* Somewhat confusing detail: The skeleton_[abbrev|info] sections
28769 stay in the main .o, but the skeleton_line goes into the
28770 split off dwo. */
28771 debug_skeleton_line_section
28772 = get_section (DEBUG_DWO_LINE_SECTION,
28773 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28774 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_line_section_label,
28775 DEBUG_SKELETON_LINE_SECTION_LABEL,
28776 generation);
28777 debug_str_offsets_section
28778 = get_section (DEBUG_DWO_STR_OFFSETS_SECTION,
28779 SECTION_DEBUG | SECTION_EXCLUDE, NULL);
28780 ASM_GENERATE_INTERNAL_LABEL (debug_skeleton_info_section_label,
28781 DEBUG_SKELETON_INFO_SECTION_LABEL,
28782 generation);
28783 debug_loc_section = get_section (dwarf_version >= 5
28784 ? DEBUG_DWO_LOCLISTS_SECTION
28785 : DEBUG_DWO_LOC_SECTION,
28786 SECTION_DEBUG | SECTION_EXCLUDE,
28787 NULL);
28788 debug_str_dwo_section = get_section (DEBUG_STR_DWO_SECTION,
28789 DEBUG_STR_DWO_SECTION_FLAGS,
28790 NULL);
28791 debug_macinfo_section_name
28792 = ((dwarf_strict && dwarf_version < 5)
28793 ? DEBUG_DWO_MACINFO_SECTION : DEBUG_DWO_MACRO_SECTION);
28794 debug_macinfo_section = get_section (debug_macinfo_section_name,
28795 SECTION_DEBUG | SECTION_EXCLUDE,
28796 NULL);
28797 }
28798 debug_aranges_section = get_section (DEBUG_ARANGES_SECTION,
28799 SECTION_DEBUG, NULL);
28800 debug_line_section = get_section (DEBUG_LINE_SECTION,
28801 SECTION_DEBUG, NULL);
28802 debug_pubnames_section = get_section (DEBUG_PUBNAMES_SECTION,
28803 SECTION_DEBUG, NULL);
28804 debug_pubtypes_section = get_section (DEBUG_PUBTYPES_SECTION,
28805 SECTION_DEBUG, NULL);
28806 debug_str_section = get_section (DEBUG_STR_SECTION,
28807 DEBUG_STR_SECTION_FLAGS, NULL);
28808 if (!dwarf_split_debug_info && !output_asm_line_debug_info ())
28809 debug_line_str_section = get_section (DEBUG_LINE_STR_SECTION,
28810 DEBUG_STR_SECTION_FLAGS, NULL);
28811
28812 debug_ranges_section = get_section (dwarf_version >= 5
28813 ? DEBUG_RNGLISTS_SECTION
28814 : DEBUG_RANGES_SECTION,
28815 SECTION_DEBUG, NULL);
28816 debug_frame_section = get_section (DEBUG_FRAME_SECTION,
28817 SECTION_DEBUG, NULL);
28818 }
28819
28820 ASM_GENERATE_INTERNAL_LABEL (abbrev_section_label,
28821 DEBUG_ABBREV_SECTION_LABEL, generation);
28822 ASM_GENERATE_INTERNAL_LABEL (debug_info_section_label,
28823 DEBUG_INFO_SECTION_LABEL, generation);
28824 info_section_emitted = false;
28825 ASM_GENERATE_INTERNAL_LABEL (debug_line_section_label,
28826 DEBUG_LINE_SECTION_LABEL, generation);
28827 /* There are up to 4 unique ranges labels per generation.
28828 See also output_rnglists. */
28829 ASM_GENERATE_INTERNAL_LABEL (ranges_section_label,
28830 DEBUG_RANGES_SECTION_LABEL, generation * 4);
28831 if (dwarf_version >= 5 && dwarf_split_debug_info)
28832 ASM_GENERATE_INTERNAL_LABEL (ranges_base_label,
28833 DEBUG_RANGES_SECTION_LABEL,
28834 1 + generation * 4);
28835 ASM_GENERATE_INTERNAL_LABEL (debug_addr_section_label,
28836 DEBUG_ADDR_SECTION_LABEL, generation);
28837 ASM_GENERATE_INTERNAL_LABEL (macinfo_section_label,
28838 (dwarf_strict && dwarf_version < 5)
28839 ? DEBUG_MACINFO_SECTION_LABEL
28840 : DEBUG_MACRO_SECTION_LABEL, generation);
28841 ASM_GENERATE_INTERNAL_LABEL (loc_section_label, DEBUG_LOC_SECTION_LABEL,
28842 generation);
28843
28844 ++generation;
28845 return generation - 1;
28846 }
28847
28848 /* Set up for Dwarf output at the start of compilation. */
28849
28850 static void
28851 dwarf2out_init (const char *filename ATTRIBUTE_UNUSED)
28852 {
28853 /* Allocate the file_table. */
28854 file_table = hash_table<dwarf_file_hasher>::create_ggc (50);
28855
28856 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28857 /* Allocate the decl_die_table. */
28858 decl_die_table = hash_table<decl_die_hasher>::create_ggc (10);
28859
28860 /* Allocate the decl_loc_table. */
28861 decl_loc_table = hash_table<decl_loc_hasher>::create_ggc (10);
28862
28863 /* Allocate the cached_dw_loc_list_table. */
28864 cached_dw_loc_list_table = hash_table<dw_loc_list_hasher>::create_ggc (10);
28865
28866 /* Allocate the initial hunk of the abbrev_die_table. */
28867 vec_alloc (abbrev_die_table, 256);
28868 /* Zero-th entry is allocated, but unused. */
28869 abbrev_die_table->quick_push (NULL);
28870
28871 /* Allocate the dwarf_proc_stack_usage_map. */
28872 dwarf_proc_stack_usage_map = new hash_map<dw_die_ref, int>;
28873
28874 /* Allocate the pubtypes and pubnames vectors. */
28875 vec_alloc (pubname_table, 32);
28876 vec_alloc (pubtype_table, 32);
28877
28878 vec_alloc (incomplete_types, 64);
28879
28880 vec_alloc (used_rtx_array, 32);
28881
28882 if (debug_info_level >= DINFO_LEVEL_VERBOSE)
28883 vec_alloc (macinfo_table, 64);
28884 #endif
28885
28886 /* If front-ends already registered a main translation unit but we were not
28887 ready to perform the association, do this now. */
28888 if (main_translation_unit != NULL_TREE)
28889 equate_decl_number_to_die (main_translation_unit, comp_unit_die ());
28890 }
28891
28892 /* Called before compile () starts outputting functions, variables
28893 and toplevel asms into assembly. */
28894
28895 static void
28896 dwarf2out_assembly_start (void)
28897 {
28898 if (text_section_line_info)
28899 return;
28900
28901 #ifndef DWARF2_LINENO_DEBUGGING_INFO
28902 ASM_GENERATE_INTERNAL_LABEL (text_section_label, TEXT_SECTION_LABEL, 0);
28903 ASM_GENERATE_INTERNAL_LABEL (text_end_label, TEXT_END_LABEL, 0);
28904 ASM_GENERATE_INTERNAL_LABEL (cold_text_section_label,
28905 COLD_TEXT_SECTION_LABEL, 0);
28906 ASM_GENERATE_INTERNAL_LABEL (cold_end_label, COLD_END_LABEL, 0);
28907
28908 switch_to_section (text_section);
28909 ASM_OUTPUT_LABEL (asm_out_file, text_section_label);
28910 #endif
28911
28912 /* Make sure the line number table for .text always exists. */
28913 text_section_line_info = new_line_info_table ();
28914 text_section_line_info->end_label = text_end_label;
28915
28916 #ifdef DWARF2_LINENO_DEBUGGING_INFO
28917 cur_line_info_table = text_section_line_info;
28918 #endif
28919
28920 if (HAVE_GAS_CFI_SECTIONS_DIRECTIVE
28921 && dwarf2out_do_cfi_asm ()
28922 && !dwarf2out_do_eh_frame ())
28923 fprintf (asm_out_file, "\t.cfi_sections\t.debug_frame\n");
28924 }
28925
28926 /* A helper function for dwarf2out_finish called through
28927 htab_traverse. Assign a string its index. All strings must be
28928 collected into the table by the time index_string is called,
28929 because the indexing code relies on htab_traverse to traverse nodes
28930 in the same order for each run. */
28931
28932 int
28933 index_string (indirect_string_node **h, unsigned int *index)
28934 {
28935 indirect_string_node *node = *h;
28936
28937 find_string_form (node);
28938 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28939 {
28940 gcc_assert (node->index == NO_INDEX_ASSIGNED);
28941 node->index = *index;
28942 *index += 1;
28943 }
28944 return 1;
28945 }
28946
28947 /* A helper function for output_indirect_strings called through
28948 htab_traverse. Output the offset to a string and update the
28949 current offset. */
28950
28951 int
28952 output_index_string_offset (indirect_string_node **h, unsigned int *offset)
28953 {
28954 indirect_string_node *node = *h;
28955
28956 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28957 {
28958 /* Assert that this node has been assigned an index. */
28959 gcc_assert (node->index != NO_INDEX_ASSIGNED
28960 && node->index != NOT_INDEXED);
28961 dw2_asm_output_data (DWARF_OFFSET_SIZE, *offset,
28962 "indexed string 0x%x: %s", node->index, node->str);
28963 *offset += strlen (node->str) + 1;
28964 }
28965 return 1;
28966 }
28967
28968 /* A helper function for dwarf2out_finish called through
28969 htab_traverse. Output the indexed string. */
28970
28971 int
28972 output_index_string (indirect_string_node **h, unsigned int *cur_idx)
28973 {
28974 struct indirect_string_node *node = *h;
28975
28976 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28977 {
28978 /* Assert that the strings are output in the same order as their
28979 indexes were assigned. */
28980 gcc_assert (*cur_idx == node->index);
28981 assemble_string (node->str, strlen (node->str) + 1);
28982 *cur_idx += 1;
28983 }
28984 return 1;
28985 }
28986
28987 /* A helper function for output_indirect_strings. Counts the number
28988 of indexed string offsets. Must match the logic of the functions
28989 output_index_string[_offsets] above. */
28990 int
28991 count_index_strings (indirect_string_node **h, unsigned int *last_idx)
28992 {
28993 struct indirect_string_node *node = *h;
28994
28995 if (node->form == dwarf_FORM (DW_FORM_strx) && node->refcount > 0)
28996 *last_idx += 1;
28997 return 1;
28998 }
28999
29000 /* A helper function for dwarf2out_finish called through
29001 htab_traverse. Emit one queued .debug_str string. */
29002
29003 int
29004 output_indirect_string (indirect_string_node **h, enum dwarf_form form)
29005 {
29006 struct indirect_string_node *node = *h;
29007
29008 node->form = find_string_form (node);
29009 if (node->form == form && node->refcount > 0)
29010 {
29011 ASM_OUTPUT_LABEL (asm_out_file, node->label);
29012 assemble_string (node->str, strlen (node->str) + 1);
29013 }
29014
29015 return 1;
29016 }
29017
29018 /* Output the indexed string table. */
29019
29020 static void
29021 output_indirect_strings (void)
29022 {
29023 switch_to_section (debug_str_section);
29024 if (!dwarf_split_debug_info)
29025 debug_str_hash->traverse<enum dwarf_form,
29026 output_indirect_string> (DW_FORM_strp);
29027 else
29028 {
29029 unsigned int offset = 0;
29030 unsigned int cur_idx = 0;
29031
29032 if (skeleton_debug_str_hash)
29033 skeleton_debug_str_hash->traverse<enum dwarf_form,
29034 output_indirect_string> (DW_FORM_strp);
29035
29036 switch_to_section (debug_str_offsets_section);
29037 /* For DWARF5 the .debug_str_offsets[.dwo] section needs a unit
29038 header. Note that we don't need to generate a label to the
29039 actual index table following the header here, because this is
29040 for the split dwarf case only. In a .dwo file there is only
29041 one string offsets table (and one debug info section). But
29042 if we were to start using string offset tables for the main (or
29043 skeleton) unit, then we would have to add a DW_AT_str_offsets_base
29044 pointing to the actual index after the header. Split dwarf
29045 units will never have a string offsets base attribute. When
29046 a split unit is moved into a .dwp file the string offsets can
29047 be found through the .debug_cu_index section table. */
29048 if (dwarf_version >= 5)
29049 {
29050 unsigned int last_idx = 0;
29051 unsigned long str_offsets_length;
29052
29053 debug_str_hash->traverse_noresize
29054 <unsigned int *, count_index_strings> (&last_idx);
29055 str_offsets_length = last_idx * DWARF_OFFSET_SIZE + 4;
29056 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
29057 dw2_asm_output_data (4, 0xffffffff,
29058 "Escape value for 64-bit DWARF extension");
29059 dw2_asm_output_data (DWARF_OFFSET_SIZE, str_offsets_length,
29060 "Length of string offsets unit");
29061 dw2_asm_output_data (2, 5, "DWARF string offsets version");
29062 dw2_asm_output_data (2, 0, "Header zero padding");
29063 }
29064 debug_str_hash->traverse_noresize
29065 <unsigned int *, output_index_string_offset> (&offset);
29066 switch_to_section (debug_str_dwo_section);
29067 debug_str_hash->traverse_noresize<unsigned int *, output_index_string>
29068 (&cur_idx);
29069 }
29070 }
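
/* Illustrative sketch (editorial note, not part of the build): with 32-bit
   DWARF (DWARF_OFFSET_SIZE == 4) and three indexed strings "foo", "bar" and
   "baz", the DWARF5 branch above would emit a .debug_str_offsets.dwo unit
   along the lines of:

	.long	0x10	# Length of string offsets unit (3*4 + 4)
	.value	0x5	# DWARF string offsets version
	.value	0x0	# Header zero padding
	.long	0	# indexed string 0x0: foo
	.long	0x4	# indexed string 0x1: bar
	.long	0x8	# indexed string 0x2: baz

   Each offset is relative to the start of .debug_str.dwo; since the strings
   are NUL-terminated, consecutive offsets advance by strlen + 1.  */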
29071
29072 /* Callback for htab_traverse to assign an index to an entry in the
29073 table, and to write that entry to the .debug_addr section. */
29074
29075 int
29076 output_addr_table_entry (addr_table_entry **slot, unsigned int *cur_index)
29077 {
29078 addr_table_entry *entry = *slot;
29079
29080 if (entry->refcount == 0)
29081 {
29082 gcc_assert (entry->index == NO_INDEX_ASSIGNED
29083 || entry->index == NOT_INDEXED);
29084 return 1;
29085 }
29086
29087 gcc_assert (entry->index == *cur_index);
29088 (*cur_index)++;
29089
29090 switch (entry->kind)
29091 {
29092 case ate_kind_rtx:
29093 dw2_asm_output_addr_rtx (DWARF2_ADDR_SIZE, entry->addr.rtl,
29094 "0x%x", entry->index);
29095 break;
29096 case ate_kind_rtx_dtprel:
29097 gcc_assert (targetm.asm_out.output_dwarf_dtprel);
29098 targetm.asm_out.output_dwarf_dtprel (asm_out_file,
29099 DWARF2_ADDR_SIZE,
29100 entry->addr.rtl);
29101 fputc ('\n', asm_out_file);
29102 break;
29103 case ate_kind_label:
29104 dw2_asm_output_addr (DWARF2_ADDR_SIZE, entry->addr.label,
29105 "0x%x", entry->index);
29106 break;
29107 default:
29108 gcc_unreachable ();
29109 }
29110 return 1;
29111 }
29112
29113 /* A helper function for dwarf2out_finish. Counts the number
29114 of indexed addresses. Must match the logic of the function
29115 output_addr_table_entry above. */
29116 int
29117 count_index_addrs (addr_table_entry **slot, unsigned int *last_idx)
29118 {
29119 addr_table_entry *entry = *slot;
29120
29121 if (entry->refcount > 0)
29122 *last_idx += 1;
29123 return 1;
29124 }
29125
29126 /* Produce the .debug_addr section. */
29127
29128 static void
29129 output_addr_table (void)
29130 {
29131 unsigned int index = 0;
29132 if (addr_index_table == NULL || addr_index_table->size () == 0)
29133 return;
29134
29135 switch_to_section (debug_addr_section);
29136 /* GNU DebugFission (https://gcc.gnu.org/wiki/DebugFission),
29137 which GCC uses to implement -gsplit-dwarf as a GNU extension
29138 to DWARF before DWARF5, didn't have a header for .debug_addr units.
29139 DWARF5 specifies a small header when address tables are used. */
29140 if (dwarf_version >= 5)
29141 {
29142 unsigned int last_idx = 0;
29143 unsigned long addrs_length;
29144
29145 addr_index_table->traverse_noresize
29146 <unsigned int *, count_index_addrs> (&last_idx);
29147 addrs_length = last_idx * DWARF2_ADDR_SIZE + 4;
29148
29149 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
29150 dw2_asm_output_data (4, 0xffffffff,
29151 "Escape value for 64-bit DWARF extension");
29152 dw2_asm_output_data (DWARF_OFFSET_SIZE, addrs_length,
29153 "Length of Address Unit");
29154 dw2_asm_output_data (2, 5, "DWARF addr version");
29155 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Size of Address");
29156 dw2_asm_output_data (1, 0, "Size of Segment Descriptor");
29157 }
29158 ASM_OUTPUT_LABEL (asm_out_file, debug_addr_section_label);
29159
29160 addr_index_table
29161 ->traverse_noresize<unsigned int *, output_addr_table_entry> (&index);
29162 }
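
/* Illustrative sketch (editorial note, not part of the build): with DWARF5,
   32-bit DWARF offsets, 8-byte addresses (DWARF2_ADDR_SIZE == 8) and two
   live entries, the code above would emit roughly (symbol names made up):

	.long	0x14		# Length of Address Unit (2*8 + 4)
	.value	0x5		# DWARF addr version
	.byte	0x8		# Size of Address
	.byte	0		# Size of Segment Descriptor
   .Ldebug_addr0:
	.quad	some_var	# 0x0
	.quad	.LVL3		# 0x1

   The length field excludes itself; the extra 4 bytes cover the version,
   address-size and segment-descriptor-size fields, and ASM_OUTPUT_LABEL
   places debug_addr_section_label after the header, i.e. at the first
   entry.  */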
29163
29164 #if ENABLE_ASSERT_CHECKING
29165 /* Verify that all marks are clear. */
29166
29167 static void
29168 verify_marks_clear (dw_die_ref die)
29169 {
29170 dw_die_ref c;
29171
29172 gcc_assert (! die->die_mark);
29173 FOR_EACH_CHILD (die, c, verify_marks_clear (c));
29174 }
29175 #endif /* ENABLE_ASSERT_CHECKING */
29176
29177 /* Clear the marks for a die and its children.
29178 Do nothing if a mark isn't set. */
29179
29180 static void
29181 prune_unmark_dies (dw_die_ref die)
29182 {
29183 dw_die_ref c;
29184
29185 if (die->die_mark)
29186 die->die_mark = 0;
29187 FOR_EACH_CHILD (die, c, prune_unmark_dies (c));
29188 }
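
/* Editorial note, summarizing the die_mark protocol used by the
   prune_unused_types_* routines below (inferred from their code):

     die_mark == 0   the DIE is not (yet) known to be used,
     die_mark == 1   the DIE is used, but its children haven't been walked,
     die_mark == 2   the DIE is used and its children have been walked.

   prune_unused_types_prune keeps exactly the marked DIEs, and
   prune_unmark_dies above resets all marks to 0 afterwards.  */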
29189
29190 /* Given LOC that is referenced by a DIE we're marking as used, find all
29191 DWARF procedures it references and mark them as used. */
29192
29193 static void
29194 prune_unused_types_walk_loc_descr (dw_loc_descr_ref loc)
29195 {
29196 for (; loc != NULL; loc = loc->dw_loc_next)
29197 switch (loc->dw_loc_opc)
29198 {
29199 case DW_OP_implicit_pointer:
29200 case DW_OP_convert:
29201 case DW_OP_reinterpret:
29202 case DW_OP_GNU_implicit_pointer:
29203 case DW_OP_GNU_convert:
29204 case DW_OP_GNU_reinterpret:
29205 if (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref)
29206 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29207 break;
29208 case DW_OP_GNU_variable_value:
29209 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
29210 {
29211 dw_die_ref ref
29212 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
29213 if (ref == NULL)
29214 break;
29215 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
29216 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
29217 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
29218 }
29219 /* FALLTHRU */
29220 case DW_OP_call2:
29221 case DW_OP_call4:
29222 case DW_OP_call_ref:
29223 case DW_OP_const_type:
29224 case DW_OP_GNU_const_type:
29225 case DW_OP_GNU_parameter_ref:
29226 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_die_ref);
29227 prune_unused_types_mark (loc->dw_loc_oprnd1.v.val_die_ref.die, 1);
29228 break;
29229 case DW_OP_regval_type:
29230 case DW_OP_deref_type:
29231 case DW_OP_GNU_regval_type:
29232 case DW_OP_GNU_deref_type:
29233 gcc_assert (loc->dw_loc_oprnd2.val_class == dw_val_class_die_ref);
29234 prune_unused_types_mark (loc->dw_loc_oprnd2.v.val_die_ref.die, 1);
29235 break;
29236 case DW_OP_entry_value:
29237 case DW_OP_GNU_entry_value:
29238 gcc_assert (loc->dw_loc_oprnd1.val_class == dw_val_class_loc);
29239 prune_unused_types_walk_loc_descr (loc->dw_loc_oprnd1.v.val_loc);
29240 break;
29241 default:
29242 break;
29243 }
29244 }
29245
29246 /* Given DIE that we're marking as used, find any other dies
29247 it references through its attributes and mark them as used. */
29248
29249 static void
29250 prune_unused_types_walk_attribs (dw_die_ref die)
29251 {
29252 dw_attr_node *a;
29253 unsigned ix;
29254
29255 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29256 {
29257 switch (AT_class (a))
29258 {
29259 /* Make sure DWARF procedures referenced by location descriptions will
29260 get emitted. */
29261 case dw_val_class_loc:
29262 prune_unused_types_walk_loc_descr (AT_loc (a));
29263 break;
29264 case dw_val_class_loc_list:
29265 for (dw_loc_list_ref list = AT_loc_list (a);
29266 list != NULL;
29267 list = list->dw_loc_next)
29268 prune_unused_types_walk_loc_descr (list->expr);
29269 break;
29270
29271 case dw_val_class_view_list:
29272 /* This points to a loc_list in another attribute, so it's
29273 already covered. */
29274 break;
29275
29276 case dw_val_class_die_ref:
29277 /* A reference to another DIE.
29278 Make sure that it will get emitted.
29279 If it was broken out into a comdat group, don't follow it. */
29280 if (! AT_ref (a)->comdat_type_p
29281 || a->dw_attr == DW_AT_specification)
29282 prune_unused_types_mark (a->dw_attr_val.v.val_die_ref.die, 1);
29283 break;
29284
29285 case dw_val_class_str:
29286 /* Set the string's refcount to 0 so that prune_unused_types_mark
29287 accounts properly for it. */
29288 a->dw_attr_val.v.val_str->refcount = 0;
29289 break;
29290
29291 default:
29292 break;
29293 }
29294 }
29295 }
29296
29297 /* Mark the children DIEs of DIE that describe its generic parameters and arguments. */
29298
29299 static void
29300 prune_unused_types_mark_generic_parms_dies (dw_die_ref die)
29301 {
29302 dw_die_ref c;
29303
29304 if (die == NULL || die->die_child == NULL)
29305 return;
29306 c = die->die_child;
29307 do
29308 {
29309 if (is_template_parameter (c))
29310 prune_unused_types_mark (c, 1);
29311 c = c->die_sib;
29312 } while (c && c != die->die_child);
29313 }
29314
29315 /* Mark DIE as being used. If DOKIDS is true, then walk down
29316 to DIE's children. */
29317
29318 static void
29319 prune_unused_types_mark (dw_die_ref die, int dokids)
29320 {
29321 dw_die_ref c;
29322
29323 if (die->die_mark == 0)
29324 {
29325 /* We haven't done this node yet. Mark it as used. */
29326 die->die_mark = 1;
29327 /* If this is the DIE of a generic type instantiation,
29328 mark the children DIEs that describe its generic parms and
29329 args. */
29330 prune_unused_types_mark_generic_parms_dies (die);
29331
29332 /* We also have to mark its parents as used.
29333 (But we don't want to mark our parent's kids due to this,
29334 unless it is a class.) */
29335 if (die->die_parent)
29336 prune_unused_types_mark (die->die_parent,
29337 class_scope_p (die->die_parent));
29338
29339 /* Mark any referenced nodes. */
29340 prune_unused_types_walk_attribs (die);
29341
29342 /* If this node is a specification,
29343 also mark the definition, if it exists. */
29344 if (get_AT_flag (die, DW_AT_declaration) && die->die_definition)
29345 prune_unused_types_mark (die->die_definition, 1);
29346 }
29347
29348 if (dokids && die->die_mark != 2)
29349 {
29350 /* We need to walk the children, but haven't done so yet.
29351 Remember that we've walked the kids. */
29352 die->die_mark = 2;
29353
29354 /* If this is an array type, we need to make sure our
29355 kids get marked, even if they're types. If we're
29356 breaking out types into comdat sections, do this
29357 for all type definitions. */
29358 if (die->die_tag == DW_TAG_array_type
29359 || (use_debug_types
29360 && is_type_die (die) && ! is_declaration_die (die)))
29361 FOR_EACH_CHILD (die, c, prune_unused_types_mark (c, 1));
29362 else
29363 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29364 }
29365 }
29366
29367 /* For local classes, check whether any static member functions were emitted
29368 and if so, mark them. */
29369
29370 static void
29371 prune_unused_types_walk_local_classes (dw_die_ref die)
29372 {
29373 dw_die_ref c;
29374
29375 if (die->die_mark == 2)
29376 return;
29377
29378 switch (die->die_tag)
29379 {
29380 case DW_TAG_structure_type:
29381 case DW_TAG_union_type:
29382 case DW_TAG_class_type:
29383 case DW_TAG_interface_type:
29384 break;
29385
29386 case DW_TAG_subprogram:
29387 if (!get_AT_flag (die, DW_AT_declaration)
29388 || die->die_definition != NULL)
29389 prune_unused_types_mark (die, 1);
29390 return;
29391
29392 default:
29393 return;
29394 }
29395
29396 /* Mark children. */
29397 FOR_EACH_CHILD (die, c, prune_unused_types_walk_local_classes (c));
29398 }
29399
29400 /* Walk the tree DIE and mark types that we actually use. */
29401
29402 static void
29403 prune_unused_types_walk (dw_die_ref die)
29404 {
29405 dw_die_ref c;
29406
29407 /* Don't do anything if this node is already marked and
29408 children have been marked as well. */
29409 if (die->die_mark == 2)
29410 return;
29411
29412 switch (die->die_tag)
29413 {
29414 case DW_TAG_structure_type:
29415 case DW_TAG_union_type:
29416 case DW_TAG_class_type:
29417 case DW_TAG_interface_type:
29418 if (die->die_perennial_p)
29419 break;
29420
29421 for (c = die->die_parent; c; c = c->die_parent)
29422 if (c->die_tag == DW_TAG_subprogram)
29423 break;
29424
29425 /* Finding used static member functions inside of classes
29426 is needed just for local classes, because for other classes
29427 static member function DIEs with DW_AT_specification
29428 are emitted outside of the DW_TAG_*_type. If we ever change
29429 it, we'd need to call this even for non-local classes. */
29430 if (c)
29431 prune_unused_types_walk_local_classes (die);
29432
29433 /* It's a type node --- don't mark it. */
29434 return;
29435
29436 case DW_TAG_const_type:
29437 case DW_TAG_packed_type:
29438 case DW_TAG_pointer_type:
29439 case DW_TAG_reference_type:
29440 case DW_TAG_rvalue_reference_type:
29441 case DW_TAG_volatile_type:
29442 case DW_TAG_typedef:
29443 case DW_TAG_array_type:
29444 case DW_TAG_friend:
29445 case DW_TAG_enumeration_type:
29446 case DW_TAG_subroutine_type:
29447 case DW_TAG_string_type:
29448 case DW_TAG_set_type:
29449 case DW_TAG_subrange_type:
29450 case DW_TAG_ptr_to_member_type:
29451 case DW_TAG_file_type:
29452 /* Type nodes are useful only when other DIEs reference them --- don't
29453 mark them. */
29454 /* FALLTHROUGH */
29455
29456 case DW_TAG_dwarf_procedure:
29457 /* Likewise for DWARF procedures. */
29458
29459 if (die->die_perennial_p)
29460 break;
29461
29462 return;
29463
29464 case DW_TAG_variable:
29465 if (flag_debug_only_used_symbols)
29466 {
29467 if (die->die_perennial_p)
29468 break;
29469
29470 /* premark_used_variables marks external variables --- don't mark
29471 them here. But function-local externals are always considered
29472 used. */
29473 if (get_AT (die, DW_AT_external))
29474 {
29475 for (c = die->die_parent; c; c = c->die_parent)
29476 if (c->die_tag == DW_TAG_subprogram)
29477 break;
29478 if (!c)
29479 return;
29480 }
29481 }
29482 /* FALLTHROUGH */
29483
29484 default:
29485 /* Mark everything else. */
29486 break;
29487 }
29488
29489 if (die->die_mark == 0)
29490 {
29491 die->die_mark = 1;
29492
29493 /* Now, mark any dies referenced from here. */
29494 prune_unused_types_walk_attribs (die);
29495 }
29496
29497 die->die_mark = 2;
29498
29499 /* Mark children. */
29500 FOR_EACH_CHILD (die, c, prune_unused_types_walk (c));
29501 }
29502
29503 /* Increment the string counts on strings referred to from DIE's
29504 attributes. */
29505
29506 static void
29507 prune_unused_types_update_strings (dw_die_ref die)
29508 {
29509 dw_attr_node *a;
29510 unsigned ix;
29511
29512 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
29513 if (AT_class (a) == dw_val_class_str)
29514 {
29515 struct indirect_string_node *s = a->dw_attr_val.v.val_str;
29516 s->refcount++;
29517 /* Avoid unnecessarily putting strings that are used less than
29518 twice in the hash table. */
29519 if (s->refcount
29520 == ((DEBUG_STR_SECTION_FLAGS & SECTION_MERGE) ? 1 : 2))
29521 {
29522 indirect_string_node **slot
29523 = debug_str_hash->find_slot_with_hash (s->str,
29524 htab_hash_string (s->str),
29525 INSERT);
29526 gcc_assert (*slot == NULL);
29527 *slot = s;
29528 }
29529 }
29530 }
29531
29532 /* Mark DIE and its children as removed. */
29533
29534 static void
29535 mark_removed (dw_die_ref die)
29536 {
29537 dw_die_ref c;
29538 die->removed = true;
29539 FOR_EACH_CHILD (die, c, mark_removed (c));
29540 }
29541
29542 /* Remove from the tree DIE any dies that aren't marked. */
29543
29544 static void
29545 prune_unused_types_prune (dw_die_ref die)
29546 {
29547 dw_die_ref c;
29548
29549 gcc_assert (die->die_mark);
29550 prune_unused_types_update_strings (die);
29551
29552 if (! die->die_child)
29553 return;
29554
29555 c = die->die_child;
29556 do {
29557 dw_die_ref prev = c, next;
29558 for (c = c->die_sib; ! c->die_mark; c = next)
29559 if (c == die->die_child)
29560 {
29561 /* No marked children between 'prev' and the end of the list. */
29562 if (prev == c)
29563 /* No marked children at all. */
29564 die->die_child = NULL;
29565 else
29566 {
29567 prev->die_sib = c->die_sib;
29568 die->die_child = prev;
29569 }
29570 c->die_sib = NULL;
29571 mark_removed (c);
29572 return;
29573 }
29574 else
29575 {
29576 next = c->die_sib;
29577 c->die_sib = NULL;
29578 mark_removed (c);
29579 }
29580
29581 if (c != prev->die_sib)
29582 prev->die_sib = c;
29583 prune_unused_types_prune (c);
29584 } while (c != die->die_child);
29585 }
29586
29587 /* Remove dies representing declarations that we never use. */
29588
29589 static void
29590 prune_unused_types (void)
29591 {
29592 unsigned int i;
29593 limbo_die_node *node;
29594 comdat_type_node *ctnode;
29595 pubname_entry *pub;
29596 dw_die_ref base_type;
29597
29598 #if ENABLE_ASSERT_CHECKING
29599 /* All the marks should already be clear. */
29600 verify_marks_clear (comp_unit_die ());
29601 for (node = limbo_die_list; node; node = node->next)
29602 verify_marks_clear (node->die);
29603 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29604 verify_marks_clear (ctnode->root_die);
29605 #endif /* ENABLE_ASSERT_CHECKING */
29606
29607 /* Mark types that are used in global variables. */
29608 premark_types_used_by_global_vars ();
29609
29610 /* Mark variables used in the symtab. */
29611 if (flag_debug_only_used_symbols)
29612 premark_used_variables ();
29613
29614 /* Set the mark on nodes that are actually used. */
29615 prune_unused_types_walk (comp_unit_die ());
29616 for (node = limbo_die_list; node; node = node->next)
29617 prune_unused_types_walk (node->die);
29618 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29619 {
29620 prune_unused_types_walk (ctnode->root_die);
29621 prune_unused_types_mark (ctnode->type_die, 1);
29622 }
29623
29624 /* Also set the mark on nodes referenced from the pubname_table. Enumerators
29625 are unusual in that they are pubnames that are the children of pubtypes.
29626 They should only be marked via their parent DW_TAG_enumeration_type die,
29627 not as roots in themselves. */
29628 FOR_EACH_VEC_ELT (*pubname_table, i, pub)
29629 if (pub->die->die_tag != DW_TAG_enumerator)
29630 prune_unused_types_mark (pub->die, 1);
29631 for (i = 0; base_types.iterate (i, &base_type); i++)
29632 prune_unused_types_mark (base_type, 1);
29633
29634 /* For -fvar-tracking-assignments, also set the mark on nodes that could be
29635 referenced by DW_TAG_call_site DW_AT_call_origin (i.e. direct call
29636 callees). */
29637 cgraph_node *cnode;
29638 FOR_EACH_FUNCTION (cnode)
29639 if (cnode->referred_to_p (false))
29640 {
29641 dw_die_ref die = lookup_decl_die (cnode->decl);
29642 if (die == NULL || die->die_mark)
29643 continue;
29644 for (cgraph_edge *e = cnode->callers; e; e = e->next_caller)
29645 if (e->caller != cnode
29646 && opt_for_fn (e->caller->decl, flag_var_tracking_assignments))
29647 {
29648 prune_unused_types_mark (die, 1);
29649 break;
29650 }
29651 }
29652
29653 if (debug_str_hash)
29654 debug_str_hash->empty ();
29655 if (skeleton_debug_str_hash)
29656 skeleton_debug_str_hash->empty ();
29657 prune_unused_types_prune (comp_unit_die ());
29658 for (limbo_die_node **pnode = &limbo_die_list; *pnode; )
29659 {
29660 node = *pnode;
29661 if (!node->die->die_mark)
29662 *pnode = node->next;
29663 else
29664 {
29665 prune_unused_types_prune (node->die);
29666 pnode = &node->next;
29667 }
29668 }
29669 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29670 prune_unused_types_prune (ctnode->root_die);
29671
29672 /* Leave the marks clear. */
29673 prune_unmark_dies (comp_unit_die ());
29674 for (node = limbo_die_list; node; node = node->next)
29675 prune_unmark_dies (node->die);
29676 for (ctnode = comdat_type_list; ctnode; ctnode = ctnode->next)
29677 prune_unmark_dies (ctnode->root_die);
29678 }
29679
29680 /* Helpers to manipulate hash table of comdat type units. */
29681
29682 struct comdat_type_hasher : nofree_ptr_hash <comdat_type_node>
29683 {
29684 static inline hashval_t hash (const comdat_type_node *);
29685 static inline bool equal (const comdat_type_node *, const comdat_type_node *);
29686 };
29687
29688 inline hashval_t
29689 comdat_type_hasher::hash (const comdat_type_node *type_node)
29690 {
29691 hashval_t h;
29692 memcpy (&h, type_node->signature, sizeof (h));
29693 return h;
29694 }
29695
29696 inline bool
29697 comdat_type_hasher::equal (const comdat_type_node *type_node_1,
29698 const comdat_type_node *type_node_2)
29699 {
29700 return (! memcmp (type_node_1->signature, type_node_2->signature,
29701 DWARF_TYPE_SIGNATURE_SIZE));
29702 }
29703
29704 /* Move a DW_AT_{,MIPS_}linkage_name attribute just added to a dw_die_ref
29705 to the location where it would have been added had we known the
29706 DECL_ASSEMBLER_NAME when we added the other attributes. This will
29707 probably improve compactness of the debug info by removing equivalent
29708 abbrevs, and it hides any differences caused by deferring the
29709 computation of the assembler name, triggered by e.g. PCH. */
29710
29711 static inline void
29712 move_linkage_attr (dw_die_ref die)
29713 {
29714 unsigned ix = vec_safe_length (die->die_attr);
29715 dw_attr_node linkage = (*die->die_attr)[ix - 1];
29716
29717 gcc_assert (linkage.dw_attr == DW_AT_linkage_name
29718 || linkage.dw_attr == DW_AT_MIPS_linkage_name);
29719
29720 while (--ix > 0)
29721 {
29722 dw_attr_node *prev = &(*die->die_attr)[ix - 1];
29723
29724 if (prev->dw_attr == DW_AT_decl_line
29725 || prev->dw_attr == DW_AT_decl_column
29726 || prev->dw_attr == DW_AT_name)
29727 break;
29728 }
29729
29730 if (ix != vec_safe_length (die->die_attr) - 1)
29731 {
29732 die->die_attr->pop ();
29733 die->die_attr->quick_insert (ix, linkage);
29734 }
29735 }
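
/* Illustrative example (editorial note): if a subprogram DIE's attributes
   were added in the order

     DW_AT_name, DW_AT_decl_file, DW_AT_decl_line, DW_AT_type,
     DW_AT_external, DW_AT_linkage_name

   then move_linkage_attr pops the trailing DW_AT_linkage_name and reinserts
   it right after DW_AT_decl_line, giving

     DW_AT_name, DW_AT_decl_file, DW_AT_decl_line, DW_AT_linkage_name,
     DW_AT_type, DW_AT_external

   which matches the order used when the assembler name is known up front,
   so both cases can share one abbreviation.  */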
29736
29737 /* Helper function for resolve_addr, mark DW_TAG_base_type nodes
29738 referenced from typed stack ops and count how often they are used. */
29739
29740 static void
29741 mark_base_types (dw_loc_descr_ref loc)
29742 {
29743 dw_die_ref base_type = NULL;
29744
29745 for (; loc; loc = loc->dw_loc_next)
29746 {
29747 switch (loc->dw_loc_opc)
29748 {
29749 case DW_OP_regval_type:
29750 case DW_OP_deref_type:
29751 case DW_OP_GNU_regval_type:
29752 case DW_OP_GNU_deref_type:
29753 base_type = loc->dw_loc_oprnd2.v.val_die_ref.die;
29754 break;
29755 case DW_OP_convert:
29756 case DW_OP_reinterpret:
29757 case DW_OP_GNU_convert:
29758 case DW_OP_GNU_reinterpret:
29759 if (loc->dw_loc_oprnd1.val_class == dw_val_class_unsigned_const)
29760 continue;
29761 /* FALLTHRU */
29762 case DW_OP_const_type:
29763 case DW_OP_GNU_const_type:
29764 base_type = loc->dw_loc_oprnd1.v.val_die_ref.die;
29765 break;
29766 case DW_OP_entry_value:
29767 case DW_OP_GNU_entry_value:
29768 mark_base_types (loc->dw_loc_oprnd1.v.val_loc);
29769 continue;
29770 default:
29771 continue;
29772 }
29773 gcc_assert (base_type->die_parent == comp_unit_die ());
29774 if (base_type->die_mark)
29775 base_type->die_mark++;
29776 else
29777 {
29778 base_types.safe_push (base_type);
29779 base_type->die_mark = 1;
29780 }
29781 }
29782 }
29783
29784 /* Comparison function for sorting marked base types. */
29785
29786 static int
29787 base_type_cmp (const void *x, const void *y)
29788 {
29789 dw_die_ref dx = *(const dw_die_ref *) x;
29790 dw_die_ref dy = *(const dw_die_ref *) y;
29791 unsigned int byte_size1, byte_size2;
29792 unsigned int encoding1, encoding2;
29793 unsigned int align1, align2;
29794 if (dx->die_mark > dy->die_mark)
29795 return -1;
29796 if (dx->die_mark < dy->die_mark)
29797 return 1;
29798 byte_size1 = get_AT_unsigned (dx, DW_AT_byte_size);
29799 byte_size2 = get_AT_unsigned (dy, DW_AT_byte_size);
29800 if (byte_size1 < byte_size2)
29801 return 1;
29802 if (byte_size1 > byte_size2)
29803 return -1;
29804 encoding1 = get_AT_unsigned (dx, DW_AT_encoding);
29805 encoding2 = get_AT_unsigned (dy, DW_AT_encoding);
29806 if (encoding1 < encoding2)
29807 return 1;
29808 if (encoding1 > encoding2)
29809 return -1;
29810 align1 = get_AT_unsigned (dx, DW_AT_alignment);
29811 align2 = get_AT_unsigned (dy, DW_AT_alignment);
29812 if (align1 < align2)
29813 return 1;
29814 if (align1 > align2)
29815 return -1;
29816 return 0;
29817 }
29818
29819 /* Move base types marked by mark_base_types as early as possible
29820 in the CU, sorted by decreasing usage count both to make the
29821 uleb128 references as small as possible and to make sure they
29822 will have die_offset already computed by calc_die_sizes when
29823 the sizes of typed stack loc ops are computed. */
29824
29825 static void
29826 move_marked_base_types (void)
29827 {
29828 unsigned int i;
29829 dw_die_ref base_type, die, c;
29830
29831 if (base_types.is_empty ())
29832 return;
29833
29834 /* Sort by decreasing usage count, they will be added again in that
29835 order later on. */
29836 base_types.qsort (base_type_cmp);
29837 die = comp_unit_die ();
29838 c = die->die_child;
29839 do
29840 {
29841 dw_die_ref prev = c;
29842 c = c->die_sib;
29843 while (c->die_mark)
29844 {
29845 remove_child_with_prev (c, prev);
29846 /* Since some base types were marked, there must be at least
29847 one node other than DW_TAG_base_type. */
29848 gcc_assert (die->die_child != NULL);
29849 c = prev->die_sib;
29850 }
29851 }
29852 while (c != die->die_child);
29853 gcc_assert (die->die_child);
29854 c = die->die_child;
29855 for (i = 0; base_types.iterate (i, &base_type); i++)
29856 {
29857 base_type->die_mark = 0;
29858 base_type->die_sib = c->die_sib;
29859 c->die_sib = base_type;
29860 c = base_type;
29861 }
29862 }
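
/* Editorial note: after the splice above, the most frequently used
   DW_TAG_base_type DIEs become the first children of the CU DIE in emission
   order, so the uleb128 DIE-offset operands of DW_OP_convert,
   DW_OP_regval_type etc. that reference them stay small, and those offsets
   are already known when the typed stack ops are sized.  */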
29863
29864 /* Helper function for resolve_addr. Attempt to resolve
29865 one CONST_STRING; return true if successful. Similarly, verify that
29866 SYMBOL_REFs refer to variables emitted in the current CU. */
29867
29868 static bool
29869 resolve_one_addr (rtx *addr)
29870 {
29871 rtx rtl = *addr;
29872
29873 if (GET_CODE (rtl) == CONST_STRING)
29874 {
29875 size_t len = strlen (XSTR (rtl, 0)) + 1;
29876 tree t = build_string (len, XSTR (rtl, 0));
29877 tree tlen = size_int (len - 1);
29878 TREE_TYPE (t)
29879 = build_array_type (char_type_node, build_index_type (tlen));
29880 rtl = lookup_constant_def (t);
29881 if (!rtl || !MEM_P (rtl))
29882 return false;
29883 rtl = XEXP (rtl, 0);
29884 if (GET_CODE (rtl) == SYMBOL_REF
29885 && SYMBOL_REF_DECL (rtl)
29886 && !TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29887 return false;
29888 vec_safe_push (used_rtx_array, rtl);
29889 *addr = rtl;
29890 return true;
29891 }
29892
29893 if (GET_CODE (rtl) == SYMBOL_REF
29894 && SYMBOL_REF_DECL (rtl))
29895 {
29896 if (TREE_CONSTANT_POOL_ADDRESS_P (rtl))
29897 {
29898 if (!TREE_ASM_WRITTEN (DECL_INITIAL (SYMBOL_REF_DECL (rtl))))
29899 return false;
29900 }
29901 else if (!TREE_ASM_WRITTEN (SYMBOL_REF_DECL (rtl)))
29902 return false;
29903 }
29904
29905 if (GET_CODE (rtl) == CONST)
29906 {
29907 subrtx_ptr_iterator::array_type array;
29908 FOR_EACH_SUBRTX_PTR (iter, array, &XEXP (rtl, 0), ALL)
29909 if (!resolve_one_addr (*iter))
29910 return false;
29911 }
29912
29913 return true;
29914 }
29915
29916 /* For a STRING_CST, return the SYMBOL_REF of its constant pool entry,
29917 if possible, and create a DW_TAG_dwarf_procedure that can be referenced
29918 from DW_OP_implicit_pointer if the string hasn't been seen yet. */
29919
29920 static rtx
29921 string_cst_pool_decl (tree t)
29922 {
29923 rtx rtl = output_constant_def (t, 1);
29924 unsigned char *array;
29925 dw_loc_descr_ref l;
29926 tree decl;
29927 size_t len;
29928 dw_die_ref ref;
29929
29930 if (!rtl || !MEM_P (rtl))
29931 return NULL_RTX;
29932 rtl = XEXP (rtl, 0);
29933 if (GET_CODE (rtl) != SYMBOL_REF
29934 || SYMBOL_REF_DECL (rtl) == NULL_TREE)
29935 return NULL_RTX;
29936
29937 decl = SYMBOL_REF_DECL (rtl);
29938 if (!lookup_decl_die (decl))
29939 {
29940 len = TREE_STRING_LENGTH (t);
29941 vec_safe_push (used_rtx_array, rtl);
29942 ref = new_die (DW_TAG_dwarf_procedure, comp_unit_die (), decl);
29943 array = ggc_vec_alloc<unsigned char> (len);
29944 memcpy (array, TREE_STRING_POINTER (t), len);
29945 l = new_loc_descr (DW_OP_implicit_value, len, 0);
29946 l->dw_loc_oprnd2.val_class = dw_val_class_vec;
29947 l->dw_loc_oprnd2.v.val_vec.length = len;
29948 l->dw_loc_oprnd2.v.val_vec.elt_size = 1;
29949 l->dw_loc_oprnd2.v.val_vec.array = array;
29950 add_AT_loc (ref, DW_AT_location, l);
29951 equate_decl_number_to_die (decl, ref);
29952 }
29953 return rtl;
29954 }
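
/* Illustrative example (editorial note): for a pooled literal "hi" the
   function above creates, next to the constant-pool symbol (e.g. .LC0), a
   DIE along the lines of

     DW_TAG_dwarf_procedure
       DW_AT_location   DW_OP_implicit_value 3  [0x68 0x69 0x00]

   so that a later DW_OP_implicit_pointer can refer to the string contents
   even if the literal itself is never emitted to .rodata.  */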
29955
29956 /* Helper function of resolve_addr_in_expr. LOC is
29957 a DW_OP_addr followed by DW_OP_stack_value, either at the start
29958 of exprloc or after DW_OP_{,bit_}piece, and val_addr can't be
29959 resolved. Replace it (both DW_OP_addr and DW_OP_stack_value)
29960 with DW_OP_implicit_pointer if possible and return true;
29961 if unsuccessful, return false. */
29962
29963 static bool
29964 optimize_one_addr_into_implicit_ptr (dw_loc_descr_ref loc)
29965 {
29966 rtx rtl = loc->dw_loc_oprnd1.v.val_addr;
29967 HOST_WIDE_INT offset = 0;
29968 dw_die_ref ref = NULL;
29969 tree decl;
29970
29971 if (GET_CODE (rtl) == CONST
29972 && GET_CODE (XEXP (rtl, 0)) == PLUS
29973 && CONST_INT_P (XEXP (XEXP (rtl, 0), 1)))
29974 {
29975 offset = INTVAL (XEXP (XEXP (rtl, 0), 1));
29976 rtl = XEXP (XEXP (rtl, 0), 0);
29977 }
29978 if (GET_CODE (rtl) == CONST_STRING)
29979 {
29980 size_t len = strlen (XSTR (rtl, 0)) + 1;
29981 tree t = build_string (len, XSTR (rtl, 0));
29982 tree tlen = size_int (len - 1);
29983
29984 TREE_TYPE (t)
29985 = build_array_type (char_type_node, build_index_type (tlen));
29986 rtl = string_cst_pool_decl (t);
29987 if (!rtl)
29988 return false;
29989 }
29990 if (GET_CODE (rtl) == SYMBOL_REF && SYMBOL_REF_DECL (rtl))
29991 {
29992 decl = SYMBOL_REF_DECL (rtl);
29993 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
29994 {
29995 ref = lookup_decl_die (decl);
29996 if (ref && (get_AT (ref, DW_AT_location)
29997 || get_AT (ref, DW_AT_const_value)))
29998 {
29999 loc->dw_loc_opc = dwarf_OP (DW_OP_implicit_pointer);
30000 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30001 loc->dw_loc_oprnd1.val_entry = NULL;
30002 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30003 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30004 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
30005 loc->dw_loc_oprnd2.v.val_int = offset;
30006 return true;
30007 }
30008 }
30009 }
30010 return false;
30011 }
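
/* Illustrative example (editorial note): a location expression such as

     DW_OP_addr <var + 4>  DW_OP_stack_value

   whose symbol can't be resolved (e.g. the variable was optimized away) is
   rewritten in place by the function above into

     DW_OP_implicit_pointer <DIE of var> 4

   meaning "a pointer 4 bytes into the object described by that DIE", which
   consumers can follow through the DIE's DW_AT_location or
   DW_AT_const_value.  ("var" is a made-up name for the example.)  */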
30012
30013 /* Helper function for resolve_addr. Handle one location
30014 expression; return false if at least one CONST_STRING or SYMBOL_REF in
30015 the location expression couldn't be resolved. */
30016
30017 static bool
30018 resolve_addr_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
30019 {
30020 dw_loc_descr_ref keep = NULL;
30021 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = loc->dw_loc_next)
30022 switch (loc->dw_loc_opc)
30023 {
30024 case DW_OP_addr:
30025 if (!resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
30026 {
30027 if ((prev == NULL
30028 || prev->dw_loc_opc == DW_OP_piece
30029 || prev->dw_loc_opc == DW_OP_bit_piece)
30030 && loc->dw_loc_next
30031 && loc->dw_loc_next->dw_loc_opc == DW_OP_stack_value
30032 && (!dwarf_strict || dwarf_version >= 5)
30033 && optimize_one_addr_into_implicit_ptr (loc))
30034 break;
30035 return false;
30036 }
30037 break;
30038 case DW_OP_GNU_addr_index:
30039 case DW_OP_addrx:
30040 case DW_OP_GNU_const_index:
30041 case DW_OP_constx:
30042 if ((loc->dw_loc_opc == DW_OP_GNU_addr_index
30043 || loc->dw_loc_opc == DW_OP_addrx)
30044 || ((loc->dw_loc_opc == DW_OP_GNU_const_index
30045 || loc->dw_loc_opc == DW_OP_constx)
30046 && loc->dtprel))
30047 {
30048 rtx rtl = loc->dw_loc_oprnd1.val_entry->addr.rtl;
30049 if (!resolve_one_addr (&rtl))
30050 return false;
30051 remove_addr_table_entry (loc->dw_loc_oprnd1.val_entry);
30052 loc->dw_loc_oprnd1.val_entry
30053 = add_addr_table_entry (rtl, ate_kind_rtx);
30054 }
30055 break;
30056 case DW_OP_const4u:
30057 case DW_OP_const8u:
30058 if (loc->dtprel
30059 && !resolve_one_addr (&loc->dw_loc_oprnd1.v.val_addr))
30060 return false;
30061 break;
30062 case DW_OP_plus_uconst:
30063 if (size_of_loc_descr (loc)
30064 > size_of_int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned)
30065 + 1
30066 && loc->dw_loc_oprnd1.v.val_unsigned > 0)
30067 {
30068 dw_loc_descr_ref repl
30069 = int_loc_descriptor (loc->dw_loc_oprnd1.v.val_unsigned);
30070 add_loc_descr (&repl, new_loc_descr (DW_OP_plus, 0, 0));
30071 add_loc_descr (&repl, loc->dw_loc_next);
30072 *loc = *repl;
30073 }
30074 break;
30075 case DW_OP_implicit_value:
30076 if (loc->dw_loc_oprnd2.val_class == dw_val_class_addr
30077 && !resolve_one_addr (&loc->dw_loc_oprnd2.v.val_addr))
30078 return false;
30079 break;
30080 case DW_OP_implicit_pointer:
30081 case DW_OP_GNU_implicit_pointer:
30082 case DW_OP_GNU_parameter_ref:
30083 case DW_OP_GNU_variable_value:
30084 if (loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30085 {
30086 dw_die_ref ref
30087 = lookup_decl_die (loc->dw_loc_oprnd1.v.val_decl_ref);
30088 if (ref == NULL)
30089 return false;
30090 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30091 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
30092 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
30093 }
30094 if (loc->dw_loc_opc == DW_OP_GNU_variable_value)
30095 {
30096 if (prev == NULL
30097 && loc->dw_loc_next == NULL
30098 && AT_class (a) == dw_val_class_loc)
30099 switch (a->dw_attr)
30100 {
30101 /* The following attributes allow both exprloc and reference,
30102 so if the whole expression is DW_OP_GNU_variable_value
30103 alone we can transform it into a reference. */
30104 case DW_AT_byte_size:
30105 case DW_AT_bit_size:
30106 case DW_AT_lower_bound:
30107 case DW_AT_upper_bound:
30108 case DW_AT_bit_stride:
30109 case DW_AT_count:
30110 case DW_AT_allocated:
30111 case DW_AT_associated:
30112 case DW_AT_byte_stride:
30113 a->dw_attr_val.val_class = dw_val_class_die_ref;
30114 a->dw_attr_val.val_entry = NULL;
30115 a->dw_attr_val.v.val_die_ref.die
30116 = loc->dw_loc_oprnd1.v.val_die_ref.die;
30117 a->dw_attr_val.v.val_die_ref.external = 0;
30118 return true;
30119 default:
30120 break;
30121 }
30122 if (dwarf_strict)
30123 return false;
30124 }
30125 break;
30126 case DW_OP_const_type:
30127 case DW_OP_regval_type:
30128 case DW_OP_deref_type:
30129 case DW_OP_convert:
30130 case DW_OP_reinterpret:
30131 case DW_OP_GNU_const_type:
30132 case DW_OP_GNU_regval_type:
30133 case DW_OP_GNU_deref_type:
30134 case DW_OP_GNU_convert:
30135 case DW_OP_GNU_reinterpret:
30136 while (loc->dw_loc_next
30137 && (loc->dw_loc_next->dw_loc_opc == DW_OP_convert
30138 || loc->dw_loc_next->dw_loc_opc == DW_OP_GNU_convert))
30139 {
30140 dw_die_ref base1, base2;
30141 unsigned enc1, enc2, size1, size2;
30142 if (loc->dw_loc_opc == DW_OP_regval_type
30143 || loc->dw_loc_opc == DW_OP_deref_type
30144 || loc->dw_loc_opc == DW_OP_GNU_regval_type
30145 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
30146 base1 = loc->dw_loc_oprnd2.v.val_die_ref.die;
30147 else if (loc->dw_loc_oprnd1.val_class
30148 == dw_val_class_unsigned_const)
30149 break;
30150 else
30151 base1 = loc->dw_loc_oprnd1.v.val_die_ref.die;
30152 if (loc->dw_loc_next->dw_loc_oprnd1.val_class
30153 == dw_val_class_unsigned_const)
30154 break;
30155 base2 = loc->dw_loc_next->dw_loc_oprnd1.v.val_die_ref.die;
30156 gcc_assert (base1->die_tag == DW_TAG_base_type
30157 && base2->die_tag == DW_TAG_base_type);
30158 enc1 = get_AT_unsigned (base1, DW_AT_encoding);
30159 enc2 = get_AT_unsigned (base2, DW_AT_encoding);
30160 size1 = get_AT_unsigned (base1, DW_AT_byte_size);
30161 size2 = get_AT_unsigned (base2, DW_AT_byte_size);
30162 if (size1 == size2
30163 && (((enc1 == DW_ATE_unsigned || enc1 == DW_ATE_signed)
30164 && (enc2 == DW_ATE_unsigned || enc2 == DW_ATE_signed)
30165 && loc != keep)
30166 || enc1 == enc2))
30167 {
30168 /* Optimize away next DW_OP_convert after
30169 adjusting LOC's base type die reference. */
30170 if (loc->dw_loc_opc == DW_OP_regval_type
30171 || loc->dw_loc_opc == DW_OP_deref_type
30172 || loc->dw_loc_opc == DW_OP_GNU_regval_type
30173 || loc->dw_loc_opc == DW_OP_GNU_deref_type)
30174 loc->dw_loc_oprnd2.v.val_die_ref.die = base2;
30175 else
30176 loc->dw_loc_oprnd1.v.val_die_ref.die = base2;
30177 loc->dw_loc_next = loc->dw_loc_next->dw_loc_next;
30178 continue;
30179 }
30180 /* Don't change integer DW_OP_convert after e.g. floating
30181 point typed stack entry. */
30182 else if (enc1 != DW_ATE_unsigned && enc1 != DW_ATE_signed)
30183 keep = loc->dw_loc_next;
30184 break;
30185 }
30186 break;
30187 default:
30188 break;
30189 }
30190 return true;
30191 }
30192
30193 /* Helper function of resolve_addr. DIE had a DW_AT_location consisting of
30194 DW_OP_addr alone, whose operand referred to DECL, and that
30195 DW_OP_addr couldn't be resolved. resolve_addr has already
30196 removed the DW_AT_location attribute. This function attempts to
30197 add to DIE a new DW_AT_location attribute with DW_OP_implicit_pointer,
30198 or a DW_AT_const_value attribute, if possible. */
30199
30200 static void
30201 optimize_location_into_implicit_ptr (dw_die_ref die, tree decl)
30202 {
30203 if (!VAR_P (decl)
30204 || lookup_decl_die (decl) != die
30205 || DECL_EXTERNAL (decl)
30206 || !TREE_STATIC (decl)
30207 || DECL_INITIAL (decl) == NULL_TREE
30208 || DECL_P (DECL_INITIAL (decl))
30209 || get_AT (die, DW_AT_const_value))
30210 return;
30211
30212 tree init = DECL_INITIAL (decl);
30213 HOST_WIDE_INT offset = 0;
30214 /* For variables that have been optimized away and thus
30215 don't have a memory location, see if we can emit
30216 DW_AT_const_value instead. */
30217 if (tree_add_const_value_attribute (die, init))
30218 return;
30219 if (dwarf_strict && dwarf_version < 5)
30220 return;
30221 /* If init is ADDR_EXPR or POINTER_PLUS_EXPR of ADDR_EXPR,
30222 and ADDR_EXPR refers to a decl that has DW_AT_location or
30223 DW_AT_const_value (but isn't addressable, otherwise
30224 resolving the original DW_OP_addr wouldn't fail), see if
30225 we can add DW_OP_implicit_pointer. */
30226 STRIP_NOPS (init);
30227 if (TREE_CODE (init) == POINTER_PLUS_EXPR
30228 && tree_fits_shwi_p (TREE_OPERAND (init, 1)))
30229 {
30230 offset = tree_to_shwi (TREE_OPERAND (init, 1));
30231 init = TREE_OPERAND (init, 0);
30232 STRIP_NOPS (init);
30233 }
30234 if (TREE_CODE (init) != ADDR_EXPR)
30235 return;
30236 if ((TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST
30237 && !TREE_ASM_WRITTEN (TREE_OPERAND (init, 0)))
30238 || (TREE_CODE (TREE_OPERAND (init, 0)) == VAR_DECL
30239 && !DECL_EXTERNAL (TREE_OPERAND (init, 0))
30240 && TREE_OPERAND (init, 0) != decl))
30241 {
30242 dw_die_ref ref;
30243 dw_loc_descr_ref l;
30244
30245 if (TREE_CODE (TREE_OPERAND (init, 0)) == STRING_CST)
30246 {
30247 rtx rtl = string_cst_pool_decl (TREE_OPERAND (init, 0));
30248 if (!rtl)
30249 return;
30250 decl = SYMBOL_REF_DECL (rtl);
30251 }
30252 else
30253 decl = TREE_OPERAND (init, 0);
30254 ref = lookup_decl_die (decl);
30255 if (ref == NULL
30256 || (!get_AT (ref, DW_AT_location)
30257 && !get_AT (ref, DW_AT_const_value)))
30258 return;
30259 l = new_loc_descr (dwarf_OP (DW_OP_implicit_pointer), 0, offset);
30260 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30261 l->dw_loc_oprnd1.v.val_die_ref.die = ref;
30262 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30263 add_AT_loc (die, DW_AT_location, l);
30264 }
30265 }
30266
30267 /* Return NULL if L is a valid DWARF expression, or the first op
30268 that is not valid in a DWARF expression. */
30269
30270 static dw_loc_descr_ref
30271 non_dwarf_expression (dw_loc_descr_ref l)
30272 {
30273 while (l)
30274 {
30275 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30276 return l;
30277 switch (l->dw_loc_opc)
30278 {
30279 case DW_OP_regx:
30280 case DW_OP_implicit_value:
30281 case DW_OP_stack_value:
30282 case DW_OP_implicit_pointer:
30283 case DW_OP_GNU_implicit_pointer:
30284 case DW_OP_GNU_parameter_ref:
30285 case DW_OP_piece:
30286 case DW_OP_bit_piece:
30287 return l;
30288 default:
30289 break;
30290 }
30291 l = l->dw_loc_next;
30292 }
30293 return NULL;
30294 }
30295
30296 /* Return an adjusted copy of EXPR:
30297 If it is an empty DWARF expression, return it.
30298 If it is a valid non-empty DWARF expression,
30299 return a copy of EXPR with DW_OP_deref appended to it.
30300 If it is a DWARF expression followed by DW_OP_reg{N,x}, return
30301 a copy of the DWARF expression with DW_OP_breg{N,x} <0> appended.
30302 If it is a DWARF expression followed by DW_OP_stack_value, return
30303 a copy of the DWARF expression with nothing appended.
30304 Otherwise, return NULL. */
30305
30306 static dw_loc_descr_ref
30307 copy_deref_exprloc (dw_loc_descr_ref expr)
30308 {
30309 dw_loc_descr_ref tail = NULL;
30310
30311 if (expr == NULL)
30312 return NULL;
30313
30314 dw_loc_descr_ref l = non_dwarf_expression (expr);
30315 if (l && l->dw_loc_next)
30316 return NULL;
30317
30318 if (l)
30319 {
30320 if (l->dw_loc_opc >= DW_OP_reg0 && l->dw_loc_opc <= DW_OP_reg31)
30321 tail = new_loc_descr ((enum dwarf_location_atom)
30322 (DW_OP_breg0 + (l->dw_loc_opc - DW_OP_reg0)),
30323 0, 0);
30324 else
30325 switch (l->dw_loc_opc)
30326 {
30327 case DW_OP_regx:
30328 tail = new_loc_descr (DW_OP_bregx,
30329 l->dw_loc_oprnd1.v.val_unsigned, 0);
30330 break;
30331 case DW_OP_stack_value:
30332 break;
30333 default:
30334 return NULL;
30335 }
30336 }
30337 else
30338 tail = new_loc_descr (DW_OP_deref, 0, 0);
30339
30340 dw_loc_descr_ref ret = NULL, *p = &ret;
30341 while (expr != l)
30342 {
30343 *p = new_loc_descr (expr->dw_loc_opc, 0, 0);
30344 (*p)->dw_loc_oprnd1 = expr->dw_loc_oprnd1;
30345 (*p)->dw_loc_oprnd2 = expr->dw_loc_oprnd2;
30346 p = &(*p)->dw_loc_next;
30347 expr = expr->dw_loc_next;
30348 }
30349 *p = tail;
30350 return ret;
30351 }
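
/* Illustrative examples (editorial note) of the cases handled above:

     DW_OP_fbreg -16                     -> DW_OP_fbreg -16  DW_OP_deref
     DW_OP_fbreg -16  DW_OP_reg3         -> DW_OP_fbreg -16  DW_OP_breg3 0
     DW_OP_fbreg -16  DW_OP_regx 36      -> DW_OP_fbreg -16  DW_OP_bregx 36 0
     DW_OP_fbreg -16  DW_OP_stack_value  -> DW_OP_fbreg -16

   Expressions ending in other non-expression ops (DW_OP_piece,
   DW_OP_implicit_value, DW_OP_implicit_pointer, ...) are rejected and NULL
   is returned, since the value they describe can't be dereferenced as a
   simple address.  */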
30352
30353 /* For a DW_AT_string_length attribute with a DW_OP_GNU_variable_value
30354 reference to a variable or argument, adjust it if needed and return:
30355 -1 if the DW_AT_string_length attribute and the DW_AT_{string_length_,}byte_size
30356 attribute, if present, should be removed,
30357 0 if the attribute should be kept, perhaps with minor modifications (no need to rescan),
30358 1 if the attribute has been successfully adjusted. */
30359
30360 static int
30361 optimize_string_length (dw_attr_node *a)
30362 {
30363 dw_loc_descr_ref l = AT_loc (a), lv;
30364 dw_die_ref die;
30365 if (l->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
30366 {
30367 tree decl = l->dw_loc_oprnd1.v.val_decl_ref;
30368 die = lookup_decl_die (decl);
30369 if (die)
30370 {
30371 l->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
30372 l->dw_loc_oprnd1.v.val_die_ref.die = die;
30373 l->dw_loc_oprnd1.v.val_die_ref.external = 0;
30374 }
30375 else
30376 return -1;
30377 }
30378 else
30379 die = l->dw_loc_oprnd1.v.val_die_ref.die;
30380
30381 /* DWARF5 allows reference class, so we can then reference the DIE.
30382 Only do this for DW_OP_GNU_variable_value DW_OP_stack_value. */
30383 if (l->dw_loc_next != NULL && dwarf_version >= 5)
30384 {
30385 a->dw_attr_val.val_class = dw_val_class_die_ref;
30386 a->dw_attr_val.val_entry = NULL;
30387 a->dw_attr_val.v.val_die_ref.die = die;
30388 a->dw_attr_val.v.val_die_ref.external = 0;
30389 return 0;
30390 }
30391
30392 dw_attr_node *av = get_AT (die, DW_AT_location);
30393 dw_loc_list_ref d;
30394 bool non_dwarf_expr = false;
30395
30396 if (av == NULL)
30397 return dwarf_strict ? -1 : 0;
30398 switch (AT_class (av))
30399 {
30400 case dw_val_class_loc_list:
30401 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30402 if (d->expr && non_dwarf_expression (d->expr))
30403 non_dwarf_expr = true;
30404 break;
30405 case dw_val_class_view_list:
30406 gcc_unreachable ();
30407 case dw_val_class_loc:
30408 lv = AT_loc (av);
30409 if (lv == NULL)
30410 return dwarf_strict ? -1 : 0;
30411 if (non_dwarf_expression (lv))
30412 non_dwarf_expr = true;
30413 break;
30414 default:
30415 return dwarf_strict ? -1 : 0;
30416 }
30417
30418 /* If it is safe to transform DW_OP_GNU_variable_value DW_OP_stack_value
30419 into DW_OP_call4 or DW_OP_GNU_variable_value into
30420 DW_OP_call4 DW_OP_deref, do so. */
30421 if (!non_dwarf_expr
30422 && (l->dw_loc_next != NULL || AT_class (av) == dw_val_class_loc))
30423 {
30424 l->dw_loc_opc = DW_OP_call4;
30425 if (l->dw_loc_next)
30426 l->dw_loc_next = NULL;
30427 else
30428 l->dw_loc_next = new_loc_descr (DW_OP_deref, 0, 0);
30429 return 0;
30430 }
30431
30432 /* For DW_OP_GNU_variable_value DW_OP_stack_value, we can just
30433 copy over the DW_AT_location attribute from die to a. */
30434 if (l->dw_loc_next != NULL)
30435 {
30436 a->dw_attr_val = av->dw_attr_val;
30437 return 1;
30438 }
30439
30440 dw_loc_list_ref list, *p;
30441 switch (AT_class (av))
30442 {
30443 case dw_val_class_loc_list:
30444 p = &list;
30445 list = NULL;
30446 for (d = AT_loc_list (av); d != NULL; d = d->dw_loc_next)
30447 {
30448 lv = copy_deref_exprloc (d->expr);
30449 if (lv)
30450 {
30451 *p = new_loc_list (lv, d->begin, d->vbegin, d->end, d->vend, d->section);
30452 p = &(*p)->dw_loc_next;
30453 }
30454 else if (!dwarf_strict && d->expr)
30455 return 0;
30456 }
30457 if (list == NULL)
30458 return dwarf_strict ? -1 : 0;
30459 a->dw_attr_val.val_class = dw_val_class_loc_list;
30460 gen_llsym (list);
30461 *AT_loc_list_ptr (a) = list;
30462 return 1;
30463 case dw_val_class_loc:
30464 lv = copy_deref_exprloc (AT_loc (av));
30465 if (lv == NULL)
30466 return dwarf_strict ? -1 : 0;
30467 a->dw_attr_val.v.val_loc = lv;
30468 return 1;
30469 default:
30470 gcc_unreachable ();
30471 }
30472 }
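
/* Illustrative example (editorial note): a deferred-length string (e.g. a
   Fortran CHARACTER(len=:) variable) may reach here with

     DW_AT_string_length:  DW_OP_GNU_variable_value <length decl>  DW_OP_stack_value

   When the length variable's DW_AT_location is a plain DWARF expression,
   the function above rewrites this into the standard

     DW_AT_string_length:  DW_OP_call4 <DIE of length variable>

   and for DWARF 5 it may instead turn the attribute into a direct
   reference-class value pointing at that DIE.  */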
30473
30474 /* Resolve DW_OP_addr and DW_AT_const_value CONST_STRING arguments to
30475 an address in the .rodata section if the string literal is emitted there,
30476 or, if it isn't found in .rodata, remove the containing location list
30477 or replace DW_AT_const_value with DW_AT_location and an empty
30478 location expression. Similarly for SYMBOL_REFs, keep only those that
30479 refer to something that has been emitted in the current CU. */
30480
30481 static void
30482 resolve_addr (dw_die_ref die)
30483 {
30484 dw_die_ref c;
30485 dw_attr_node *a;
30486 dw_loc_list_ref *curr, *start, loc;
30487 unsigned ix;
30488 bool remove_AT_byte_size = false;
30489
30490 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
30491 switch (AT_class (a))
30492 {
30493 case dw_val_class_loc_list:
30494 start = curr = AT_loc_list_ptr (a);
30495 loc = *curr;
30496 gcc_assert (loc);
30497 /* The same list can be referenced more than once. See if we have
30498 already recorded the result from a previous pass. */
30499 if (loc->replaced)
30500 *curr = loc->dw_loc_next;
30501 else if (!loc->resolved_addr)
30502 {
30503 /* As things stand, we do not expect or allow one die to
30504 reference a suffix of another die's location list chain.
30505 References must be identical or completely separate.
30506 There is therefore no need to cache the result of this
30507 pass on any list other than the first; doing so
30508 would lead to unnecessary writes. */
30509 while (*curr)
30510 {
30511 gcc_assert (!(*curr)->replaced && !(*curr)->resolved_addr);
30512 if (!resolve_addr_in_expr (a, (*curr)->expr))
30513 {
30514 dw_loc_list_ref next = (*curr)->dw_loc_next;
30515 dw_loc_descr_ref l = (*curr)->expr;
30516
30517 if (next && (*curr)->ll_symbol)
30518 {
30519 gcc_assert (!next->ll_symbol);
30520 next->ll_symbol = (*curr)->ll_symbol;
30521 next->vl_symbol = (*curr)->vl_symbol;
30522 }
30523 if (dwarf_split_debug_info)
30524 remove_loc_list_addr_table_entries (l);
30525 *curr = next;
30526 }
30527 else
30528 {
30529 mark_base_types ((*curr)->expr);
30530 curr = &(*curr)->dw_loc_next;
30531 }
30532 }
30533 if (loc == *start)
30534 loc->resolved_addr = 1;
30535 else
30536 {
30537 loc->replaced = 1;
30538 loc->dw_loc_next = *start;
30539 }
30540 }
30541 if (!*start)
30542 {
30543 remove_AT (die, a->dw_attr);
30544 ix--;
30545 }
30546 break;
30547 case dw_val_class_view_list:
30548 {
30549 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
30550 gcc_checking_assert (dwarf2out_locviews_in_attribute ());
30551 dw_val_node *llnode
30552 = view_list_to_loc_list_val_node (&a->dw_attr_val);
30553 /* If we no longer have a loclist, or it no longer needs
30554 views, drop this attribute. */
30555 if (!llnode || !llnode->v.val_loc_list->vl_symbol)
30556 {
30557 remove_AT (die, a->dw_attr);
30558 ix--;
30559 }
30560 break;
30561 }
30562 case dw_val_class_loc:
30563 {
30564 dw_loc_descr_ref l = AT_loc (a);
30565 /* DW_OP_GNU_variable_value DW_OP_stack_value or
30566 DW_OP_GNU_variable_value in DW_AT_string_length can be converted
30567 into DW_OP_call4 or DW_OP_call4 DW_OP_deref, which is standard
30568 DWARF4 unlike DW_OP_GNU_variable_value. Or for DWARF5
30569 DW_OP_GNU_variable_value DW_OP_stack_value can be replaced
30570 with DW_FORM_ref referencing the same DIE as
30571 DW_OP_GNU_variable_value used to reference. */
30572 if (a->dw_attr == DW_AT_string_length
30573 && l
30574 && l->dw_loc_opc == DW_OP_GNU_variable_value
30575 && (l->dw_loc_next == NULL
30576 || (l->dw_loc_next->dw_loc_next == NULL
30577 && l->dw_loc_next->dw_loc_opc == DW_OP_stack_value)))
30578 {
30579 switch (optimize_string_length (a))
30580 {
30581 case -1:
30582 remove_AT (die, a->dw_attr);
30583 ix--;
30584 /* If we drop DW_AT_string_length, we need to drop also
30585 DW_AT_{string_length_,}byte_size. */
30586 remove_AT_byte_size = true;
30587 continue;
30588 default:
30589 break;
30590 case 1:
30591 /* Even if we keep the optimized DW_AT_string_length,
30592 it might have changed AT_class, so process it again. */
30593 ix--;
30594 continue;
30595 }
30596 }
30597 /* For -gdwarf-2 don't attempt to optimize
30598 DW_AT_data_member_location containing
30599 DW_OP_plus_uconst - older consumers might
30600 rely on it being that op instead of a more complex,
30601 but shorter, location description. */
30602 if ((dwarf_version > 2
30603 || a->dw_attr != DW_AT_data_member_location
30604 || l == NULL
30605 || l->dw_loc_opc != DW_OP_plus_uconst
30606 || l->dw_loc_next != NULL)
30607 && !resolve_addr_in_expr (a, l))
30608 {
30609 if (dwarf_split_debug_info)
30610 remove_loc_list_addr_table_entries (l);
30611 if (l != NULL
30612 && l->dw_loc_next == NULL
30613 && l->dw_loc_opc == DW_OP_addr
30614 && GET_CODE (l->dw_loc_oprnd1.v.val_addr) == SYMBOL_REF
30615 && SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr)
30616 && a->dw_attr == DW_AT_location)
30617 {
30618 tree decl = SYMBOL_REF_DECL (l->dw_loc_oprnd1.v.val_addr);
30619 remove_AT (die, a->dw_attr);
30620 ix--;
30621 optimize_location_into_implicit_ptr (die, decl);
30622 break;
30623 }
30624 if (a->dw_attr == DW_AT_string_length)
30625 /* If we drop DW_AT_string_length, we need to drop also
30626 DW_AT_{string_length_,}byte_size. */
30627 remove_AT_byte_size = true;
30628 remove_AT (die, a->dw_attr);
30629 ix--;
30630 }
30631 else
30632 mark_base_types (l);
30633 }
30634 break;
30635 case dw_val_class_addr:
30636 if (a->dw_attr == DW_AT_const_value
30637 && !resolve_one_addr (&a->dw_attr_val.v.val_addr))
30638 {
30639 if (AT_index (a) != NOT_INDEXED)
30640 remove_addr_table_entry (a->dw_attr_val.val_entry);
30641 remove_AT (die, a->dw_attr);
30642 ix--;
30643 }
30644 if ((die->die_tag == DW_TAG_call_site
30645 && a->dw_attr == DW_AT_call_origin)
30646 || (die->die_tag == DW_TAG_GNU_call_site
30647 && a->dw_attr == DW_AT_abstract_origin))
30648 {
30649 tree tdecl = SYMBOL_REF_DECL (a->dw_attr_val.v.val_addr);
30650 dw_die_ref tdie = lookup_decl_die (tdecl);
30651 dw_die_ref cdie;
30652 if (tdie == NULL
30653 && DECL_EXTERNAL (tdecl)
30654 && DECL_ABSTRACT_ORIGIN (tdecl) == NULL_TREE
30655 && (cdie = lookup_context_die (DECL_CONTEXT (tdecl))))
30656 {
30657 dw_die_ref pdie = cdie;
30658 /* Make sure we don't add these DIEs into type units.
30659 We could emit skeleton DIEs for context (namespaces,
30660 outer structs/classes) and a skeleton DIE for the
30661 innermost context with DW_AT_signature pointing to the
30662 type unit. See PR78835. */
30663 while (pdie && pdie->die_tag != DW_TAG_type_unit)
30664 pdie = pdie->die_parent;
30665 if (pdie == NULL)
30666 {
30667 /* Creating a full DIE for tdecl is overly expensive, and at this
30668 point in the LTO phase it would even be wrong, as it can end up
30669 generating new type DIEs that we didn't output, and thus
30670 optimize_external_refs would crash. */
30671 tdie = new_die (DW_TAG_subprogram, cdie, NULL_TREE);
30672 add_AT_flag (tdie, DW_AT_external, 1);
30673 add_AT_flag (tdie, DW_AT_declaration, 1);
30674 add_linkage_attr (tdie, tdecl);
30675 add_name_and_src_coords_attributes (tdie, tdecl, true);
30676 equate_decl_number_to_die (tdecl, tdie);
30677 }
30678 }
30679 if (tdie)
30680 {
30681 a->dw_attr_val.val_class = dw_val_class_die_ref;
30682 a->dw_attr_val.v.val_die_ref.die = tdie;
30683 a->dw_attr_val.v.val_die_ref.external = 0;
30684 }
30685 else
30686 {
30687 if (AT_index (a) != NOT_INDEXED)
30688 remove_addr_table_entry (a->dw_attr_val.val_entry);
30689 remove_AT (die, a->dw_attr);
30690 ix--;
30691 }
30692 }
30693 break;
30694 default:
30695 break;
30696 }
30697
30698 if (remove_AT_byte_size)
30699 remove_AT (die, dwarf_version >= 5
30700 ? DW_AT_string_length_byte_size
30701 : DW_AT_byte_size);
30702
30703 FOR_EACH_CHILD (die, c, resolve_addr (c));
30704 }
30705 \f
30706 /* Helper routines for optimize_location_lists.
30707 This pass tries to share identical location lists in the .debug_loc
30708 section. */
30709
30710 /* Iteratively hash operands of LOC opcode into HSTATE. */
30711
30712 static void
30713 hash_loc_operands (dw_loc_descr_ref loc, inchash::hash &hstate)
30714 {
30715 dw_val_ref val1 = &loc->dw_loc_oprnd1;
30716 dw_val_ref val2 = &loc->dw_loc_oprnd2;
30717
30718 switch (loc->dw_loc_opc)
30719 {
30720 case DW_OP_const4u:
30721 case DW_OP_const8u:
30722 if (loc->dtprel)
30723 goto hash_addr;
30724 /* FALLTHRU */
30725 case DW_OP_const1u:
30726 case DW_OP_const1s:
30727 case DW_OP_const2u:
30728 case DW_OP_const2s:
30729 case DW_OP_const4s:
30730 case DW_OP_const8s:
30731 case DW_OP_constu:
30732 case DW_OP_consts:
30733 case DW_OP_pick:
30734 case DW_OP_plus_uconst:
30735 case DW_OP_breg0:
30736 case DW_OP_breg1:
30737 case DW_OP_breg2:
30738 case DW_OP_breg3:
30739 case DW_OP_breg4:
30740 case DW_OP_breg5:
30741 case DW_OP_breg6:
30742 case DW_OP_breg7:
30743 case DW_OP_breg8:
30744 case DW_OP_breg9:
30745 case DW_OP_breg10:
30746 case DW_OP_breg11:
30747 case DW_OP_breg12:
30748 case DW_OP_breg13:
30749 case DW_OP_breg14:
30750 case DW_OP_breg15:
30751 case DW_OP_breg16:
30752 case DW_OP_breg17:
30753 case DW_OP_breg18:
30754 case DW_OP_breg19:
30755 case DW_OP_breg20:
30756 case DW_OP_breg21:
30757 case DW_OP_breg22:
30758 case DW_OP_breg23:
30759 case DW_OP_breg24:
30760 case DW_OP_breg25:
30761 case DW_OP_breg26:
30762 case DW_OP_breg27:
30763 case DW_OP_breg28:
30764 case DW_OP_breg29:
30765 case DW_OP_breg30:
30766 case DW_OP_breg31:
30767 case DW_OP_regx:
30768 case DW_OP_fbreg:
30769 case DW_OP_piece:
30770 case DW_OP_deref_size:
30771 case DW_OP_xderef_size:
30772 hstate.add_object (val1->v.val_int);
30773 break;
30774 case DW_OP_skip:
30775 case DW_OP_bra:
30776 {
30777 int offset;
30778
30779 gcc_assert (val1->val_class == dw_val_class_loc);
30780 offset = val1->v.val_loc->dw_loc_addr - (loc->dw_loc_addr + 3);
30781 hstate.add_object (offset);
30782 }
30783 break;
30784 case DW_OP_implicit_value:
30785 hstate.add_object (val1->v.val_unsigned);
30786 switch (val2->val_class)
30787 {
30788 case dw_val_class_const:
30789 hstate.add_object (val2->v.val_int);
30790 break;
30791 case dw_val_class_vec:
30792 {
30793 unsigned int elt_size = val2->v.val_vec.elt_size;
30794 unsigned int len = val2->v.val_vec.length;
30795
30796 hstate.add_int (elt_size);
30797 hstate.add_int (len);
30798 hstate.add (val2->v.val_vec.array, len * elt_size);
30799 }
30800 break;
30801 case dw_val_class_const_double:
30802 hstate.add_object (val2->v.val_double.low);
30803 hstate.add_object (val2->v.val_double.high);
30804 break;
30805 case dw_val_class_wide_int:
30806 hstate.add (val2->v.val_wide->get_val (),
30807 get_full_len (*val2->v.val_wide)
30808 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30809 break;
30810 case dw_val_class_addr:
30811 inchash::add_rtx (val2->v.val_addr, hstate);
30812 break;
30813 default:
30814 gcc_unreachable ();
30815 }
30816 break;
30817 case DW_OP_bregx:
30818 case DW_OP_bit_piece:
30819 hstate.add_object (val1->v.val_int);
30820 hstate.add_object (val2->v.val_int);
30821 break;
30822 case DW_OP_addr:
30823 hash_addr:
30824 if (loc->dtprel)
30825 {
30826 unsigned char dtprel = 0xd1;
30827 hstate.add_object (dtprel);
30828 }
30829 inchash::add_rtx (val1->v.val_addr, hstate);
30830 break;
30831 case DW_OP_GNU_addr_index:
30832 case DW_OP_addrx:
30833 case DW_OP_GNU_const_index:
30834 case DW_OP_constx:
30835 {
30836 if (loc->dtprel)
30837 {
30838 unsigned char dtprel = 0xd1;
30839 hstate.add_object (dtprel);
30840 }
30841 inchash::add_rtx (val1->val_entry->addr.rtl, hstate);
30842 }
30843 break;
30844 case DW_OP_implicit_pointer:
30845 case DW_OP_GNU_implicit_pointer:
30846 hstate.add_int (val2->v.val_int);
30847 break;
30848 case DW_OP_entry_value:
30849 case DW_OP_GNU_entry_value:
30850 hstate.add_object (val1->v.val_loc);
30851 break;
30852 case DW_OP_regval_type:
30853 case DW_OP_deref_type:
30854 case DW_OP_GNU_regval_type:
30855 case DW_OP_GNU_deref_type:
30856 {
30857 unsigned int byte_size
30858 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_byte_size);
30859 unsigned int encoding
30860 = get_AT_unsigned (val2->v.val_die_ref.die, DW_AT_encoding);
30861 hstate.add_object (val1->v.val_int);
30862 hstate.add_object (byte_size);
30863 hstate.add_object (encoding);
30864 }
30865 break;
30866 case DW_OP_convert:
30867 case DW_OP_reinterpret:
30868 case DW_OP_GNU_convert:
30869 case DW_OP_GNU_reinterpret:
30870 if (val1->val_class == dw_val_class_unsigned_const)
30871 {
30872 hstate.add_object (val1->v.val_unsigned);
30873 break;
30874 }
30875 /* FALLTHRU */
30876 case DW_OP_const_type:
30877 case DW_OP_GNU_const_type:
30878 {
30879 unsigned int byte_size
30880 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_byte_size);
30881 unsigned int encoding
30882 = get_AT_unsigned (val1->v.val_die_ref.die, DW_AT_encoding);
30883 hstate.add_object (byte_size);
30884 hstate.add_object (encoding);
30885 if (loc->dw_loc_opc != DW_OP_const_type
30886 && loc->dw_loc_opc != DW_OP_GNU_const_type)
30887 break;
30888 hstate.add_object (val2->val_class);
30889 switch (val2->val_class)
30890 {
30891 case dw_val_class_const:
30892 hstate.add_object (val2->v.val_int);
30893 break;
30894 case dw_val_class_vec:
30895 {
30896 unsigned int elt_size = val2->v.val_vec.elt_size;
30897 unsigned int len = val2->v.val_vec.length;
30898
30899 hstate.add_object (elt_size);
30900 hstate.add_object (len);
30901 hstate.add (val2->v.val_vec.array, len * elt_size);
30902 }
30903 break;
30904 case dw_val_class_const_double:
30905 hstate.add_object (val2->v.val_double.low);
30906 hstate.add_object (val2->v.val_double.high);
30907 break;
30908 case dw_val_class_wide_int:
30909 hstate.add (val2->v.val_wide->get_val (),
30910 get_full_len (*val2->v.val_wide)
30911 * HOST_BITS_PER_WIDE_INT / HOST_BITS_PER_CHAR);
30912 break;
30913 default:
30914 gcc_unreachable ();
30915 }
30916 }
30917 break;
30918
30919 default:
30920 /* Other codes have no operands. */
30921 break;
30922 }
30923 }
30924
30925 /* Iteratively hash the whole DWARF location expression LOC into HSTATE. */
30926
30927 static inline void
30928 hash_locs (dw_loc_descr_ref loc, inchash::hash &hstate)
30929 {
30930 dw_loc_descr_ref l;
30931 bool sizes_computed = false;
30932 /* Compute sizes, so that DW_OP_skip/DW_OP_bra can be checksummed. */
30933 size_of_locs (loc);
30934
30935 for (l = loc; l != NULL; l = l->dw_loc_next)
30936 {
30937 enum dwarf_location_atom opc = l->dw_loc_opc;
30938 hstate.add_object (opc);
30939 if ((opc == DW_OP_skip || opc == DW_OP_bra) && !sizes_computed)
30940 {
30941 size_of_locs (loc);
30942 sizes_computed = true;
30943 }
30944 hash_loc_operands (l, hstate);
30945 }
30946 }
30947
30948 /* Compute hash of the whole location list LIST_HEAD. */
30949
30950 static inline void
30951 hash_loc_list (dw_loc_list_ref list_head)
30952 {
30953 dw_loc_list_ref curr = list_head;
30954 inchash::hash hstate;
30955
30956 for (curr = list_head; curr != NULL; curr = curr->dw_loc_next)
30957 {
30958 hstate.add (curr->begin, strlen (curr->begin) + 1);
30959 hstate.add (curr->end, strlen (curr->end) + 1);
30960 hstate.add_object (curr->vbegin);
30961 hstate.add_object (curr->vend);
30962 if (curr->section)
30963 hstate.add (curr->section, strlen (curr->section) + 1);
30964 hash_locs (curr->expr, hstate);
30965 }
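/* Explanatory note (not in the original source): the combined hash is
   cached on the list head; loc_list_hasher::hash below simply returns
   this precomputed value.  */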
30966 list_head->hash = hstate.end ();
30967 }
30968
30969 /* Return true if X and Y opcodes have the same operands. */
30970
30971 static inline bool
30972 compare_loc_operands (dw_loc_descr_ref x, dw_loc_descr_ref y)
30973 {
30974 dw_val_ref valx1 = &x->dw_loc_oprnd1;
30975 dw_val_ref valx2 = &x->dw_loc_oprnd2;
30976 dw_val_ref valy1 = &y->dw_loc_oprnd1;
30977 dw_val_ref valy2 = &y->dw_loc_oprnd2;
30978
30979 switch (x->dw_loc_opc)
30980 {
30981 case DW_OP_const4u:
30982 case DW_OP_const8u:
30983 if (x->dtprel)
30984 goto hash_addr;
30985 /* FALLTHRU */
30986 case DW_OP_const1u:
30987 case DW_OP_const1s:
30988 case DW_OP_const2u:
30989 case DW_OP_const2s:
30990 case DW_OP_const4s:
30991 case DW_OP_const8s:
30992 case DW_OP_constu:
30993 case DW_OP_consts:
30994 case DW_OP_pick:
30995 case DW_OP_plus_uconst:
30996 case DW_OP_breg0:
30997 case DW_OP_breg1:
30998 case DW_OP_breg2:
30999 case DW_OP_breg3:
31000 case DW_OP_breg4:
31001 case DW_OP_breg5:
31002 case DW_OP_breg6:
31003 case DW_OP_breg7:
31004 case DW_OP_breg8:
31005 case DW_OP_breg9:
31006 case DW_OP_breg10:
31007 case DW_OP_breg11:
31008 case DW_OP_breg12:
31009 case DW_OP_breg13:
31010 case DW_OP_breg14:
31011 case DW_OP_breg15:
31012 case DW_OP_breg16:
31013 case DW_OP_breg17:
31014 case DW_OP_breg18:
31015 case DW_OP_breg19:
31016 case DW_OP_breg20:
31017 case DW_OP_breg21:
31018 case DW_OP_breg22:
31019 case DW_OP_breg23:
31020 case DW_OP_breg24:
31021 case DW_OP_breg25:
31022 case DW_OP_breg26:
31023 case DW_OP_breg27:
31024 case DW_OP_breg28:
31025 case DW_OP_breg29:
31026 case DW_OP_breg30:
31027 case DW_OP_breg31:
31028 case DW_OP_regx:
31029 case DW_OP_fbreg:
31030 case DW_OP_piece:
31031 case DW_OP_deref_size:
31032 case DW_OP_xderef_size:
31033 return valx1->v.val_int == valy1->v.val_int;
31034 case DW_OP_skip:
31035 case DW_OP_bra:
31036 /* If splitting debug info, the use of DW_OP_GNU_addr_index
31037 can cause irrelevant differences in dw_loc_addr. */
31038 gcc_assert (valx1->val_class == dw_val_class_loc
31039 && valy1->val_class == dw_val_class_loc
31040 && (dwarf_split_debug_info
31041 || x->dw_loc_addr == y->dw_loc_addr));
31042 return valx1->v.val_loc->dw_loc_addr == valy1->v.val_loc->dw_loc_addr;
31043 case DW_OP_implicit_value:
31044 if (valx1->v.val_unsigned != valy1->v.val_unsigned
31045 || valx2->val_class != valy2->val_class)
31046 return false;
31047 switch (valx2->val_class)
31048 {
31049 case dw_val_class_const:
31050 return valx2->v.val_int == valy2->v.val_int;
31051 case dw_val_class_vec:
31052 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
31053 && valx2->v.val_vec.length == valy2->v.val_vec.length
31054 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
31055 valx2->v.val_vec.elt_size
31056 * valx2->v.val_vec.length) == 0;
31057 case dw_val_class_const_double:
31058 return valx2->v.val_double.low == valy2->v.val_double.low
31059 && valx2->v.val_double.high == valy2->v.val_double.high;
31060 case dw_val_class_wide_int:
31061 return *valx2->v.val_wide == *valy2->v.val_wide;
31062 case dw_val_class_addr:
31063 return rtx_equal_p (valx2->v.val_addr, valy2->v.val_addr);
31064 default:
31065 gcc_unreachable ();
31066 }
31067 case DW_OP_bregx:
31068 case DW_OP_bit_piece:
31069 return valx1->v.val_int == valy1->v.val_int
31070 && valx2->v.val_int == valy2->v.val_int;
31071 case DW_OP_addr:
31072 hash_addr:
31073 return rtx_equal_p (valx1->v.val_addr, valy1->v.val_addr);
31074 case DW_OP_GNU_addr_index:
31075 case DW_OP_addrx:
31076 case DW_OP_GNU_const_index:
31077 case DW_OP_constx:
31078 {
31079 rtx ax1 = valx1->val_entry->addr.rtl;
31080 rtx ay1 = valy1->val_entry->addr.rtl;
31081 return rtx_equal_p (ax1, ay1);
31082 }
31083 case DW_OP_implicit_pointer:
31084 case DW_OP_GNU_implicit_pointer:
31085 return valx1->val_class == dw_val_class_die_ref
31086 && valx1->val_class == valy1->val_class
31087 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die
31088 && valx2->v.val_int == valy2->v.val_int;
31089 case DW_OP_entry_value:
31090 case DW_OP_GNU_entry_value:
31091 return compare_loc_operands (valx1->v.val_loc, valy1->v.val_loc);
31092 case DW_OP_const_type:
31093 case DW_OP_GNU_const_type:
31094 if (valx1->v.val_die_ref.die != valy1->v.val_die_ref.die
31095 || valx2->val_class != valy2->val_class)
31096 return false;
31097 switch (valx2->val_class)
31098 {
31099 case dw_val_class_const:
31100 return valx2->v.val_int == valy2->v.val_int;
31101 case dw_val_class_vec:
31102 return valx2->v.val_vec.elt_size == valy2->v.val_vec.elt_size
31103 && valx2->v.val_vec.length == valy2->v.val_vec.length
31104 && memcmp (valx2->v.val_vec.array, valy2->v.val_vec.array,
31105 valx2->v.val_vec.elt_size
31106 * valx2->v.val_vec.length) == 0;
31107 case dw_val_class_const_double:
31108 return valx2->v.val_double.low == valy2->v.val_double.low
31109 && valx2->v.val_double.high == valy2->v.val_double.high;
31110 case dw_val_class_wide_int:
31111 return *valx2->v.val_wide == *valy2->v.val_wide;
31112 default:
31113 gcc_unreachable ();
31114 }
31115 case DW_OP_regval_type:
31116 case DW_OP_deref_type:
31117 case DW_OP_GNU_regval_type:
31118 case DW_OP_GNU_deref_type:
31119 return valx1->v.val_int == valy1->v.val_int
31120 && valx2->v.val_die_ref.die == valy2->v.val_die_ref.die;
31121 case DW_OP_convert:
31122 case DW_OP_reinterpret:
31123 case DW_OP_GNU_convert:
31124 case DW_OP_GNU_reinterpret:
31125 if (valx1->val_class != valy1->val_class)
31126 return false;
31127 if (valx1->val_class == dw_val_class_unsigned_const)
31128 return valx1->v.val_unsigned == valy1->v.val_unsigned;
31129 return valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
31130 case DW_OP_GNU_parameter_ref:
31131 return valx1->val_class == dw_val_class_die_ref
31132 && valx1->val_class == valy1->val_class
31133 && valx1->v.val_die_ref.die == valy1->v.val_die_ref.die;
31134 default:
31135 /* Other codes have no operands. */
31136 return true;
31137 }
31138 }
31139
31140 /* Return true if DWARF location expressions X and Y are the same. */
31141
31142 static inline bool
31143 compare_locs (dw_loc_descr_ref x, dw_loc_descr_ref y)
31144 {
31145 for (; x != NULL && y != NULL; x = x->dw_loc_next, y = y->dw_loc_next)
31146 if (x->dw_loc_opc != y->dw_loc_opc
31147 || x->dtprel != y->dtprel
31148 || !compare_loc_operands (x, y))
31149 break;
31150 return x == NULL && y == NULL;
31151 }
31152
31153 /* Hashtable helpers. */
31154
31155 struct loc_list_hasher : nofree_ptr_hash <dw_loc_list_struct>
31156 {
31157 static inline hashval_t hash (const dw_loc_list_struct *);
31158 static inline bool equal (const dw_loc_list_struct *,
31159 const dw_loc_list_struct *);
31160 };
31161
31162 /* Return precomputed hash of location list X. */
31163
31164 inline hashval_t
31165 loc_list_hasher::hash (const dw_loc_list_struct *x)
31166 {
31167 return x->hash;
31168 }
31169
31170 /* Return true if location lists A and B are the same. */
31171
31172 inline bool
31173 loc_list_hasher::equal (const dw_loc_list_struct *a,
31174 const dw_loc_list_struct *b)
31175 {
31176 if (a == b)
31177 return 1;
31178 if (a->hash != b->hash)
31179 return 0;
31180 for (; a != NULL && b != NULL; a = a->dw_loc_next, b = b->dw_loc_next)
31181 if (strcmp (a->begin, b->begin) != 0
31182 || strcmp (a->end, b->end) != 0
31183 || (a->section == NULL) != (b->section == NULL)
31184 || (a->section && strcmp (a->section, b->section) != 0)
31185 || a->vbegin != b->vbegin || a->vend != b->vend
31186 || !compare_locs (a->expr, b->expr))
31187 break;
31188 return a == NULL && b == NULL;
31189 }
31190
31191 typedef hash_table<loc_list_hasher> loc_list_hash_type;
31192
31193
31194 /* Recursively optimize location lists referenced from DIE
31195 children and share them whenever possible. */
31196
31197 static void
31198 optimize_location_lists_1 (dw_die_ref die, loc_list_hash_type *htab)
31199 {
31200 dw_die_ref c;
31201 dw_attr_node *a;
31202 unsigned ix;
31203 dw_loc_list_struct **slot;
31204 bool drop_locviews = false;
31205 bool has_locviews = false;
31206
31207 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31208 if (AT_class (a) == dw_val_class_loc_list)
31209 {
31210 dw_loc_list_ref list = AT_loc_list (a);
31211 /* TODO: perform some optimizations here, before hashing
31212 it and storing it into the hash table. */
31213 hash_loc_list (list);
31214 slot = htab->find_slot_with_hash (list, list->hash, INSERT);
31215 if (*slot == NULL)
31216 {
31217 *slot = list;
31218 if (loc_list_has_views (list))
31219 gcc_assert (list->vl_symbol);
31220 else if (list->vl_symbol)
31221 {
31222 drop_locviews = true;
31223 list->vl_symbol = NULL;
31224 }
31225 }
31226 else
31227 {
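/* Explanatory note (not in the original source): an equivalent list is
   already in the hash table, so redirect this attribute to the canonical
   copy; the duplicate list is then only output once.  */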
31228 if (list->vl_symbol && !(*slot)->vl_symbol)
31229 drop_locviews = true;
31230 a->dw_attr_val.v.val_loc_list = *slot;
31231 }
31232 }
31233 else if (AT_class (a) == dw_val_class_view_list)
31234 {
31235 gcc_checking_assert (a->dw_attr == DW_AT_GNU_locviews);
31236 has_locviews = true;
31237 }
31238
31239
31240 if (drop_locviews && has_locviews)
31241 remove_AT (die, DW_AT_GNU_locviews);
31242
31243 FOR_EACH_CHILD (die, c, optimize_location_lists_1 (c, htab));
31244 }
31245
31246
31247 /* Recursively assign each location list a unique index into the debug_addr
31248 section. */
31249
31250 static void
31251 index_location_lists (dw_die_ref die)
31252 {
31253 dw_die_ref c;
31254 dw_attr_node *a;
31255 unsigned ix;
31256
31257 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31258 if (AT_class (a) == dw_val_class_loc_list)
31259 {
31260 dw_loc_list_ref list = AT_loc_list (a);
31261 dw_loc_list_ref curr;
31262 for (curr = list; curr != NULL; curr = curr->dw_loc_next)
31263 {
31264 /* Don't index an entry that has already been indexed
31265 or won't be output.  Make sure skip_loc_list_entry doesn't
31266 call size_of_locs, because that might cause a circular
31267 dependency: sizing the expressions needs the address table
31268 indexes to be computed already, while index_location_lists is
31269 still adding new entries through add_addr_table_entry, and the
31270 index computation requires no further additions to the hash
31271 table afterwards.  In the rare case of a DWARF[234] location
31272 expression >= 64KB, we'll just waste an unused address table entry for it.  */
31273 if (curr->begin_entry != NULL
31274 || skip_loc_list_entry (curr))
31275 continue;
31276
31277 curr->begin_entry
31278 = add_addr_table_entry (xstrdup (curr->begin), ate_kind_label);
31279 }
31280 }
31281
31282 FOR_EACH_CHILD (die, c, index_location_lists (c));
31283 }
31284
31285 /* Optimize location lists referenced from DIE
31286 children and share them whenever possible. */
31287
31288 static void
31289 optimize_location_lists (dw_die_ref die)
31290 {
31291 loc_list_hash_type htab (500);
31292 optimize_location_lists_1 (die, &htab);
31293 }
31294 \f
31295 /* Traverse the limbo die list, and add parent/child links. The only
31296 dies without parents that should be here are concrete instances of
31297 inline functions, and the comp_unit_die. We can ignore the comp_unit_die.
31298 For concrete instances, we can get the parent die from the abstract
31299 instance. */
31300
31301 static void
31302 flush_limbo_die_list (void)
31303 {
31304 limbo_die_node *node;
31305
31306 /* get_context_die calls force_decl_die, which can put new DIEs on the
31307 limbo list in LTO mode when nested functions are put in a different
31308 partition than that of their parent function. */
31309 while ((node = limbo_die_list))
31310 {
31311 dw_die_ref die = node->die;
31312 limbo_die_list = node->next;
31313
31314 if (die->die_parent == NULL)
31315 {
31316 dw_die_ref origin = get_AT_ref (die, DW_AT_abstract_origin);
31317
31318 if (origin && origin->die_parent)
31319 add_child_die (origin->die_parent, die);
31320 else if (is_cu_die (die))
31321 ;
31322 else if (seen_error ())
31323 /* It's OK to be confused by errors in the input. */
31324 add_child_die (comp_unit_die (), die);
31325 else
31326 {
31327 /* In certain situations, the lexical block containing a
31328 nested function can be optimized away, which results
31329 in the nested function die being orphaned. Likewise
31330 with the return type of that nested function. Force
31331 this to be a child of the containing function.
31332
31333 It may happen that even the containing function got fully
31334 inlined and optimized out. In that case we are lost and
31335 assign the empty child. This should not be big issue as
31336 the function is likely unreachable too. */
31337 gcc_assert (node->created_for);
31338
31339 if (DECL_P (node->created_for))
31340 origin = get_context_die (DECL_CONTEXT (node->created_for));
31341 else if (TYPE_P (node->created_for))
31342 origin = scope_die_for (node->created_for, comp_unit_die ());
31343 else
31344 origin = comp_unit_die ();
31345
31346 add_child_die (origin, die);
31347 }
31348 }
31349 }
31350 }
31351
31352 /* Reset DIEs so we can output them again. */
31353
31354 static void
31355 reset_dies (dw_die_ref die)
31356 {
31357 dw_die_ref c;
31358
31359 /* Remove stuff we re-generate. */
31360 die->die_mark = 0;
31361 die->die_offset = 0;
31362 die->die_abbrev = 0;
31363 remove_AT (die, DW_AT_sibling);
31364
31365 FOR_EACH_CHILD (die, c, reset_dies (c));
31366 }
31367
31368 /* Output stuff that dwarf requires at the end of every file,
31369 and generate the DWARF-2 debugging info. */
31370
31371 static void
31372 dwarf2out_finish (const char *filename)
31373 {
31374 comdat_type_node *ctnode;
31375 dw_die_ref main_comp_unit_die;
31376 unsigned char checksum[16];
31377 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
31378
31379 /* Flush out any latecomers to the limbo party. */
31380 flush_limbo_die_list ();
31381
31382 if (inline_entry_data_table)
31383 gcc_assert (inline_entry_data_table->is_empty ());
31384
31385 if (flag_checking)
31386 {
31387 verify_die (comp_unit_die ());
31388 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31389 verify_die (node->die);
31390 }
31391
31392 /* We shouldn't have any symbols with delayed asm names for
31393 DIEs generated after early finish. */
31394 gcc_assert (deferred_asm_name == NULL);
31395
31396 gen_remaining_tmpl_value_param_die_attribute ();
31397
31398 if (flag_generate_lto || flag_generate_offload)
31399 {
31400 gcc_assert (flag_fat_lto_objects || flag_generate_offload);
31401
31402 /* Prune stuff so that dwarf2out_finish runs successfully
31403 for the fat part of the object. */
31404 reset_dies (comp_unit_die ());
31405 for (limbo_die_node *node = cu_die_list; node; node = node->next)
31406 reset_dies (node->die);
31407
31408 hash_table<comdat_type_hasher> comdat_type_table (100);
31409 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31410 {
31411 comdat_type_node **slot
31412 = comdat_type_table.find_slot (ctnode, INSERT);
31413
31414 /* Don't reset types twice. */
31415 if (*slot != HTAB_EMPTY_ENTRY)
31416 continue;
31417
31418 /* Remove the pointer to the line table. */
31419 remove_AT (ctnode->root_die, DW_AT_stmt_list);
31420
31421 if (debug_info_level >= DINFO_LEVEL_TERSE)
31422 reset_dies (ctnode->root_die);
31423
31424 *slot = ctnode;
31425 }
31426
31427 /* Reset die CU symbol so we don't output it twice. */
31428 comp_unit_die ()->die_id.die_symbol = NULL;
31429
31430 /* Remove DW_AT_macro and DW_AT_stmt_list from the early output. */
31431 remove_AT (comp_unit_die (), DW_AT_stmt_list);
31432 if (have_macinfo)
31433 remove_AT (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE);
31434
31435 /* Remove indirect string decisions. */
31436 debug_str_hash->traverse<void *, reset_indirect_string> (NULL);
31437 if (debug_line_str_hash)
31438 {
31439 debug_line_str_hash->traverse<void *, reset_indirect_string> (NULL);
31440 debug_line_str_hash = NULL;
31441 }
31442 }
31443
31444 #if ENABLE_ASSERT_CHECKING
31445 {
31446 dw_die_ref die = comp_unit_die (), c;
31447 FOR_EACH_CHILD (die, c, gcc_assert (! c->die_mark));
31448 }
31449 #endif
31450 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31451 resolve_addr (ctnode->root_die);
31452 resolve_addr (comp_unit_die ());
31453 move_marked_base_types ();
31454
31455 if (dump_file)
31456 {
31457 fprintf (dump_file, "DWARF for %s\n", filename);
31458 print_die (comp_unit_die (), dump_file);
31459 }
31460
31461 /* Initialize sections and labels used for actual assembler output. */
31462 unsigned generation = init_sections_and_labels (false);
31463
31464 /* Traverse the DIE's and add sibling attributes to those DIE's that
31465 have children. */
31466 add_sibling_attributes (comp_unit_die ());
31467 limbo_die_node *node;
31468 for (node = cu_die_list; node; node = node->next)
31469 add_sibling_attributes (node->die);
31470 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31471 add_sibling_attributes (ctnode->root_die);
31472
31473 /* When splitting DWARF info, we put some attributes in the
31474 skeleton compile_unit DIE that remains in the .o, while
31475 most attributes go in the DWO compile_unit_die. */
31476 if (dwarf_split_debug_info)
31477 {
31478 limbo_die_node *cu;
31479 main_comp_unit_die = gen_compile_unit_die (NULL);
31480 if (dwarf_version >= 5)
31481 main_comp_unit_die->die_tag = DW_TAG_skeleton_unit;
31482 cu = limbo_die_list;
31483 gcc_assert (cu->die == main_comp_unit_die);
31484 limbo_die_list = limbo_die_list->next;
31485 cu->next = cu_die_list;
31486 cu_die_list = cu;
31487 }
31488 else
31489 main_comp_unit_die = comp_unit_die ();
31490
31491 /* Output a terminator label for the .text section. */
31492 switch_to_section (text_section);
31493 targetm.asm_out.internal_label (asm_out_file, TEXT_END_LABEL, 0);
31494 if (cold_text_section)
31495 {
31496 switch_to_section (cold_text_section);
31497 targetm.asm_out.internal_label (asm_out_file, COLD_END_LABEL, 0);
31498 }
31499
31500 /* We can only use the low/high_pc attributes if all of the code was
31501 in .text. */
31502 if (!have_multiple_function_sections
31503 || (dwarf_version < 3 && dwarf_strict))
31504 {
31505 /* Don't add if the CU has no associated code. */
31506 if (text_section_used)
31507 add_AT_low_high_pc (main_comp_unit_die, text_section_label,
31508 text_end_label, true);
31509 }
31510 else
31511 {
31512 unsigned fde_idx;
31513 dw_fde_ref fde;
31514 bool range_list_added = false;
31515
31516 if (text_section_used)
31517 add_ranges_by_labels (main_comp_unit_die, text_section_label,
31518 text_end_label, &range_list_added, true);
31519 if (cold_text_section_used)
31520 add_ranges_by_labels (main_comp_unit_die, cold_text_section_label,
31521 cold_end_label, &range_list_added, true);
31522
31523 FOR_EACH_VEC_ELT (*fde_vec, fde_idx, fde)
31524 {
31525 if (DECL_IGNORED_P (fde->decl))
31526 continue;
31527 if (!fde->in_std_section)
31528 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_begin,
31529 fde->dw_fde_end, &range_list_added,
31530 true);
31531 if (fde->dw_fde_second_begin && !fde->second_in_std_section)
31532 add_ranges_by_labels (main_comp_unit_die, fde->dw_fde_second_begin,
31533 fde->dw_fde_second_end, &range_list_added,
31534 true);
31535 }
31536
31537 if (range_list_added)
31538 {
31539 /* We need to give .debug_loc and .debug_ranges an appropriate
31540 "base address". Use zero so that these addresses become
31541 absolute. Historically, we've emitted the unexpected
31542 DW_AT_entry_pc instead of DW_AT_low_pc for this purpose.
31543 Emit both to give time for other tools to adapt. */
31544 add_AT_addr (main_comp_unit_die, DW_AT_low_pc, const0_rtx, true);
31545 if (! dwarf_strict && dwarf_version < 4)
31546 add_AT_addr (main_comp_unit_die, DW_AT_entry_pc, const0_rtx, true);
31547
31548 add_ranges (NULL);
31549 }
31550 }
31551
31552 /* AIX Assembler inserts the length, so adjust the reference to match the
31553 offset expected by debuggers. */
31554 strcpy (dl_section_ref, debug_line_section_label);
31555 if (XCOFF_DEBUGGING_INFO)
31556 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
31557
31558 if (debug_info_level >= DINFO_LEVEL_TERSE)
31559 add_AT_lineptr (main_comp_unit_die, DW_AT_stmt_list,
31560 dl_section_ref);
31561
31562 if (have_macinfo)
31563 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
31564 macinfo_section_label);
31565
31566 if (dwarf_split_debug_info)
31567 {
31568 if (have_location_lists)
31569 {
31570 /* Since we generate the loclists in the split DWARF .dwo
31571 file itself, we don't need to generate a loclists_base
31572 attribute for the split compile unit DIE. That attribute
31573 (and using relocatable sec_offset FORMs) isn't allowed
31574 for a split compile unit. Only if the .debug_loclists
31575 section was in the main file, would we need to generate a
31576 loclists_base attribute here (for the full or skeleton
31577 unit DIE). */
31578
31579 /* optimize_location_lists calculates the size of the lists,
31580 so index them first, and assign indices to the entries.
31581 Although optimize_location_lists will remove entries from
31582 the table, it only does so for duplicates, and therefore
31583 only reduces ref_counts to 1. */
31584 index_location_lists (comp_unit_die ());
31585 }
31586
31587 if (addr_index_table != NULL)
31588 {
31589 unsigned int index = 0;
31590 addr_index_table
31591 ->traverse_noresize<unsigned int *, index_addr_table_entry>
31592 (&index);
31593 }
31594 }
31595
31596 loc_list_idx = 0;
31597 if (have_location_lists)
31598 {
31599 optimize_location_lists (comp_unit_die ());
31600 /* And finally assign indexes to the entries for -gsplit-dwarf. */
31601 if (dwarf_version >= 5 && dwarf_split_debug_info)
31602 assign_location_list_indexes (comp_unit_die ());
31603 }
31604
31605 save_macinfo_strings ();
31606
31607 if (dwarf_split_debug_info)
31608 {
31609 unsigned int index = 0;
31610
31611 /* Add attributes common to skeleton compile_units and
31612 type_units. Because these attributes include strings, it
31613 must be done before freezing the string table. Top-level
31614 skeleton die attrs are added when the skeleton type unit is
31615 created, so ensure it is created by this point. */
31616 add_top_level_skeleton_die_attrs (main_comp_unit_die);
31617 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
31618 }
31619
31620 /* Output all of the compilation units. We put the main one last so that
31621 the offsets are available to output_pubnames. */
31622 for (node = cu_die_list; node; node = node->next)
31623 output_comp_unit (node->die, 0, NULL);
31624
31625 hash_table<comdat_type_hasher> comdat_type_table (100);
31626 for (ctnode = comdat_type_list; ctnode != NULL; ctnode = ctnode->next)
31627 {
31628 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
31629
31630 /* Don't output duplicate types. */
31631 if (*slot != HTAB_EMPTY_ENTRY)
31632 continue;
31633
31634 /* Add a pointer to the line table for the main compilation unit
31635 so that the debugger can make sense of DW_AT_decl_file
31636 attributes. */
31637 if (debug_info_level >= DINFO_LEVEL_TERSE)
31638 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
31639 (!dwarf_split_debug_info
31640 ? dl_section_ref
31641 : debug_skeleton_line_section_label));
31642
31643 output_comdat_type_unit (ctnode, false);
31644 *slot = ctnode;
31645 }
31646
31647 if (dwarf_split_debug_info)
31648 {
31649 int mark;
31650 struct md5_ctx ctx;
31651
31652 if (dwarf_version >= 5 && !vec_safe_is_empty (ranges_table))
31653 index_rnglists ();
31654
31655 /* Compute a checksum of the comp_unit to use as the dwo_id. */
31656 md5_init_ctx (&ctx);
31657 mark = 0;
31658 die_checksum (comp_unit_die (), &ctx, &mark);
31659 unmark_all_dies (comp_unit_die ());
31660 md5_finish_ctx (&ctx, checksum);
31661
31662 if (dwarf_version < 5)
31663 {
31664 /* Use the first 8 bytes of the checksum as the dwo_id,
31665 and add it to both comp-unit DIEs. */
31666 add_AT_data8 (main_comp_unit_die, DW_AT_GNU_dwo_id, checksum);
31667 add_AT_data8 (comp_unit_die (), DW_AT_GNU_dwo_id, checksum);
31668 }
31669
31670 /* Add the base offset of the ranges table to the skeleton
31671 comp-unit DIE. */
31672 if (!vec_safe_is_empty (ranges_table))
31673 {
31674 if (dwarf_version >= 5)
31675 add_AT_lineptr (main_comp_unit_die, DW_AT_rnglists_base,
31676 ranges_base_label);
31677 else
31678 add_AT_lineptr (main_comp_unit_die, DW_AT_GNU_ranges_base,
31679 ranges_section_label);
31680 }
31681
31682 output_addr_table ();
31683 }
31684
31685 /* Output the main compilation unit if non-empty or if .debug_macinfo
31686 or .debug_macro will be emitted. */
31687 output_comp_unit (comp_unit_die (), have_macinfo,
31688 dwarf_split_debug_info ? checksum : NULL);
31689
31690 if (dwarf_split_debug_info && info_section_emitted)
31691 output_skeleton_debug_sections (main_comp_unit_die, checksum);
31692
31693 /* Output the abbreviation table. */
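/* Explanatory note (not in the original source): a length of 1 apparently
   means no abbreviations were used, since slot 0 of abbrev_die_table is
   reserved (DWARF abbreviation code 0 denotes a null DIE ending a sibling
   chain).  */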
31694 if (vec_safe_length (abbrev_die_table) != 1)
31695 {
31696 switch_to_section (debug_abbrev_section);
31697 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
31698 output_abbrev_section ();
31699 }
31700
31701 /* Output location list section if necessary. */
31702 if (have_location_lists)
31703 {
31704 char l1[MAX_ARTIFICIAL_LABEL_BYTES];
31705 char l2[MAX_ARTIFICIAL_LABEL_BYTES];
31706 /* Output the location lists info. */
31707 switch_to_section (debug_loc_section);
31708 if (dwarf_version >= 5)
31709 {
31710 ASM_GENERATE_INTERNAL_LABEL (l1, DEBUG_LOC_SECTION_LABEL, 2);
31711 ASM_GENERATE_INTERNAL_LABEL (l2, DEBUG_LOC_SECTION_LABEL, 3);
31712 if (DWARF_INITIAL_LENGTH_SIZE - DWARF_OFFSET_SIZE == 4)
31713 dw2_asm_output_data (4, 0xffffffff,
31714 "Initial length escape value indicating "
31715 "64-bit DWARF extension");
31716 dw2_asm_output_delta (DWARF_OFFSET_SIZE, l2, l1,
31717 "Length of Location Lists");
31718 ASM_OUTPUT_LABEL (asm_out_file, l1);
31719 output_dwarf_version ();
31720 dw2_asm_output_data (1, DWARF2_ADDR_SIZE, "Address Size");
31721 dw2_asm_output_data (1, 0, "Segment Size");
31722 dw2_asm_output_data (4, dwarf_split_debug_info ? loc_list_idx : 0,
31723 "Offset Entry Count");
31724 }
31725 ASM_OUTPUT_LABEL (asm_out_file, loc_section_label);
31726 if (dwarf_version >= 5 && dwarf_split_debug_info)
31727 {
31728 unsigned int save_loc_list_idx = loc_list_idx;
31729 loc_list_idx = 0;
31730 output_loclists_offsets (comp_unit_die ());
31731 gcc_assert (save_loc_list_idx == loc_list_idx);
31732 }
31733 output_location_lists (comp_unit_die ());
31734 if (dwarf_version >= 5)
31735 ASM_OUTPUT_LABEL (asm_out_file, l2);
31736 }
31737
31738 output_pubtables ();
31739
31740 /* Output the address range information if a CU (.debug_info section)
31741 was emitted. We output an empty table even if we had no functions
31742 to put in it. This is because the consumer has no way to tell the
31743 difference between an empty table that we omitted and failure to
31744 generate a table that would have contained data. */
31745 if (info_section_emitted)
31746 {
31747 switch_to_section (debug_aranges_section);
31748 output_aranges ();
31749 }
31750
31751 /* Output ranges section if necessary. */
31752 if (!vec_safe_is_empty (ranges_table))
31753 {
31754 if (dwarf_version >= 5)
31755 output_rnglists (generation);
31756 else
31757 output_ranges ();
31758 }
31759
31760 /* Have to end the macro section. */
31761 if (have_macinfo)
31762 {
31763 switch_to_section (debug_macinfo_section);
31764 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
31765 output_macinfo (!dwarf_split_debug_info ? debug_line_section_label
31766 : debug_skeleton_line_section_label, false);
31767 dw2_asm_output_data (1, 0, "End compilation unit");
31768 }
31769
31770 /* Output the source line correspondence table. We must do this
31771 even if there is no line information. Otherwise, on an empty
31772 translation unit, we will generate a present, but empty,
31773 .debug_info section. IRIX 6.5 `nm' will then complain when
31774 examining the file. This is done late so that any filenames
31775 used by the debug_info section are marked as 'used'. */
31776 switch_to_section (debug_line_section);
31777 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
31778 if (! output_asm_line_debug_info ())
31779 output_line_info (false);
31780
31781 if (dwarf_split_debug_info && info_section_emitted)
31782 {
31783 switch_to_section (debug_skeleton_line_section);
31784 ASM_OUTPUT_LABEL (asm_out_file, debug_skeleton_line_section_label);
31785 output_line_info (true);
31786 }
31787
31788 /* If we emitted any indirect strings, output the string table too. */
31789 if (debug_str_hash || skeleton_debug_str_hash)
31790 output_indirect_strings ();
31791 if (debug_line_str_hash)
31792 {
31793 switch_to_section (debug_line_str_section);
31794 const enum dwarf_form form = DW_FORM_line_strp;
31795 debug_line_str_hash->traverse<enum dwarf_form,
31796 output_indirect_string> (form);
31797 }
31798
31799 /* ??? Move lvugid out of dwarf2out_source_line and reset it too? */
31800 symview_upper_bound = 0;
31801 if (zero_view_p)
31802 bitmap_clear (zero_view_p);
31803 }
31804
31805 /* Returns a hash value for X (which really is a variable_value_struct). */
31806
31807 inline hashval_t
31808 variable_value_hasher::hash (variable_value_struct *x)
31809 {
31810 return (hashval_t) x->decl_id;
31811 }
31812
31813 /* Return nonzero if decl_id of variable_value_struct X is the same as
31814 UID of decl Y. */
31815
31816 inline bool
31817 variable_value_hasher::equal (variable_value_struct *x, tree y)
31818 {
31819 return x->decl_id == DECL_UID (y);
31820 }
31821
31822 /* Helper function for resolve_variable_value, handle
31823 DW_OP_GNU_variable_value in one location expression.
31824 Return true if exprloc has been changed into loclist. */
31825
31826 static bool
31827 resolve_variable_value_in_expr (dw_attr_node *a, dw_loc_descr_ref loc)
31828 {
31829 dw_loc_descr_ref next;
31830 for (dw_loc_descr_ref prev = NULL; loc; prev = loc, loc = next)
31831 {
31832 next = loc->dw_loc_next;
31833 if (loc->dw_loc_opc != DW_OP_GNU_variable_value
31834 || loc->dw_loc_oprnd1.val_class != dw_val_class_decl_ref)
31835 continue;
31836
31837 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31838 if (DECL_CONTEXT (decl) != current_function_decl)
31839 continue;
31840
31841 dw_die_ref ref = lookup_decl_die (decl);
31842 if (ref)
31843 {
31844 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31845 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31846 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31847 continue;
31848 }
31849 dw_loc_list_ref l = loc_list_from_tree (decl, 0, NULL);
31850 if (l == NULL)
31851 continue;
31852 if (l->dw_loc_next)
31853 {
31854 if (AT_class (a) != dw_val_class_loc)
31855 continue;
31856 switch (a->dw_attr)
31857 {
31858 /* Following attributes allow both exprloc and loclist
31859 classes, so we can change them into a loclist. */
31860 case DW_AT_location:
31861 case DW_AT_string_length:
31862 case DW_AT_return_addr:
31863 case DW_AT_data_member_location:
31864 case DW_AT_frame_base:
31865 case DW_AT_segment:
31866 case DW_AT_static_link:
31867 case DW_AT_use_location:
31868 case DW_AT_vtable_elem_location:
31869 if (prev)
31870 {
31871 prev->dw_loc_next = NULL;
31872 prepend_loc_descr_to_each (l, AT_loc (a));
31873 }
31874 if (next)
31875 add_loc_descr_to_each (l, next);
31876 a->dw_attr_val.val_class = dw_val_class_loc_list;
31877 a->dw_attr_val.val_entry = NULL;
31878 a->dw_attr_val.v.val_loc_list = l;
31879 have_location_lists = true;
31880 return true;
31881 /* Following attributes allow both exprloc and reference,
31882 so if the whole expression is DW_OP_GNU_variable_value alone
31883 we could transform it into reference. */
31884 case DW_AT_byte_size:
31885 case DW_AT_bit_size:
31886 case DW_AT_lower_bound:
31887 case DW_AT_upper_bound:
31888 case DW_AT_bit_stride:
31889 case DW_AT_count:
31890 case DW_AT_allocated:
31891 case DW_AT_associated:
31892 case DW_AT_byte_stride:
31893 if (prev == NULL && next == NULL)
31894 break;
31895 /* FALLTHRU */
31896 default:
31897 if (dwarf_strict)
31898 continue;
31899 break;
31900 }
31901 /* Create DW_TAG_variable that we can refer to. */
31902 gen_decl_die (decl, NULL_TREE, NULL,
31903 lookup_decl_die (current_function_decl));
31904 ref = lookup_decl_die (decl);
31905 if (ref)
31906 {
31907 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
31908 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
31909 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
31910 }
31911 continue;
31912 }
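/* Explanatory note (not in the original source): L is a single location
   expression, so splice its operations in place of the
   DW_OP_GNU_variable_value operation being resolved.  */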
31913 if (prev)
31914 {
31915 prev->dw_loc_next = l->expr;
31916 add_loc_descr (&prev->dw_loc_next, next);
31917 free_loc_descr (loc, NULL);
31918 next = prev->dw_loc_next;
31919 }
31920 else
31921 {
31922 memcpy (loc, l->expr, sizeof (dw_loc_descr_node));
31923 add_loc_descr (&loc, next);
31924 next = loc;
31925 }
31926 loc = prev;
31927 }
31928 return false;
31929 }
31930
31931 /* Attempt to resolve DW_OP_GNU_variable_value using loc_list_from_tree. */
31932
31933 static void
31934 resolve_variable_value (dw_die_ref die)
31935 {
31936 dw_attr_node *a;
31937 dw_loc_list_ref loc;
31938 unsigned ix;
31939
31940 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
31941 switch (AT_class (a))
31942 {
31943 case dw_val_class_loc:
31944 if (!resolve_variable_value_in_expr (a, AT_loc (a)))
31945 break;
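/* Explanatory note (not in the original source): resolve_variable_value_in_expr
   returned true, meaning the exprloc has been turned into a location list,
   so handle it as one below.  */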
31946 /* FALLTHRU */
31947 case dw_val_class_loc_list:
31948 loc = AT_loc_list (a);
31949 gcc_assert (loc);
31950 for (; loc; loc = loc->dw_loc_next)
31951 resolve_variable_value_in_expr (a, loc->expr);
31952 break;
31953 default:
31954 break;
31955 }
31956 }
31957
31958 /* Attempt to optimize DW_OP_GNU_variable_value referring to
31959 temporaries in the current function. */
31960
31961 static void
31962 resolve_variable_values (void)
31963 {
31964 if (!variable_value_hash || !current_function_decl)
31965 return;
31966
31967 struct variable_value_struct *node
31968 = variable_value_hash->find_with_hash (current_function_decl,
31969 DECL_UID (current_function_decl));
31970
31971 if (node == NULL)
31972 return;
31973
31974 unsigned int i;
31975 dw_die_ref die;
31976 FOR_EACH_VEC_SAFE_ELT (node->dies, i, die)
31977 resolve_variable_value (die);
31978 }
31979
31980 /* Helper function for note_variable_value, handle one location
31981 expression. */
31982
31983 static void
31984 note_variable_value_in_expr (dw_die_ref die, dw_loc_descr_ref loc)
31985 {
31986 for (; loc; loc = loc->dw_loc_next)
31987 if (loc->dw_loc_opc == DW_OP_GNU_variable_value
31988 && loc->dw_loc_oprnd1.val_class == dw_val_class_decl_ref)
31989 {
31990 tree decl = loc->dw_loc_oprnd1.v.val_decl_ref;
31991 dw_die_ref ref = lookup_decl_die (decl);
31992 if (! ref && (flag_generate_lto || flag_generate_offload))
31993 {
31994 /* ??? This is somewhat of a hack: we do not create DIEs early
31995 for variables not in BLOCK trees, but when generating
31996 early LTO output we need the dw_val_class_decl_ref to be
31997 fully resolved. For fat LTO objects we'd also like to
31998 undo this after LTO dwarf output. */
31999 gcc_assert (DECL_CONTEXT (decl));
32000 dw_die_ref ctx = lookup_decl_die (DECL_CONTEXT (decl));
32001 gcc_assert (ctx != NULL);
32002 gen_decl_die (decl, NULL_TREE, NULL, ctx);
32003 ref = lookup_decl_die (decl);
32004 gcc_assert (ref != NULL);
32005 }
32006 if (ref)
32007 {
32008 loc->dw_loc_oprnd1.val_class = dw_val_class_die_ref;
32009 loc->dw_loc_oprnd1.v.val_die_ref.die = ref;
32010 loc->dw_loc_oprnd1.v.val_die_ref.external = 0;
32011 continue;
32012 }
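/* Explanatory note (not in the original source): no DIE for DECL yet.  If
   DECL belongs to a function that already has a DIE, record the referencing
   DIE keyed by that function's DECL_UID so resolve_variable_values can
   retry the resolution later for that function.  */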
32013 if (VAR_P (decl)
32014 && DECL_CONTEXT (decl)
32015 && TREE_CODE (DECL_CONTEXT (decl)) == FUNCTION_DECL
32016 && lookup_decl_die (DECL_CONTEXT (decl)))
32017 {
32018 if (!variable_value_hash)
32019 variable_value_hash
32020 = hash_table<variable_value_hasher>::create_ggc (10);
32021
32022 tree fndecl = DECL_CONTEXT (decl);
32023 struct variable_value_struct *node;
32024 struct variable_value_struct **slot
32025 = variable_value_hash->find_slot_with_hash (fndecl,
32026 DECL_UID (fndecl),
32027 INSERT);
32028 if (*slot == NULL)
32029 {
32030 node = ggc_cleared_alloc<variable_value_struct> ();
32031 node->decl_id = DECL_UID (fndecl);
32032 *slot = node;
32033 }
32034 else
32035 node = *slot;
32036
32037 vec_safe_push (node->dies, die);
32038 }
32039 }
32040 }
32041
32042 /* Walk the tree DIE and note DIEs with DW_OP_GNU_variable_value still
32043 with dw_val_class_decl_ref operand. */
32044
32045 static void
32046 note_variable_value (dw_die_ref die)
32047 {
32048 dw_die_ref c;
32049 dw_attr_node *a;
32050 dw_loc_list_ref loc;
32051 unsigned ix;
32052
32053 FOR_EACH_VEC_SAFE_ELT (die->die_attr, ix, a)
32054 switch (AT_class (a))
32055 {
32056 case dw_val_class_loc_list:
32057 loc = AT_loc_list (a);
32058 gcc_assert (loc);
32059 if (!loc->noted_variable_value)
32060 {
32061 loc->noted_variable_value = 1;
32062 for (; loc; loc = loc->dw_loc_next)
32063 note_variable_value_in_expr (die, loc->expr);
32064 }
32065 break;
32066 case dw_val_class_loc:
32067 note_variable_value_in_expr (die, AT_loc (a));
32068 break;
32069 default:
32070 break;
32071 }
32072
32073 /* Mark children. */
32074 FOR_EACH_CHILD (die, c, note_variable_value (c));
32075 }
32076
32077 /* Perform any cleanups needed after the early debug generation pass
32078 has run. */
32079
32080 static void
32081 dwarf2out_early_finish (const char *filename)
32082 {
32083 set_early_dwarf s;
32084 char dl_section_ref[MAX_ARTIFICIAL_LABEL_BYTES];
32085
32086 /* PCH might result in the DW_AT_producer string being restored from the
32087 header compilation, so always fill it with an empty string initially
32088 and overwrite only here. */
32089 dw_attr_node *producer = get_AT (comp_unit_die (), DW_AT_producer);
32090 producer_string = gen_producer_string ();
32091 producer->dw_attr_val.v.val_str->refcount--;
32092 producer->dw_attr_val.v.val_str = find_AT_string (producer_string);
32093
32094 /* Add the name for the main input file now. We delayed this from
32095 dwarf2out_init to avoid complications with PCH. */
32096 add_name_attribute (comp_unit_die (), remap_debug_filename (filename));
32097 add_comp_dir_attribute (comp_unit_die ());
32098
32099 /* When emitting DWARF5 .debug_line_str, move DW_AT_name and
32100 DW_AT_comp_dir into .debug_line_str section. */
32101 if (!output_asm_line_debug_info ()
32102 && dwarf_version >= 5
32103 && DWARF5_USE_DEBUG_LINE_STR)
32104 {
32105 for (int i = 0; i < 2; i++)
32106 {
32107 dw_attr_node *a = get_AT (comp_unit_die (),
32108 i ? DW_AT_comp_dir : DW_AT_name);
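/* Explanatory note (not in the original source): strings short enough that
   inlining them costs no more than a DWARF_OFFSET_SIZE reference into
   .debug_line_str are skipped by the check below.  */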
32109 if (a == NULL
32110 || AT_class (a) != dw_val_class_str
32111 || strlen (AT_string (a)) + 1 <= DWARF_OFFSET_SIZE)
32112 continue;
32113
32114 if (! debug_line_str_hash)
32115 debug_line_str_hash
32116 = hash_table<indirect_string_hasher>::create_ggc (10);
32117
32118 struct indirect_string_node *node
32119 = find_AT_string_in_table (AT_string (a), debug_line_str_hash);
32120 set_indirect_string (node);
32121 node->form = DW_FORM_line_strp;
32122 a->dw_attr_val.v.val_str->refcount--;
32123 a->dw_attr_val.v.val_str = node;
32124 }
32125 }
32126
32127 /* With LTO, early dwarf was really finished at compile time, so make
32128 sure to adjust the phase after annotating the LTRANS CU DIE. */
32129 if (in_lto_p)
32130 {
32131 /* Force DW_TAG_imported_unit to be created now, otherwise
32132 we might end up without it, or with it ordered after a
32133 DW_TAG_inlined_subroutine that references DIEs from it. */
32134 if (! flag_wpa && flag_incremental_link != INCREMENTAL_LINK_LTO)
32135 {
32136 unsigned i;
32137 tree tu;
32138 if (external_die_map)
32139 FOR_EACH_VEC_SAFE_ELT (all_translation_units, i, tu)
32140 if (sym_off_pair *desc = external_die_map->get (tu))
32141 {
32142 dw_die_ref import = new_die (DW_TAG_imported_unit,
32143 comp_unit_die (), NULL_TREE);
32144 add_AT_external_die_ref (import, DW_AT_import,
32145 desc->sym, desc->off);
32146 }
32147 }
32148
32149 early_dwarf_finished = true;
32150 if (dump_file)
32151 {
32152 fprintf (dump_file, "LTO EARLY DWARF for %s\n", filename);
32153 print_die (comp_unit_die (), dump_file);
32154 }
32155 return;
32156 }
32157
32158 /* Walk through the list of incomplete types again, trying once more to
32159 emit full debugging info for them. */
32160 retry_incomplete_types ();
32161
32162 /* The point here is to flush out the limbo list so that it is empty
32163 and we don't need to stream it for LTO. */
32164 flush_limbo_die_list ();
32165
32166 gen_scheduled_generic_parms_dies ();
32167 gen_remaining_tmpl_value_param_die_attribute ();
32168
32169 /* Add DW_AT_linkage_name for all deferred DIEs. */
32170 for (limbo_die_node *node = deferred_asm_name; node; node = node->next)
32171 {
32172 tree decl = node->created_for;
32173 if (DECL_ASSEMBLER_NAME (decl) != DECL_NAME (decl)
32174 /* A missing DECL_ASSEMBLER_NAME can be a constant DIE that
32175 ended up in deferred_asm_name before we knew it was
32176 constant and never written to disk. */
32177 && DECL_ASSEMBLER_NAME (decl))
32178 {
32179 add_linkage_attr (node->die, decl);
32180 move_linkage_attr (node->die);
32181 }
32182 }
32183 deferred_asm_name = NULL;
32184
32185 if (flag_eliminate_unused_debug_types)
32186 prune_unused_types ();
32187
32188 /* Generate separate COMDAT sections for type DIEs. */
32189 if (use_debug_types)
32190 {
32191 break_out_comdat_types (comp_unit_die ());
32192
32193 /* Each new type_unit DIE was added to the limbo die list when created.
32194 Since these have all been added to comdat_type_list, clear the
32195 limbo die list. */
32196 limbo_die_list = NULL;
32197
32198 /* For each new comdat type unit, copy declarations for incomplete
32199 types to make the new unit self-contained (i.e., no direct
32200 references to the main compile unit). */
32201 for (comdat_type_node *ctnode = comdat_type_list;
32202 ctnode != NULL; ctnode = ctnode->next)
32203 copy_decls_for_unworthy_types (ctnode->root_die);
32204 copy_decls_for_unworthy_types (comp_unit_die ());
32205
32206 /* In the process of copying declarations from one unit to another,
32207 we may have left some declarations behind that are no longer
32208 referenced. Prune them. */
32209 prune_unused_types ();
32210 }
32211
32212 /* Traverse the DIE's and note DIEs with DW_OP_GNU_variable_value still
32213 with dw_val_class_decl_ref operand. */
32214 note_variable_value (comp_unit_die ());
32215 for (limbo_die_node *node = cu_die_list; node; node = node->next)
32216 note_variable_value (node->die);
32217 for (comdat_type_node *ctnode = comdat_type_list; ctnode != NULL;
32218 ctnode = ctnode->next)
32219 note_variable_value (ctnode->root_die);
32220 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32221 note_variable_value (node->die);
32222
32223 /* The AT_pubnames attribute needs to go in all skeleton dies, including
32224 both the main_cu and all skeleton TUs. Making this call unconditional
32225 would end up either adding a second copy of the AT_pubnames attribute, or
32226 requiring a special case in add_top_level_skeleton_die_attrs. */
32227 if (!dwarf_split_debug_info)
32228 add_AT_pubnames (comp_unit_die ());
32229
32230 /* The early debug phase is now finished. */
32231 early_dwarf_finished = true;
32232 if (dump_file)
32233 {
32234 fprintf (dump_file, "EARLY DWARF for %s\n", filename);
32235 print_die (comp_unit_die (), dump_file);
32236 }
32237
32238 /* Do not generate DWARF assembler now when not producing LTO bytecode. */
32239 if ((!flag_generate_lto && !flag_generate_offload)
32240 /* FIXME: Disable debug info generation for (PE-)COFF targets since the
32241 copy_lto_debug_sections operation of the simple object support in
32242 libiberty is not implemented for them yet. */
32243 || TARGET_PECOFF || TARGET_COFF)
32244 return;
32245
32246 /* Now that we are going to output for LTO, initialize sections and
32247 labels to the LTO variants.  We don't need a random-seed postfix
32248 as for other LTO sections, since linking the LTO debug sections
32249 into one in a partial link is fine.  */
32250 init_sections_and_labels (true);
32251
32252 /* The output below is modeled after dwarf2out_finish with all
32253 location related output removed and some LTO specific changes.
32254 Some refactoring might make both smaller and easier to match up. */
32255
32256 /* Traverse the DIE's and add sibling attributes to those DIE's
32257 that have children. */
32258 add_sibling_attributes (comp_unit_die ());
32259 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32260 add_sibling_attributes (node->die);
32261 for (comdat_type_node *ctnode = comdat_type_list;
32262 ctnode != NULL; ctnode = ctnode->next)
32263 add_sibling_attributes (ctnode->root_die);
32264
32265 /* AIX Assembler inserts the length, so adjust the reference to match the
32266 offset expected by debuggers. */
32267 strcpy (dl_section_ref, debug_line_section_label);
32268 if (XCOFF_DEBUGGING_INFO)
32269 strcat (dl_section_ref, DWARF_INITIAL_LENGTH_SIZE_STR);
32270
32271 if (debug_info_level >= DINFO_LEVEL_TERSE)
32272 add_AT_lineptr (comp_unit_die (), DW_AT_stmt_list, dl_section_ref);
32273
32274 if (have_macinfo)
32275 add_AT_macptr (comp_unit_die (), DEBUG_MACRO_ATTRIBUTE,
32276 macinfo_section_label);
32277
32278 save_macinfo_strings ();
32279
32280 if (dwarf_split_debug_info)
32281 {
32282 unsigned int index = 0;
32283 debug_str_hash->traverse_noresize<unsigned int *, index_string> (&index);
32284 }
32285
32286 /* Output all of the compilation units. We put the main one last so that
32287 the offsets are available to output_pubnames. */
32288 for (limbo_die_node *node = limbo_die_list; node; node = node->next)
32289 output_comp_unit (node->die, 0, NULL);
32290
32291 hash_table<comdat_type_hasher> comdat_type_table (100);
32292 for (comdat_type_node *ctnode = comdat_type_list;
32293 ctnode != NULL; ctnode = ctnode->next)
32294 {
32295 comdat_type_node **slot = comdat_type_table.find_slot (ctnode, INSERT);
32296
32297 /* Don't output duplicate types. */
32298 if (*slot != HTAB_EMPTY_ENTRY)
32299 continue;
32300
32301 /* Add a pointer to the line table for the main compilation unit
32302 so that the debugger can make sense of DW_AT_decl_file
32303 attributes. */
32304 if (debug_info_level >= DINFO_LEVEL_TERSE)
32305 add_AT_lineptr (ctnode->root_die, DW_AT_stmt_list,
32306 (!dwarf_split_debug_info
32307 ? debug_line_section_label
32308 : debug_skeleton_line_section_label));
32309
32310 output_comdat_type_unit (ctnode, true);
32311 *slot = ctnode;
32312 }
32313
32314 /* Attach a unique symbol to the main debuginfo section. */
32315 compute_comp_unit_symbol (comp_unit_die ());
32316
32317 /* Output the main compilation unit. We always need it if only for
32318 the CU symbol. */
32319 output_comp_unit (comp_unit_die (), true, NULL);
32320
32321 /* Output the abbreviation table. */
32322 if (vec_safe_length (abbrev_die_table) != 1)
32323 {
32324 switch_to_section (debug_abbrev_section);
32325 ASM_OUTPUT_LABEL (asm_out_file, abbrev_section_label);
32326 output_abbrev_section ();
32327 }
32328
32329 /* Have to end the macro section. */
32330 if (have_macinfo)
32331 {
32332 /* We have to save macinfo state if we need to output it again
32333 for the FAT part of the object. */
32334 vec<macinfo_entry, va_gc> *saved_macinfo_table = macinfo_table;
32335 if (flag_fat_lto_objects)
32336 macinfo_table = macinfo_table->copy ();
32337
32338 switch_to_section (debug_macinfo_section);
32339 ASM_OUTPUT_LABEL (asm_out_file, macinfo_section_label);
32340 output_macinfo (debug_line_section_label, true);
32341 dw2_asm_output_data (1, 0, "End compilation unit");
32342
32343 if (flag_fat_lto_objects)
32344 {
32345 vec_free (macinfo_table);
32346 macinfo_table = saved_macinfo_table;
32347 }
32348 }
32349
32350 /* Emit a skeleton debug_line section. */
32351 switch_to_section (debug_line_section);
32352 ASM_OUTPUT_LABEL (asm_out_file, debug_line_section_label);
32353 output_line_info (true);
32354
32355 /* If we emitted any indirect strings, output the string table too. */
32356 if (debug_str_hash || skeleton_debug_str_hash)
32357 output_indirect_strings ();
32358 if (debug_line_str_hash)
32359 {
32360 switch_to_section (debug_line_str_section);
32361 const enum dwarf_form form = DW_FORM_line_strp;
32362 debug_line_str_hash->traverse<enum dwarf_form,
32363 output_indirect_string> (form);
32364 }
32365
32366 /* Switch back to the text section. */
32367 switch_to_section (text_section);
32368 }
32369
32370 /* Reset all state within dwarf2out.c so that we can rerun the compiler
32371 within the same process. For use by toplev::finalize. */
32372
32373 void
32374 dwarf2out_c_finalize (void)
32375 {
32376 last_var_location_insn = NULL;
32377 cached_next_real_insn = NULL;
32378 used_rtx_array = NULL;
32379 incomplete_types = NULL;
32380 debug_info_section = NULL;
32381 debug_skeleton_info_section = NULL;
32382 debug_abbrev_section = NULL;
32383 debug_skeleton_abbrev_section = NULL;
32384 debug_aranges_section = NULL;
32385 debug_addr_section = NULL;
32386 debug_macinfo_section = NULL;
32387 debug_line_section = NULL;
32388 debug_skeleton_line_section = NULL;
32389 debug_loc_section = NULL;
32390 debug_pubnames_section = NULL;
32391 debug_pubtypes_section = NULL;
32392 debug_str_section = NULL;
32393 debug_line_str_section = NULL;
32394 debug_str_dwo_section = NULL;
32395 debug_str_offsets_section = NULL;
32396 debug_ranges_section = NULL;
32397 debug_frame_section = NULL;
32398 fde_vec = NULL;
32399 debug_str_hash = NULL;
32400 debug_line_str_hash = NULL;
32401 skeleton_debug_str_hash = NULL;
32402 dw2_string_counter = 0;
32403 have_multiple_function_sections = false;
32404 text_section_used = false;
32405 cold_text_section_used = false;
32406 cold_text_section = NULL;
32407 current_unit_personality = NULL;
32408
32409 early_dwarf = false;
32410 early_dwarf_finished = false;
32411
32412 next_die_offset = 0;
32413 single_comp_unit_die = NULL;
32414 comdat_type_list = NULL;
32415 limbo_die_list = NULL;
32416 file_table = NULL;
32417 decl_die_table = NULL;
32418 common_block_die_table = NULL;
32419 decl_loc_table = NULL;
32420 call_arg_locations = NULL;
32421 call_arg_loc_last = NULL;
32422 call_site_count = -1;
32423 tail_call_site_count = -1;
32424 cached_dw_loc_list_table = NULL;
32425 abbrev_die_table = NULL;
32426 delete dwarf_proc_stack_usage_map;
32427 dwarf_proc_stack_usage_map = NULL;
32428 line_info_label_num = 0;
32429 cur_line_info_table = NULL;
32430 text_section_line_info = NULL;
32431 cold_text_section_line_info = NULL;
32432 separate_line_info = NULL;
32433 info_section_emitted = false;
32434 pubname_table = NULL;
32435 pubtype_table = NULL;
32436 macinfo_table = NULL;
32437 ranges_table = NULL;
32438 ranges_by_label = NULL;
32439 rnglist_idx = 0;
32440 have_location_lists = false;
32441 loclabel_num = 0;
32442 poc_label_num = 0;
32443 last_emitted_file = NULL;
32444 label_num = 0;
32445 tmpl_value_parm_die_table = NULL;
32446 generic_type_instances = NULL;
32447 frame_pointer_fb_offset = 0;
32448 frame_pointer_fb_offset_valid = false;
32449 base_types.release ();
32450 XDELETEVEC (producer_string);
32451 producer_string = NULL;
32452 }
32453
32454 #include "gt-dwarf2out.h"